diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/config.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/config.json new file mode 100644 index 0000000000000000000000000000000000000000..7036e6fceb1eb4af29fce71c3e01a990ab139d7c --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure_qk_nonorm_no_clip/layer_wise_new_code_rand", + "model": "d12", + "batch_size": 4, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 10000.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "shuffle_files": true, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 44, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500 + }, + "run_uuid": "5b3c6861-d93c-48e1-b6be-fb835f5d0065", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..cdc754b9f56b51f49161ac2b3637e3d95f504a3f --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.948828935623169, + "total_l1_linf_norm": 16924.23828125, + "total_spectral_norm": 1.948828935623169, + "embed_lm_head_update_fnorm": 1.3264018297195435, + "embed_lm_head_max_l1_linf_norm": 0.3750900626182556, + "embed_lm_head_max_spectral_norm": 0.32108667492866516, + "layer_1_update_fnorm": 0.3762279450893402, + "layer_1_max_l1_linf_norm": 0.5143324732780457, + "layer_1_max_spectral_norm": 0.08016017079353333, + "layer_2_update_fnorm": 0.36995670199394226, + "layer_2_max_l1_linf_norm": 0.6746499538421631, + "layer_2_max_spectral_norm": 0.0990210548043251, + "layer_3_update_fnorm": 0.3863861560821533, + "layer_3_max_l1_linf_norm": 0.6475040912628174, + "layer_3_max_spectral_norm": 0.09363085776567459, + "layer_4_update_fnorm": 0.368022084236145, + "layer_4_max_l1_linf_norm": 0.5836291909217834, + "layer_4_max_spectral_norm": 0.09727839380502701, + "layer_5_update_fnorm": 0.3437543213367462, + "layer_5_max_l1_linf_norm": 0.5312632322311401, + "layer_5_max_spectral_norm": 0.08785983175039291, + "layer_6_update_fnorm": 0.3914293348789215, + "layer_6_max_l1_linf_norm": 0.5625848770141602, + "layer_6_max_spectral_norm": 0.09579366445541382, + "layer_7_update_fnorm": 0.4109397232532501, + "layer_7_max_l1_linf_norm": 0.5983707904815674, + "layer_7_max_spectral_norm": 
0.08972013741731644, + "layer_8_update_fnorm": 0.435416579246521, + "layer_8_max_l1_linf_norm": 0.4848668575286865, + "layer_8_max_spectral_norm": 0.07618333399295807, + "layer_9_update_fnorm": 0.46093979477882385, + "layer_9_max_l1_linf_norm": 0.497328519821167, + "layer_9_max_spectral_norm": 0.07702817022800446, + "layer_10_update_fnorm": 0.47266483306884766, + "layer_10_max_l1_linf_norm": 0.5085608959197998, + "layer_10_max_spectral_norm": 0.07451915740966797, + "layer_11_update_fnorm": 0.46859678626060486, + "layer_11_max_l1_linf_norm": 0.5220758318901062, + "layer_11_max_spectral_norm": 0.07812685519456863, + "layer_12_update_fnorm": 0.4360488951206207, + "layer_12_max_l1_linf_norm": 0.5262923240661621, + "layer_12_max_spectral_norm": 0.08316241204738617, + "block0_q_update_fnorm": 0.10574071109294891, + "block0_q_max_l1_linf_norm": 0.20630404353141785, + "block0_q_max_spectral_norm": 0.06049178168177605, + "block0_k_update_fnorm": 0.11093495041131973, + "block0_k_max_l1_linf_norm": 0.27005812525749207, + "block0_k_max_spectral_norm": 0.07335882633924484, + "block0_v_update_fnorm": 0.12648259103298187, + "block0_v_max_l1_linf_norm": 0.26660406589508057, + "block0_v_max_spectral_norm": 0.0727049931883812, + "block0_o_update_fnorm": 0.1414608508348465, + "block0_o_max_l1_linf_norm": 0.16686081886291504, + "block0_o_max_spectral_norm": 0.06615713983774185, + "block0_mlp_win_update_fnorm": 0.21349242329597473, + "block0_mlp_win_max_l1_linf_norm": 0.16545483469963074, + "block0_mlp_win_max_spectral_norm": 0.06354954838752747, + "block0_mlp_wout_update_fnorm": 0.19079147279262543, + "block0_mlp_wout_max_l1_linf_norm": 0.5143324732780457, + "block0_mlp_wout_max_spectral_norm": 0.08016017079353333, + "block3_q_update_fnorm": 0.10952853411436081, + "block3_q_max_l1_linf_norm": 0.14902180433273315, + "block3_q_max_spectral_norm": 0.038596365600824356, + "block3_k_update_fnorm": 0.09983249008655548, + "block3_k_max_l1_linf_norm": 0.19077512621879578, + "block3_k_max_spectral_norm": 0.030456366017460823, + "block3_v_update_fnorm": 0.09747668355703354, + "block3_v_max_l1_linf_norm": 0.12630456686019897, + "block3_v_max_spectral_norm": 0.038459111005067825, + "block3_o_update_fnorm": 0.11476222425699234, + "block3_o_max_l1_linf_norm": 0.14949646592140198, + "block3_o_max_spectral_norm": 0.047322921454906464, + "block3_mlp_win_update_fnorm": 0.23812216520309448, + "block3_mlp_win_max_l1_linf_norm": 0.21361874043941498, + "block3_mlp_win_max_spectral_norm": 0.07008353620767593, + "block3_mlp_wout_update_fnorm": 0.18448275327682495, + "block3_mlp_wout_max_l1_linf_norm": 0.5836291909217834, + "block3_mlp_wout_max_spectral_norm": 0.09727839380502701, + "block7_q_update_fnorm": 0.13654102385044098, + "block7_q_max_l1_linf_norm": 0.16205886006355286, + "block7_q_max_spectral_norm": 0.03945790231227875, + "block7_k_update_fnorm": 0.11893443763256073, + "block7_k_max_l1_linf_norm": 0.15665404498577118, + "block7_k_max_spectral_norm": 0.0302044115960598, + "block7_v_update_fnorm": 0.10952524840831757, + "block7_v_max_l1_linf_norm": 0.13235989212989807, + "block7_v_max_spectral_norm": 0.04275299608707428, + "block7_o_update_fnorm": 0.12765270471572876, + "block7_o_max_l1_linf_norm": 0.13559457659721375, + "block7_o_max_spectral_norm": 0.04544537514448166, + "block7_mlp_win_update_fnorm": 0.25965410470962524, + "block7_mlp_win_max_l1_linf_norm": 0.16961607336997986, + "block7_mlp_win_max_spectral_norm": 0.07618333399295807, + "block7_mlp_wout_update_fnorm": 0.24695312976837158, + 
"block7_mlp_wout_max_l1_linf_norm": 0.4848668575286865, + "block7_mlp_wout_max_spectral_norm": 0.06475596874952316, + "block11_q_update_fnorm": 0.1291249692440033, + "block11_q_max_l1_linf_norm": 0.16024357080459595, + "block11_q_max_spectral_norm": 0.0445767343044281, + "block11_k_update_fnorm": 0.1122606098651886, + "block11_k_max_l1_linf_norm": 0.21840043365955353, + "block11_k_max_spectral_norm": 0.03204161301255226, + "block11_v_update_fnorm": 0.1110105812549591, + "block11_v_max_l1_linf_norm": 0.14708983898162842, + "block11_v_max_spectral_norm": 0.04762391373515129, + "block11_o_update_fnorm": 0.14530301094055176, + "block11_o_max_l1_linf_norm": 0.1715191751718521, + "block11_o_max_spectral_norm": 0.05426162853837013, + "block11_mlp_win_update_fnorm": 0.25562068819999695, + "block11_mlp_win_max_l1_linf_norm": 0.18568038940429688, + "block11_mlp_win_max_spectral_norm": 0.07811781018972397, + "block11_mlp_wout_update_fnorm": 0.24903804063796997, + "block11_mlp_wout_max_l1_linf_norm": 0.5262923240661621, + "block11_mlp_wout_max_spectral_norm": 0.08316241204738617, + "total_sharpness": 0.0655054822564125, + "block_total_sharpness": 0.11641881614923477, + "v_norm_block": 1.4277926683425903, + "v_T_H_v_block": 0.23733045160770416, + "v_norm": 1.948828935623169, + "ip_v_neg_g_hvp": 0.14434711635112762, + "cos_v_neg_g_hvp": 0.16495737433433533, + "g_hvp_norm": 0.44901689887046814, + "ip_v_neg_g_t": 0.1448391079902649, + "cos_v_neg_g_t": 0.18612518906593323, + "g_t_norm": 0.3993070423603058, + "g_norm": 0.44901689887046814, + "hv_norm": 1.0052897930145264, + "cos_v_hv": 0.12698723375797272, + "hg_norm": 16.603429794311523, + "cos_g_hg": 0.2933233082294464, + "v_parallel_norm": 0.017342016100883484, + "v_perp_norm": 1.9487518072128296, + "embed_lm_head_v_norm": 1.3264018297195435, + "embed_lm_head_cos_v_neg_g": 0.09494615346193314, + "layer_1_v_norm": 0.3762279450893402, + "layer_1_cos_v_neg_g": 0.2623859941959381, + "layer_2_v_norm": 0.36995670199394226, + "layer_2_cos_v_neg_g": 0.22616910934448242, + "layer_3_v_norm": 0.3863861560821533, + "layer_3_cos_v_neg_g": 0.2190624326467514, + "layer_4_v_norm": 0.368022084236145, + "layer_4_cos_v_neg_g": 0.2045302540063858, + "layer_5_v_norm": 0.3437543213367462, + "layer_5_cos_v_neg_g": 0.1765674203634262, + "layer_6_v_norm": 0.3914293348789215, + "layer_6_cos_v_neg_g": 0.18235182762145996, + "layer_7_v_norm": 0.4109397232532501, + "layer_7_cos_v_neg_g": 0.20909947156906128, + "layer_8_v_norm": 0.435416579246521, + "layer_8_cos_v_neg_g": 0.23508384823799133, + "layer_9_v_norm": 0.46093979477882385, + "layer_9_cos_v_neg_g": 0.21319937705993652, + "layer_10_v_norm": 0.47266483306884766, + "layer_10_cos_v_neg_g": 0.26792672276496887, + "layer_11_v_norm": 0.46859678626060486, + "layer_11_cos_v_neg_g": 0.26625096797943115, + "layer_12_v_norm": 0.4360488951206207, + "layer_12_cos_v_neg_g": 0.30112776160240173, + "block0_q_v_norm": 0.10574071109294891, + "block0_q_cos_v_neg_g": 0.28385138511657715, + "block0_k_v_norm": 0.11093495041131973, + "block0_k_cos_v_neg_g": 0.34384751319885254, + "block0_v_v_norm": 0.12648259103298187, + "block0_v_cos_v_neg_g": 0.39146268367767334, + "block0_o_v_norm": 0.1414608508348465, + "block0_o_cos_v_neg_g": 0.318513423204422, + "block0_mlp_win_v_norm": 0.21349242329597473, + "block0_mlp_win_cos_v_neg_g": 0.3420645296573639, + "block0_mlp_wout_v_norm": 0.19079147279262543, + "block0_mlp_wout_cos_v_neg_g": 0.39197102189064026, + "block3_q_v_norm": 0.10952853411436081, + "block3_q_cos_v_neg_g": 0.1849060207605362, + 
"block3_k_v_norm": 0.09983249008655548, + "block3_k_cos_v_neg_g": 0.18916350603103638, + "block3_v_v_norm": 0.09747668355703354, + "block3_v_cos_v_neg_g": 0.16927260160446167, + "block3_o_v_norm": 0.11476222425699234, + "block3_o_cos_v_neg_g": 0.2764381170272827, + "block3_mlp_win_v_norm": 0.23812216520309448, + "block3_mlp_win_cos_v_neg_g": 0.19946148991584778, + "block3_mlp_wout_v_norm": 0.18448275327682495, + "block3_mlp_wout_cos_v_neg_g": 0.4301105737686157, + "block7_q_v_norm": 0.13654102385044098, + "block7_q_cos_v_neg_g": 0.18912401795387268, + "block7_k_v_norm": 0.11893443763256073, + "block7_k_cos_v_neg_g": 0.2521347105503082, + "block7_v_v_norm": 0.10952524840831757, + "block7_v_cos_v_neg_g": 0.2652699053287506, + "block7_o_v_norm": 0.12765270471572876, + "block7_o_cos_v_neg_g": 0.3564887046813965, + "block7_mlp_win_v_norm": 0.25965410470962524, + "block7_mlp_win_cos_v_neg_g": 0.28665465116500854, + "block7_mlp_wout_v_norm": 0.24695312976837158, + "block7_mlp_wout_cos_v_neg_g": 0.3475126326084137, + "block11_q_v_norm": 0.1291249692440033, + "block11_q_cos_v_neg_g": 0.26746997237205505, + "block11_k_v_norm": 0.1122606098651886, + "block11_k_cos_v_neg_g": 0.31595632433891296, + "block11_v_v_norm": 0.1110105812549591, + "block11_v_cos_v_neg_g": 0.3573928773403168, + "block11_o_v_norm": 0.14530301094055176, + "block11_o_cos_v_neg_g": 0.3045036494731903, + "block11_mlp_win_v_norm": 0.25562068819999695, + "block11_mlp_win_cos_v_neg_g": 0.32481855154037476, + "block11_mlp_wout_v_norm": 0.24903804063796997, + "block11_mlp_wout_cos_v_neg_g": 0.32185786962509155, + "embed_lm_head_sharpness": 0.0006780258845537901, + "layer_1_sharpness": 0.11920906603336334, + "layer_2_sharpness": 0.010428464971482754, + "layer_3_sharpness": 0.009432455524802208, + "layer_4_sharpness": 0.015098451636731625, + "layer_5_sharpness": 0.015287244692444801, + "layer_6_sharpness": 0.011111462488770485, + "layer_7_sharpness": 0.01626097969710827, + "layer_8_sharpness": 0.0184559877961874, + "layer_9_sharpness": 0.011559084057807922, + "layer_10_sharpness": 0.006762396078556776, + "layer_11_sharpness": 0.006272643338888884, + "layer_12_sharpness": 0.011320048943161964, + "block0_q_sharpness": 0.007410325575619936, + "block0_k_sharpness": 0.009081358090043068, + "block0_v_sharpness": 0.14210842549800873, + "block0_o_sharpness": 0.0951785147190094, + "block0_mlp_win_sharpness": 0.011096121743321419, + "block0_mlp_wout_sharpness": 0.04021996259689331, + "block3_q_sharpness": 0.008741130121052265, + "block3_k_sharpness": 0.005515645258128643, + "block3_v_sharpness": 0.008481129072606564, + "block3_o_sharpness": 0.005516431760042906, + "block3_mlp_win_sharpness": 0.0011903373524546623, + "block3_mlp_wout_sharpness": 0.0072236983105540276, + "block7_q_sharpness": 0.0009302247199229896, + "block7_k_sharpness": 0.006816819775849581, + "block7_v_sharpness": 0.0222928524017334, + "block7_o_sharpness": 0.007862973026931286, + "block7_mlp_win_sharpness": 0.0058569349348545074, + "block7_mlp_wout_sharpness": 0.003632625797763467, + "block11_q_sharpness": 0.0006104414351284504, + "block11_k_sharpness": 0.002383151790127158, + "block11_v_sharpness": 0.00839344970881939, + "block11_o_sharpness": 0.0014534397050738335, + "block11_mlp_win_sharpness": 0.004551377613097429, + "block11_mlp_wout_sharpness": 0.003860721131786704, + "sum_layer_numerators": 0.0390046192798606, + "block_diag_sharpness": 0.019133117917163378, + "cross_layer_sharpness": 0.0972856982320714 +} \ No newline at end of file diff --git 
a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_10000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..7e0277fd1228337b372fd00cd20b1adab8278145 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_10000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.001266414183191955, + "total_l1_linf_norm": 11.360734939575195, + "total_spectral_norm": 0.001266414183191955, + "embed_lm_head_update_fnorm": 0.0006815015221945941, + "embed_lm_head_max_l1_linf_norm": 0.00018347300647292286, + "embed_lm_head_max_spectral_norm": 0.00013025499356444925, + "layer_1_update_fnorm": 0.0003079880552832037, + "layer_1_max_l1_linf_norm": 0.00043646933045238256, + "layer_1_max_spectral_norm": 6.277742068050429e-05, + "layer_2_update_fnorm": 0.0003037213464267552, + "layer_2_max_l1_linf_norm": 0.0003796375240199268, + "layer_2_max_spectral_norm": 5.111786231282167e-05, + "layer_3_update_fnorm": 0.0003079383459407836, + "layer_3_max_l1_linf_norm": 0.0004392766277305782, + "layer_3_max_spectral_norm": 6.680395745206624e-05, + "layer_4_update_fnorm": 0.0003094300627708435, + "layer_4_max_l1_linf_norm": 0.0004975923802703619, + "layer_4_max_spectral_norm": 7.54203720134683e-05, + "layer_5_update_fnorm": 0.0003022146411240101, + "layer_5_max_l1_linf_norm": 0.00045742944348603487, + "layer_5_max_spectral_norm": 6.805380689911544e-05, + "layer_6_update_fnorm": 0.00030887668253853917, + "layer_6_max_l1_linf_norm": 0.0005880577955394983, + "layer_6_max_spectral_norm": 8.300074114231393e-05, + "layer_7_update_fnorm": 0.00030891606002114713, + "layer_7_max_l1_linf_norm": 0.0005306476377882063, + "layer_7_max_spectral_norm": 7.527785055572167e-05, + "layer_8_update_fnorm": 0.00030812170007266104, + "layer_8_max_l1_linf_norm": 0.0004674288211390376, + "layer_8_max_spectral_norm": 7.124074909370393e-05, + "layer_9_update_fnorm": 0.00030899079865776, + "layer_9_max_l1_linf_norm": 0.00037515221629291773, + "layer_9_max_spectral_norm": 5.218804653850384e-05, + "layer_10_update_fnorm": 0.00030861530103720725, + "layer_10_max_l1_linf_norm": 0.0003482492465991527, + "layer_10_max_spectral_norm": 3.602010474423878e-05, + "layer_11_update_fnorm": 0.00031048664823174477, + "layer_11_max_l1_linf_norm": 0.00033004191936925054, + "layer_11_max_spectral_norm": 3.4081625926773995e-05, + "layer_12_update_fnorm": 0.0003121807239949703, + "layer_12_max_l1_linf_norm": 0.00039644562639296055, + "layer_12_max_spectral_norm": 5.629927181871608e-05, + "block0_q_update_fnorm": 8.803973469184712e-05, + "block0_q_max_l1_linf_norm": 9.555144788464531e-05, + "block0_q_max_spectral_norm": 2.637337274791207e-05, + "block0_k_update_fnorm": 8.755421731621027e-05, + "block0_k_max_l1_linf_norm": 0.00010500948701519519, + "block0_k_max_spectral_norm": 2.5882145564537495e-05, + "block0_v_update_fnorm": 8.881645771907642e-05, + "block0_v_max_l1_linf_norm": 0.00010588258010102436, + "block0_v_max_spectral_norm": 2.625228626129683e-05, + "block0_o_update_fnorm": 8.82665190147236e-05, + "block0_o_max_l1_linf_norm": 8.490377513226122e-05, + "block0_o_max_spectral_norm": 1.845146471168846e-05, + "block0_mlp_win_update_fnorm": 0.00017798886983655393, + "block0_mlp_win_max_l1_linf_norm": 9.541265899315476e-05, + "block0_mlp_win_max_spectral_norm": 3.694188853842206e-05, + 
"block0_mlp_wout_update_fnorm": 0.00017885911802295595, + "block0_mlp_wout_max_l1_linf_norm": 0.00043646933045238256, + "block0_mlp_wout_max_spectral_norm": 6.277742068050429e-05, + "block3_q_update_fnorm": 8.713633724255487e-05, + "block3_q_max_l1_linf_norm": 8.669939415995032e-05, + "block3_q_max_spectral_norm": 2.0494502678047866e-05, + "block3_k_update_fnorm": 8.887182775652036e-05, + "block3_k_max_l1_linf_norm": 0.00012464215978980064, + "block3_k_max_spectral_norm": 2.6194129532086663e-05, + "block3_v_update_fnorm": 9.179420885629952e-05, + "block3_v_max_l1_linf_norm": 0.00012046646588714793, + "block3_v_max_spectral_norm": 2.660438076418359e-05, + "block3_o_update_fnorm": 9.072549437405542e-05, + "block3_o_max_l1_linf_norm": 0.00011473475024104118, + "block3_o_max_spectral_norm": 3.487371213850565e-05, + "block3_mlp_win_update_fnorm": 0.0001764518383424729, + "block3_mlp_win_max_l1_linf_norm": 0.0001115230334107764, + "block3_mlp_win_max_spectral_norm": 3.944369746022858e-05, + "block3_mlp_wout_update_fnorm": 0.00017984923033509403, + "block3_mlp_wout_max_l1_linf_norm": 0.0004975923802703619, + "block3_mlp_wout_max_spectral_norm": 7.54203720134683e-05, + "block7_q_update_fnorm": 8.685854845680296e-05, + "block7_q_max_l1_linf_norm": 8.751686254981905e-05, + "block7_q_max_spectral_norm": 1.3981110896565951e-05, + "block7_k_update_fnorm": 8.623459871159866e-05, + "block7_k_max_l1_linf_norm": 8.887679723557085e-05, + "block7_k_max_spectral_norm": 1.5397448805742897e-05, + "block7_v_update_fnorm": 9.052333189174533e-05, + "block7_v_max_l1_linf_norm": 0.00010928935807896778, + "block7_v_max_spectral_norm": 1.7968401152756996e-05, + "block7_o_update_fnorm": 9.005684842122719e-05, + "block7_o_max_l1_linf_norm": 8.485865691909567e-05, + "block7_o_max_spectral_norm": 1.699511631159112e-05, + "block7_mlp_win_update_fnorm": 0.00017716978618409485, + "block7_mlp_win_max_l1_linf_norm": 9.394945664098486e-05, + "block7_mlp_win_max_spectral_norm": 2.5057652237592265e-05, + "block7_mlp_wout_update_fnorm": 0.00017935439245775342, + "block7_mlp_wout_max_l1_linf_norm": 0.0004674288211390376, + "block7_mlp_wout_max_spectral_norm": 7.124074909370393e-05, + "block11_q_update_fnorm": 8.79600047483109e-05, + "block11_q_max_l1_linf_norm": 8.458711090497673e-05, + "block11_q_max_spectral_norm": 1.4406477021111641e-05, + "block11_k_update_fnorm": 8.845139382174239e-05, + "block11_k_max_l1_linf_norm": 9.457868873141706e-05, + "block11_k_max_spectral_norm": 1.3492067409970332e-05, + "block11_v_update_fnorm": 9.111862891586497e-05, + "block11_v_max_l1_linf_norm": 0.00010036329331342131, + "block11_v_max_spectral_norm": 1.9802888346021064e-05, + "block11_o_update_fnorm": 9.012673399411142e-05, + "block11_o_max_l1_linf_norm": 9.831311763264239e-05, + "block11_o_max_spectral_norm": 2.4720253350096755e-05, + "block11_mlp_win_update_fnorm": 0.0001789551752153784, + "block11_mlp_win_max_l1_linf_norm": 0.00010941187792923301, + "block11_mlp_win_max_spectral_norm": 3.142894638585858e-05, + "block11_mlp_wout_update_fnorm": 0.00018261180957779288, + "block11_mlp_wout_max_l1_linf_norm": 0.00039644562639296055, + "block11_mlp_wout_max_spectral_norm": 5.629927181871608e-05, + "total_sharpness": 0.017554696649312973, + "block_total_sharpness": 0.0231955386698246, + "v_norm_block": 0.0010674083605408669, + "v_T_H_v_block": 2.6428081056906194e-08, + "v_norm": 0.0012664145324379206, + "ip_v_neg_g_hvp": 2.572897392383311e-05, + "cos_v_neg_g_hvp": 0.06923110038042068, + "g_hvp_norm": 0.29345759749412537, + "ip_v_neg_g_t": 
2.595977821329143e-05, + "cos_v_neg_g_t": 0.13648644089698792, + "g_t_norm": 0.1501881182193756, + "g_norm": 0.29345759749412537, + "hv_norm": 0.000653305382002145, + "cos_v_hv": 0.03402923792600632, + "hg_norm": 8.061843872070312, + "cos_g_hg": 0.4013764262199402, + "v_parallel_norm": 5.1518986765586305e-06, + "v_perp_norm": 0.0012664045207202435, + "embed_lm_head_v_norm": 0.000681502278894186, + "embed_lm_head_cos_v_neg_g": 0.10592488944530487, + "layer_1_v_norm": 0.0003079896850977093, + "layer_1_cos_v_neg_g": 0.10920330882072449, + "layer_2_v_norm": 0.0003037230053450912, + "layer_2_cos_v_neg_g": 0.06574942916631699, + "layer_3_v_norm": 0.0003079399757552892, + "layer_3_cos_v_neg_g": 0.050261855125427246, + "layer_4_v_norm": 0.0003094316634815186, + "layer_4_cos_v_neg_g": 0.05976366624236107, + "layer_5_v_norm": 0.0003022163000423461, + "layer_5_cos_v_neg_g": 0.03760548681020737, + "layer_6_v_norm": 0.0003088782832492143, + "layer_6_cos_v_neg_g": 0.052249833941459656, + "layer_7_v_norm": 0.0003089176898356527, + "layer_7_cos_v_neg_g": 0.060228120535612106, + "layer_8_v_norm": 0.0003081233298871666, + "layer_8_cos_v_neg_g": 0.061586230993270874, + "layer_9_v_norm": 0.0003089924284722656, + "layer_9_cos_v_neg_g": 0.06346778571605682, + "layer_10_v_norm": 0.0003086169308517128, + "layer_10_cos_v_neg_g": 0.07691667973995209, + "layer_11_v_norm": 0.0003104882489424199, + "layer_11_cos_v_neg_g": 0.08807568997144699, + "layer_12_v_norm": 0.00031218232470564544, + "layer_12_cos_v_neg_g": 0.12576422095298767, + "block0_q_v_norm": 8.804540993878618e-05, + "block0_q_cos_v_neg_g": 0.16620531678199768, + "block0_k_v_norm": 8.75599289429374e-05, + "block0_k_cos_v_neg_g": 0.15943293273448944, + "block0_v_v_norm": 8.88220893102698e-05, + "block0_v_cos_v_neg_g": 0.17631767690181732, + "block0_o_v_norm": 8.827218698570505e-05, + "block0_o_cos_v_neg_g": 0.1361011415719986, + "block0_mlp_win_v_norm": 0.000177991678356193, + "block0_mlp_win_cos_v_neg_g": 0.07783197611570358, + "block0_mlp_wout_v_norm": 0.0001788619119906798, + "block0_mlp_wout_cos_v_neg_g": 0.1459311842918396, + "block3_q_v_norm": 8.714207069715485e-05, + "block3_q_cos_v_neg_g": 0.07349614799022675, + "block3_k_v_norm": 8.887745207175612e-05, + "block3_k_cos_v_neg_g": 0.10368167608976364, + "block3_v_v_norm": 9.179965854855254e-05, + "block3_v_cos_v_neg_g": 0.03908827528357506, + "block3_o_v_norm": 9.073100227396935e-05, + "block3_o_cos_v_neg_g": 0.1427045315504074, + "block3_mlp_win_v_norm": 0.00017645466141402721, + "block3_mlp_win_cos_v_neg_g": 0.053872037678956985, + "block3_mlp_wout_v_norm": 0.00017985200975090265, + "block3_mlp_wout_cos_v_neg_g": 0.18998652696609497, + "block7_q_v_norm": 8.686430373927578e-05, + "block7_q_cos_v_neg_g": 0.0687209814786911, + "block7_k_v_norm": 8.624039764981717e-05, + "block7_k_cos_v_neg_g": 0.19141869246959686, + "block7_v_v_norm": 9.05288543435745e-05, + "block7_v_cos_v_neg_g": 0.041606444865465164, + "block7_o_v_norm": 9.006239997688681e-05, + "block7_o_cos_v_neg_g": 0.19095809757709503, + "block7_mlp_win_v_norm": 0.00017717260925564915, + "block7_mlp_win_cos_v_neg_g": 0.07410057634115219, + "block7_mlp_wout_v_norm": 0.00017935717187356204, + "block7_mlp_wout_cos_v_neg_g": 0.17089125514030457, + "block11_q_v_norm": 8.796568727120757e-05, + "block11_q_cos_v_neg_g": 0.10961327701807022, + "block11_k_v_norm": 8.84570472408086e-05, + "block11_k_cos_v_neg_g": 0.1696273684501648, + "block11_v_v_norm": 9.112411498790607e-05, + "block11_v_cos_v_neg_g": 0.06402038782835007, + "block11_o_v_norm": 
9.013228554977104e-05, + "block11_o_cos_v_neg_g": 0.19414913654327393, + "block11_mlp_win_v_norm": 0.00017895796918310225, + "block11_mlp_win_cos_v_neg_g": 0.1325765997171402, + "block11_mlp_wout_v_norm": 0.00018261454533785582, + "block11_mlp_wout_cos_v_neg_g": 0.16414664685726166, + "embed_lm_head_sharpness": 0.0006446132902055979, + "layer_1_sharpness": 0.008840120397508144, + "layer_2_sharpness": 0.0025424808263778687, + "layer_3_sharpness": 0.006313459016382694, + "layer_4_sharpness": 0.005370890721678734, + "layer_5_sharpness": 0.001962121808901429, + "layer_6_sharpness": 0.003554667579010129, + "layer_7_sharpness": 0.004791130777448416, + "layer_8_sharpness": 0.006884571630507708, + "layer_9_sharpness": 0.00564617570489645, + "layer_10_sharpness": 0.002284163609147072, + "layer_11_sharpness": 0.0020567451138049364, + "layer_12_sharpness": 0.008091464638710022, + "block0_q_sharpness": 0.0020636855624616146, + "block0_k_sharpness": 0.003184598870575428, + "block0_v_sharpness": 0.003626306541264057, + "block0_o_sharpness": 0.004583106376230717, + "block0_mlp_win_sharpness": 0.0025391296949237585, + "block0_mlp_wout_sharpness": 0.004311267752200365, + "block3_q_sharpness": 0.0005182431195862591, + "block3_k_sharpness": 0.0014333582948893309, + "block3_v_sharpness": 0.016413310542702675, + "block3_o_sharpness": 0.0020149725023657084, + "block3_mlp_win_sharpness": 0.0004084956308361143, + "block3_mlp_wout_sharpness": 0.0009530616807751358, + "block7_q_sharpness": 0.00022849715605843812, + "block7_k_sharpness": 0.00022326935140881687, + "block7_v_sharpness": 0.014324702322483063, + "block7_o_sharpness": 0.0005127156618982553, + "block7_mlp_win_sharpness": 0.0014726587105542421, + "block7_mlp_wout_sharpness": 0.002674557501450181, + "block11_q_sharpness": 0.000313677970552817, + "block11_k_sharpness": 0.00033198902383446693, + "block11_v_sharpness": 0.0020258016884326935, + "block11_o_sharpness": 0.00032430386636406183, + "block11_mlp_win_sharpness": 0.001664847950451076, + "block11_mlp_wout_sharpness": 0.009855646640062332, + "sum_layer_numerators": 5.558696949383763e-09, + "block_diag_sharpness": 0.004878786320686583, + "cross_layer_sharpness": 0.018316752349138016 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..d39d5389d7298d93fd323db7ea001bb7b3cb8a4c --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_1500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.9232699871063232, + "total_l1_linf_norm": 16578.30859375, + "total_spectral_norm": 1.9232699871063232, + "embed_lm_head_update_fnorm": 1.3082352876663208, + "embed_lm_head_max_l1_linf_norm": 0.34094029664993286, + "embed_lm_head_max_spectral_norm": 0.28375062346458435, + "layer_1_update_fnorm": 0.341115802526474, + "layer_1_max_l1_linf_norm": 0.5233801603317261, + "layer_1_max_spectral_norm": 0.06577228009700775, + "layer_2_update_fnorm": 0.36471495032310486, + "layer_2_max_l1_linf_norm": 0.5696210265159607, + "layer_2_max_spectral_norm": 0.07834026217460632, + "layer_3_update_fnorm": 0.3747999370098114, + "layer_3_max_l1_linf_norm": 0.5682519674301147, + "layer_3_max_spectral_norm": 0.07808811962604523, + "layer_4_update_fnorm": 
0.35283076763153076, + "layer_4_max_l1_linf_norm": 0.6598700284957886, + "layer_4_max_spectral_norm": 0.08348463475704193, + "layer_5_update_fnorm": 0.30875375866889954, + "layer_5_max_l1_linf_norm": 0.44062912464141846, + "layer_5_max_spectral_norm": 0.06333416700363159, + "layer_6_update_fnorm": 0.37346673011779785, + "layer_6_max_l1_linf_norm": 0.5729587078094482, + "layer_6_max_spectral_norm": 0.07430745661258698, + "layer_7_update_fnorm": 0.4047241806983948, + "layer_7_max_l1_linf_norm": 0.5283786058425903, + "layer_7_max_spectral_norm": 0.0686311423778534, + "layer_8_update_fnorm": 0.44055983424186707, + "layer_8_max_l1_linf_norm": 0.47471776604652405, + "layer_8_max_spectral_norm": 0.0632677972316742, + "layer_9_update_fnorm": 0.4650084972381592, + "layer_9_max_l1_linf_norm": 0.48960787057876587, + "layer_9_max_spectral_norm": 0.0661308765411377, + "layer_10_update_fnorm": 0.4739609360694885, + "layer_10_max_l1_linf_norm": 0.4910178780555725, + "layer_10_max_spectral_norm": 0.06518631428480148, + "layer_11_update_fnorm": 0.4753251373767853, + "layer_11_max_l1_linf_norm": 0.511614978313446, + "layer_11_max_spectral_norm": 0.06777193397283554, + "layer_12_update_fnorm": 0.46221157908439636, + "layer_12_max_l1_linf_norm": 0.5264849662780762, + "layer_12_max_spectral_norm": 0.08734578639268875, + "block0_q_update_fnorm": 0.08078420907258987, + "block0_q_max_l1_linf_norm": 0.13576224446296692, + "block0_q_max_spectral_norm": 0.034602414816617966, + "block0_k_update_fnorm": 0.07783537358045578, + "block0_k_max_l1_linf_norm": 0.1485421359539032, + "block0_k_max_spectral_norm": 0.037305548787117004, + "block0_v_update_fnorm": 0.09495196491479874, + "block0_v_max_l1_linf_norm": 0.1306101679801941, + "block0_v_max_spectral_norm": 0.03959131985902786, + "block0_o_update_fnorm": 0.10600217431783676, + "block0_o_max_l1_linf_norm": 0.10898609459400177, + "block0_o_max_spectral_norm": 0.02833794429898262, + "block0_mlp_win_update_fnorm": 0.21314215660095215, + "block0_mlp_win_max_l1_linf_norm": 0.14666159451007843, + "block0_mlp_win_max_spectral_norm": 0.060343507677316666, + "block0_mlp_wout_update_fnorm": 0.1950451284646988, + "block0_mlp_wout_max_l1_linf_norm": 0.5233801603317261, + "block0_mlp_wout_max_spectral_norm": 0.06577228009700775, + "block3_q_update_fnorm": 0.09915437549352646, + "block3_q_max_l1_linf_norm": 0.11978904902935028, + "block3_q_max_spectral_norm": 0.035802267491817474, + "block3_k_update_fnorm": 0.09007017314434052, + "block3_k_max_l1_linf_norm": 0.1491609811782837, + "block3_k_max_spectral_norm": 0.02635914459824562, + "block3_v_update_fnorm": 0.07669064402580261, + "block3_v_max_l1_linf_norm": 0.10394594073295593, + "block3_v_max_spectral_norm": 0.03205399960279465, + "block3_o_update_fnorm": 0.08294866234064102, + "block3_o_max_l1_linf_norm": 0.12798187136650085, + "block3_o_max_spectral_norm": 0.03145650029182434, + "block3_mlp_win_update_fnorm": 0.23507313430309296, + "block3_mlp_win_max_l1_linf_norm": 0.21654878556728363, + "block3_mlp_win_max_spectral_norm": 0.06675060838460922, + "block3_mlp_wout_update_fnorm": 0.19609160721302032, + "block3_mlp_wout_max_l1_linf_norm": 0.6598700284957886, + "block3_mlp_wout_max_spectral_norm": 0.08348463475704193, + "block7_q_update_fnorm": 0.13329806923866272, + "block7_q_max_l1_linf_norm": 0.14599980413913727, + "block7_q_max_spectral_norm": 0.03455616533756256, + "block7_k_update_fnorm": 0.12507414817810059, + "block7_k_max_l1_linf_norm": 0.1612628847360611, + "block7_k_max_spectral_norm": 0.02651619352400303, + 
"block7_v_update_fnorm": 0.10254105925559998, + "block7_v_max_l1_linf_norm": 0.12271597981452942, + "block7_v_max_spectral_norm": 0.03374450281262398, + "block7_o_update_fnorm": 0.11668885499238968, + "block7_o_max_l1_linf_norm": 0.12499095499515533, + "block7_o_max_spectral_norm": 0.03279413655400276, + "block7_mlp_win_update_fnorm": 0.26434797048568726, + "block7_mlp_win_max_l1_linf_norm": 0.16152313351631165, + "block7_mlp_win_max_spectral_norm": 0.0632677972316742, + "block7_mlp_wout_update_fnorm": 0.2580053508281708, + "block7_mlp_wout_max_l1_linf_norm": 0.47471776604652405, + "block7_mlp_wout_max_spectral_norm": 0.04678642749786377, + "block11_q_update_fnorm": 0.1416664570569992, + "block11_q_max_l1_linf_norm": 0.18970546126365662, + "block11_q_max_spectral_norm": 0.04908042773604393, + "block11_k_update_fnorm": 0.12885569036006927, + "block11_k_max_l1_linf_norm": 0.209860160946846, + "block11_k_max_spectral_norm": 0.04583973065018654, + "block11_v_update_fnorm": 0.10538167506456375, + "block11_v_max_l1_linf_norm": 0.14806264638900757, + "block11_v_max_spectral_norm": 0.044577717781066895, + "block11_o_update_fnorm": 0.1324349045753479, + "block11_o_max_l1_linf_norm": 0.13851146399974823, + "block11_o_max_spectral_norm": 0.04506872594356537, + "block11_mlp_win_update_fnorm": 0.27841076254844666, + "block11_mlp_win_max_l1_linf_norm": 0.16917520761489868, + "block11_mlp_win_max_spectral_norm": 0.06911075860261917, + "block11_mlp_wout_update_fnorm": 0.26594820618629456, + "block11_mlp_wout_max_l1_linf_norm": 0.5264849662780762, + "block11_mlp_wout_max_spectral_norm": 0.08734578639268875, + "total_sharpness": 0.023443087935447693, + "block_total_sharpness": 0.040211744606494904, + "v_norm_block": 1.409782886505127, + "v_T_H_v_block": 0.07992034405469894, + "v_norm": 1.9232699871063232, + "ip_v_neg_g_hvp": 0.0956394225358963, + "cos_v_neg_g_hvp": 0.15492293238639832, + "g_hvp_norm": 0.3209822475910187, + "ip_v_neg_g_t": 0.09602774679660797, + "cos_v_neg_g_t": 0.18538044393062592, + "g_t_norm": 0.2693348526954651, + "g_norm": 0.3209822475910187, + "hv_norm": 0.4791932702064514, + "cos_v_hv": 0.09409019351005554, + "hg_norm": 6.639050483703613, + "cos_g_hg": 0.2966487407684326, + "v_parallel_norm": 0.022914009168744087, + "v_perp_norm": 1.9231334924697876, + "embed_lm_head_v_norm": 1.3082352876663208, + "embed_lm_head_cos_v_neg_g": 0.10355299711227417, + "layer_1_v_norm": 0.341115802526474, + "layer_1_cos_v_neg_g": 0.2615382671356201, + "layer_2_v_norm": 0.36471495032310486, + "layer_2_cos_v_neg_g": 0.16334185004234314, + "layer_3_v_norm": 0.3747999668121338, + "layer_3_cos_v_neg_g": 0.1565578579902649, + "layer_4_v_norm": 0.35283076763153076, + "layer_4_cos_v_neg_g": 0.1678524762392044, + "layer_5_v_norm": 0.30875375866889954, + "layer_5_cos_v_neg_g": 0.12664563953876495, + "layer_6_v_norm": 0.37346673011779785, + "layer_6_cos_v_neg_g": 0.14611674845218658, + "layer_7_v_norm": 0.4047241806983948, + "layer_7_cos_v_neg_g": 0.18076442182064056, + "layer_8_v_norm": 0.44055983424186707, + "layer_8_cos_v_neg_g": 0.19723652303218842, + "layer_9_v_norm": 0.4650084972381592, + "layer_9_cos_v_neg_g": 0.1925349086523056, + "layer_10_v_norm": 0.4739609360694885, + "layer_10_cos_v_neg_g": 0.22868354618549347, + "layer_11_v_norm": 0.4753251373767853, + "layer_11_cos_v_neg_g": 0.23513023555278778, + "layer_12_v_norm": 0.46221157908439636, + "layer_12_cos_v_neg_g": 0.28734850883483887, + "block0_q_v_norm": 0.08078420907258987, + "block0_q_cos_v_neg_g": 0.3355288803577423, + "block0_k_v_norm": 
0.07783537358045578, + "block0_k_cos_v_neg_g": 0.3300905227661133, + "block0_v_v_norm": 0.09495196491479874, + "block0_v_cos_v_neg_g": 0.3244650363922119, + "block0_o_v_norm": 0.10600217431783676, + "block0_o_cos_v_neg_g": 0.3092131018638611, + "block0_mlp_win_v_norm": 0.21314215660095215, + "block0_mlp_win_cos_v_neg_g": 0.35048362612724304, + "block0_mlp_wout_v_norm": 0.1950451284646988, + "block0_mlp_wout_cos_v_neg_g": 0.3406444489955902, + "block3_q_v_norm": 0.09915437549352646, + "block3_q_cos_v_neg_g": 0.1867102086544037, + "block3_k_v_norm": 0.09007017314434052, + "block3_k_cos_v_neg_g": 0.1814897060394287, + "block3_v_v_norm": 0.07669064402580261, + "block3_v_cos_v_neg_g": 0.14504127204418182, + "block3_o_v_norm": 0.08294866234064102, + "block3_o_cos_v_neg_g": 0.2927109897136688, + "block3_mlp_win_v_norm": 0.23507313430309296, + "block3_mlp_win_cos_v_neg_g": 0.16675221920013428, + "block3_mlp_wout_v_norm": 0.19609160721302032, + "block3_mlp_wout_cos_v_neg_g": 0.33838531374931335, + "block7_q_v_norm": 0.13329806923866272, + "block7_q_cos_v_neg_g": 0.1957254856824875, + "block7_k_v_norm": 0.12507414817810059, + "block7_k_cos_v_neg_g": 0.29257434606552124, + "block7_v_v_norm": 0.10254105925559998, + "block7_v_cos_v_neg_g": 0.19009940326213837, + "block7_o_v_norm": 0.11668885499238968, + "block7_o_cos_v_neg_g": 0.31455564498901367, + "block7_mlp_win_v_norm": 0.26434797048568726, + "block7_mlp_win_cos_v_neg_g": 0.23700276017189026, + "block7_mlp_wout_v_norm": 0.2580053508281708, + "block7_mlp_wout_cos_v_neg_g": 0.29367491602897644, + "block11_q_v_norm": 0.1416664570569992, + "block11_q_cos_v_neg_g": 0.28026464581489563, + "block11_k_v_norm": 0.12885569036006927, + "block11_k_cos_v_neg_g": 0.33630597591400146, + "block11_v_v_norm": 0.10538167506456375, + "block11_v_cos_v_neg_g": 0.29635700583457947, + "block11_o_v_norm": 0.1324349045753479, + "block11_o_cos_v_neg_g": 0.3204993009567261, + "block11_mlp_win_v_norm": 0.27841076254844666, + "block11_mlp_win_cos_v_neg_g": 0.29548659920692444, + "block11_mlp_wout_v_norm": 0.26594820618629456, + "block11_mlp_wout_cos_v_neg_g": 0.30090558528900146, + "embed_lm_head_sharpness": 0.0005628727376461029, + "layer_1_sharpness": 0.03933535888791084, + "layer_2_sharpness": 0.0030330922454595566, + "layer_3_sharpness": 0.0037161409854888916, + "layer_4_sharpness": 0.006585008930414915, + "layer_5_sharpness": 0.008032118901610374, + "layer_6_sharpness": 0.0043462710455060005, + "layer_7_sharpness": 0.005666099023073912, + "layer_8_sharpness": 0.006572321057319641, + "layer_9_sharpness": 0.005057093687355518, + "layer_10_sharpness": 0.0033175302669405937, + "layer_11_sharpness": 0.0033446350134909153, + "layer_12_sharpness": 0.007571721915155649, + "block0_q_sharpness": 0.006223268341273069, + "block0_k_sharpness": 0.0055359043180942535, + "block0_v_sharpness": 0.02662118710577488, + "block0_o_sharpness": 0.018717406317591667, + "block0_mlp_win_sharpness": 0.009891982190310955, + "block0_mlp_wout_sharpness": 0.01688617840409279, + "block3_q_sharpness": 0.004808380734175444, + "block3_k_sharpness": 0.0019163336837664247, + "block3_v_sharpness": 0.006540609989315271, + "block3_o_sharpness": 0.0035583896096795797, + "block3_mlp_win_sharpness": 0.0004236121312715113, + "block3_mlp_wout_sharpness": 0.002299015875905752, + "block7_q_sharpness": 0.000544414680916816, + "block7_k_sharpness": 0.0032649694476276636, + "block7_v_sharpness": 0.01225918997079134, + "block7_o_sharpness": 0.003209129674360156, + "block7_mlp_win_sharpness": 0.0017769852420315146, + 
"block7_mlp_wout_sharpness": 0.00127993558999151, + "block11_q_sharpness": 0.0005728487158194184, + "block11_k_sharpness": 0.002810113597661257, + "block11_v_sharpness": 0.00579913379624486, + "block11_o_sharpness": 0.000850385578814894, + "block11_mlp_win_sharpness": 0.0022476932499557734, + "block11_mlp_wout_sharpness": 0.0034968331456184387, + "sum_layer_numerators": 0.014110004379885955, + "block_diag_sharpness": 0.007099416897850168, + "cross_layer_sharpness": 0.03311232770864474 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..9cd475e4b9f03b26e0e4a47a81034221cd8d785a --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.0014257431030273, + "total_l1_linf_norm": 17441.609375, + "total_spectral_norm": 2.0014259815216064, + "embed_lm_head_update_fnorm": 1.3095966577529907, + "embed_lm_head_max_l1_linf_norm": 0.35311317443847656, + "embed_lm_head_max_spectral_norm": 0.25627049803733826, + "layer_1_update_fnorm": 0.36611121892929077, + "layer_1_max_l1_linf_norm": 0.4707452654838562, + "layer_1_max_spectral_norm": 0.06141002103686333, + "layer_2_update_fnorm": 0.41461390256881714, + "layer_2_max_l1_linf_norm": 0.5065367817878723, + "layer_2_max_spectral_norm": 0.09188212454319, + "layer_3_update_fnorm": 0.4175533950328827, + "layer_3_max_l1_linf_norm": 0.5497859716415405, + "layer_3_max_spectral_norm": 0.0749676302075386, + "layer_4_update_fnorm": 0.40861430764198303, + "layer_4_max_l1_linf_norm": 0.5200268030166626, + "layer_4_max_spectral_norm": 0.07006392627954483, + "layer_5_update_fnorm": 0.36867082118988037, + "layer_5_max_l1_linf_norm": 0.3899345397949219, + "layer_5_max_spectral_norm": 0.06110319867730141, + "layer_6_update_fnorm": 0.42717471718788147, + "layer_6_max_l1_linf_norm": 0.4682227671146393, + "layer_6_max_spectral_norm": 0.06253049522638321, + "layer_7_update_fnorm": 0.4439132809638977, + "layer_7_max_l1_linf_norm": 0.46694451570510864, + "layer_7_max_spectral_norm": 0.059534765779972076, + "layer_8_update_fnorm": 0.4585456848144531, + "layer_8_max_l1_linf_norm": 0.4843520522117615, + "layer_8_max_spectral_norm": 0.05543571338057518, + "layer_9_update_fnorm": 0.4708572030067444, + "layer_9_max_l1_linf_norm": 0.5103946924209595, + "layer_9_max_spectral_norm": 0.053001925349235535, + "layer_10_update_fnorm": 0.485883891582489, + "layer_10_max_l1_linf_norm": 0.508702278137207, + "layer_10_max_spectral_norm": 0.05608217790722847, + "layer_11_update_fnorm": 0.48697429895401, + "layer_11_max_l1_linf_norm": 0.5281105041503906, + "layer_11_max_spectral_norm": 0.058526672422885895, + "layer_12_update_fnorm": 0.471964567899704, + "layer_12_max_l1_linf_norm": 0.494601845741272, + "layer_12_max_spectral_norm": 0.07808341830968857, + "block0_q_update_fnorm": 0.1066834032535553, + "block0_q_max_l1_linf_norm": 0.15187236666679382, + "block0_q_max_spectral_norm": 0.040834322571754456, + "block0_k_update_fnorm": 0.10259869694709778, + "block0_k_max_l1_linf_norm": 0.1739589124917984, + "block0_k_max_spectral_norm": 0.04470943659543991, + "block0_v_update_fnorm": 0.09680559486150742, + "block0_v_max_l1_linf_norm": 0.12558946013450623, + 
"block0_v_max_spectral_norm": 0.03484946861863136, + "block0_o_update_fnorm": 0.09566830843687057, + "block0_o_max_l1_linf_norm": 0.10684390366077423, + "block0_o_max_spectral_norm": 0.02551419474184513, + "block0_mlp_win_update_fnorm": 0.22369220852851868, + "block0_mlp_win_max_l1_linf_norm": 0.13889195024967194, + "block0_mlp_win_max_spectral_norm": 0.04979977011680603, + "block0_mlp_wout_update_fnorm": 0.20861278474330902, + "block0_mlp_wout_max_l1_linf_norm": 0.4707452654838562, + "block0_mlp_wout_max_spectral_norm": 0.06141002103686333, + "block3_q_update_fnorm": 0.12055464088916779, + "block3_q_max_l1_linf_norm": 0.14314612746238708, + "block3_q_max_spectral_norm": 0.038682255893945694, + "block3_k_update_fnorm": 0.11287803202867508, + "block3_k_max_l1_linf_norm": 0.20165038108825684, + "block3_k_max_spectral_norm": 0.03462306782603264, + "block3_v_update_fnorm": 0.08906113356351852, + "block3_v_max_l1_linf_norm": 0.11360028386116028, + "block3_v_max_spectral_norm": 0.033270347863435745, + "block3_o_update_fnorm": 0.10403335094451904, + "block3_o_max_l1_linf_norm": 0.11999273300170898, + "block3_o_max_spectral_norm": 0.03374272584915161, + "block3_mlp_win_update_fnorm": 0.26961779594421387, + "block3_mlp_win_max_l1_linf_norm": 0.21733728051185608, + "block3_mlp_win_max_spectral_norm": 0.06939829140901566, + "block3_mlp_wout_update_fnorm": 0.21944521367549896, + "block3_mlp_wout_max_l1_linf_norm": 0.5200268030166626, + "block3_mlp_wout_max_spectral_norm": 0.07006392627954483, + "block7_q_update_fnorm": 0.14744438230991364, + "block7_q_max_l1_linf_norm": 0.1567586362361908, + "block7_q_max_spectral_norm": 0.028945794329047203, + "block7_k_update_fnorm": 0.13500602543354034, + "block7_k_max_l1_linf_norm": 0.15603527426719666, + "block7_k_max_spectral_norm": 0.027092818170785904, + "block7_v_update_fnorm": 0.10752572119235992, + "block7_v_max_l1_linf_norm": 0.11977812647819519, + "block7_v_max_spectral_norm": 0.02816586196422577, + "block7_o_update_fnorm": 0.12399515509605408, + "block7_o_max_l1_linf_norm": 0.12133394181728363, + "block7_o_max_spectral_norm": 0.028106270357966423, + "block7_mlp_win_update_fnorm": 0.2733132839202881, + "block7_mlp_win_max_l1_linf_norm": 0.16421252489089966, + "block7_mlp_win_max_spectral_norm": 0.05543571338057518, + "block7_mlp_wout_update_fnorm": 0.2618655264377594, + "block7_mlp_wout_max_l1_linf_norm": 0.4843520522117615, + "block7_mlp_wout_max_spectral_norm": 0.04682524874806404, + "block11_q_update_fnorm": 0.14735037088394165, + "block11_q_max_l1_linf_norm": 0.1698063760995865, + "block11_q_max_spectral_norm": 0.03573795408010483, + "block11_k_update_fnorm": 0.13430756330490112, + "block11_k_max_l1_linf_norm": 0.1725679337978363, + "block11_k_max_spectral_norm": 0.026331311091780663, + "block11_v_update_fnorm": 0.10968075692653656, + "block11_v_max_l1_linf_norm": 0.1393241137266159, + "block11_v_max_spectral_norm": 0.03424732759594917, + "block11_o_update_fnorm": 0.1374727487564087, + "block11_o_max_l1_linf_norm": 0.14806747436523438, + "block11_o_max_spectral_norm": 0.03443266451358795, + "block11_mlp_win_update_fnorm": 0.28604820370674133, + "block11_mlp_win_max_l1_linf_norm": 0.16261513531208038, + "block11_mlp_win_max_spectral_norm": 0.05807986855506897, + "block11_mlp_wout_update_fnorm": 0.2648967206478119, + "block11_mlp_wout_max_l1_linf_norm": 0.494601845741272, + "block11_mlp_wout_max_spectral_norm": 0.07808341830968857, + "total_sharpness": 0.014692222699522972, + "block_total_sharpness": 0.023730788379907608, + "v_norm_block": 
1.5134934186935425, + "v_T_H_v_block": 0.05435922369360924, + "v_norm": 2.0014257431030273, + "ip_v_neg_g_hvp": 0.07012423872947693, + "cos_v_neg_g_hvp": 0.11681490391492844, + "g_hvp_norm": 0.29993727803230286, + "ip_v_neg_g_t": 0.07036402821540833, + "cos_v_neg_g_t": 0.14338548481464386, + "g_t_norm": 0.24519184231758118, + "g_norm": 0.29993727803230286, + "hv_norm": 0.5593423843383789, + "cos_v_hv": 0.0525713674724102, + "hg_norm": 3.4769227504730225, + "cos_g_hg": 0.5010712146759033, + "v_parallel_norm": 0.016341177746653557, + "v_perp_norm": 2.001359224319458, + "embed_lm_head_v_norm": 1.3095966577529907, + "embed_lm_head_cos_v_neg_g": 0.11024348437786102, + "layer_1_v_norm": 0.36611121892929077, + "layer_1_cos_v_neg_g": 0.2110137790441513, + "layer_2_v_norm": 0.41461390256881714, + "layer_2_cos_v_neg_g": 0.10468044877052307, + "layer_3_v_norm": 0.4175533652305603, + "layer_3_cos_v_neg_g": 0.10150793194770813, + "layer_4_v_norm": 0.40861430764198303, + "layer_4_cos_v_neg_g": 0.09772288799285889, + "layer_5_v_norm": 0.36867082118988037, + "layer_5_cos_v_neg_g": 0.08350116014480591, + "layer_6_v_norm": 0.42717471718788147, + "layer_6_cos_v_neg_g": 0.10130023956298828, + "layer_7_v_norm": 0.4439132809638977, + "layer_7_cos_v_neg_g": 0.12281867861747742, + "layer_8_v_norm": 0.4585457146167755, + "layer_8_cos_v_neg_g": 0.13013571500778198, + "layer_9_v_norm": 0.4708572030067444, + "layer_9_cos_v_neg_g": 0.13196314871311188, + "layer_10_v_norm": 0.485883891582489, + "layer_10_cos_v_neg_g": 0.15841902792453766, + "layer_11_v_norm": 0.48697429895401, + "layer_11_cos_v_neg_g": 0.16752012073993683, + "layer_12_v_norm": 0.471964567899704, + "layer_12_cos_v_neg_g": 0.20191840827465057, + "block0_q_v_norm": 0.1066834032535553, + "block0_q_cos_v_neg_g": 0.3316997289657593, + "block0_k_v_norm": 0.10259869694709778, + "block0_k_cos_v_neg_g": 0.33033236861228943, + "block0_v_v_norm": 0.09680559486150742, + "block0_v_cos_v_neg_g": 0.2629797160625458, + "block0_o_v_norm": 0.09566830843687057, + "block0_o_cos_v_neg_g": 0.24854440987110138, + "block0_mlp_win_v_norm": 0.22369220852851868, + "block0_mlp_win_cos_v_neg_g": 0.25730466842651367, + "block0_mlp_wout_v_norm": 0.20861278474330902, + "block0_mlp_wout_cos_v_neg_g": 0.2483360469341278, + "block3_q_v_norm": 0.12055464088916779, + "block3_q_cos_v_neg_g": 0.0861038789153099, + "block3_k_v_norm": 0.11287803202867508, + "block3_k_cos_v_neg_g": 0.09069368243217468, + "block3_v_v_norm": 0.08906113356351852, + "block3_v_cos_v_neg_g": 0.09136512875556946, + "block3_o_v_norm": 0.10403335094451904, + "block3_o_cos_v_neg_g": 0.19883368909358978, + "block3_mlp_win_v_norm": 0.26961779594421387, + "block3_mlp_win_cos_v_neg_g": 0.11040006577968597, + "block3_mlp_wout_v_norm": 0.21944521367549896, + "block3_mlp_wout_cos_v_neg_g": 0.23790672421455383, + "block7_q_v_norm": 0.14744438230991364, + "block7_q_cos_v_neg_g": 0.13323557376861572, + "block7_k_v_norm": 0.13500602543354034, + "block7_k_cos_v_neg_g": 0.20265746116638184, + "block7_v_v_norm": 0.10752572119235992, + "block7_v_cos_v_neg_g": 0.10210362076759338, + "block7_o_v_norm": 0.12399515509605408, + "block7_o_cos_v_neg_g": 0.22918854653835297, + "block7_mlp_win_v_norm": 0.2733132839202881, + "block7_mlp_win_cos_v_neg_g": 0.1686643660068512, + "block7_mlp_wout_v_norm": 0.2618655264377594, + "block7_mlp_wout_cos_v_neg_g": 0.2290925681591034, + "block11_q_v_norm": 0.14735037088394165, + "block11_q_cos_v_neg_g": 0.18187135457992554, + "block11_k_v_norm": 0.13430756330490112, + "block11_k_cos_v_neg_g": 
0.21250757575035095, + "block11_v_v_norm": 0.10968075692653656, + "block11_v_cos_v_neg_g": 0.1776837706565857, + "block11_o_v_norm": 0.1374727487564087, + "block11_o_cos_v_neg_g": 0.23987255990505219, + "block11_mlp_win_v_norm": 0.28604820370674133, + "block11_mlp_win_cos_v_neg_g": 0.2119571417570114, + "block11_mlp_wout_v_norm": 0.2648967206478119, + "block11_mlp_wout_cos_v_neg_g": 0.21609266102313995, + "embed_lm_head_sharpness": 0.00044532757601700723, + "layer_1_sharpness": 0.0383736677467823, + "layer_2_sharpness": 0.006049062125384808, + "layer_3_sharpness": 0.009005218744277954, + "layer_4_sharpness": 0.0026515054050832987, + "layer_5_sharpness": 0.003226696513593197, + "layer_6_sharpness": 0.0018508280627429485, + "layer_7_sharpness": 0.0026416887994855642, + "layer_8_sharpness": 0.0028613407630473375, + "layer_9_sharpness": 0.0022196494974195957, + "layer_10_sharpness": 0.0016739738639444113, + "layer_11_sharpness": 0.0016665910370647907, + "layer_12_sharpness": 0.0027520477306097746, + "block0_q_sharpness": 0.011638952419161797, + "block0_k_sharpness": 0.016154933720827103, + "block0_v_sharpness": 0.017391683533787727, + "block0_o_sharpness": 0.01019052229821682, + "block0_mlp_win_sharpness": 0.0052457391284406185, + "block0_mlp_wout_sharpness": 0.011275633238255978, + "block3_q_sharpness": 0.0010396265424787998, + "block3_k_sharpness": 0.0021199360489845276, + "block3_v_sharpness": 0.004336846061050892, + "block3_o_sharpness": 0.0018156872829422355, + "block3_mlp_win_sharpness": 0.0002910644398070872, + "block3_mlp_wout_sharpness": 0.0006492843385785818, + "block7_q_sharpness": 0.00023961086117196828, + "block7_k_sharpness": 0.0010562072275206447, + "block7_v_sharpness": 0.006610176991671324, + "block7_o_sharpness": 0.0014142993604764342, + "block7_mlp_win_sharpness": 0.000931801158003509, + "block7_mlp_wout_sharpness": 0.0006924528279341757, + "block11_q_sharpness": 0.0002940725244116038, + "block11_k_sharpness": 0.0006767542799934745, + "block11_v_sharpness": 0.0019742909353226423, + "block11_o_sharpness": 0.00046747183660045266, + "block11_mlp_win_sharpness": 0.0009836619719862938, + "block11_mlp_wout_sharpness": 0.0019145499682053924, + "sum_layer_numerators": 0.011990205755592332, + "block_diag_sharpness": 0.0052343837879489175, + "cross_layer_sharpness": 0.01849640459195869 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..7a86a6df522009bf5099a6fe6223367cb36a8e2b --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_2500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1100969314575195, + "total_l1_linf_norm": 18580.080078125, + "total_spectral_norm": 2.1100966930389404, + "embed_lm_head_update_fnorm": 1.3289134502410889, + "embed_lm_head_max_l1_linf_norm": 0.3729897141456604, + "embed_lm_head_max_spectral_norm": 0.2357056736946106, + "layer_1_update_fnorm": 0.44348347187042236, + "layer_1_max_l1_linf_norm": 0.5675358772277832, + "layer_1_max_spectral_norm": 0.08980712294578552, + "layer_2_update_fnorm": 0.4548477828502655, + "layer_2_max_l1_linf_norm": 0.5494040250778198, + "layer_2_max_spectral_norm": 0.08474904298782349, + "layer_3_update_fnorm": 0.4578539729118347, + 
"layer_3_max_l1_linf_norm": 0.6282535791397095, + "layer_3_max_spectral_norm": 0.08374297618865967, + "layer_4_update_fnorm": 0.4520277976989746, + "layer_4_max_l1_linf_norm": 0.5748071670532227, + "layer_4_max_spectral_norm": 0.0862821564078331, + "layer_5_update_fnorm": 0.40821388363838196, + "layer_5_max_l1_linf_norm": 0.4250273108482361, + "layer_5_max_spectral_norm": 0.06556092202663422, + "layer_6_update_fnorm": 0.46244752407073975, + "layer_6_max_l1_linf_norm": 0.5439902544021606, + "layer_6_max_spectral_norm": 0.07716754078865051, + "layer_7_update_fnorm": 0.47383713722229004, + "layer_7_max_l1_linf_norm": 0.5081132650375366, + "layer_7_max_spectral_norm": 0.06765812635421753, + "layer_8_update_fnorm": 0.4843768775463104, + "layer_8_max_l1_linf_norm": 0.5103834867477417, + "layer_8_max_spectral_norm": 0.05821816995739937, + "layer_9_update_fnorm": 0.4965367913246155, + "layer_9_max_l1_linf_norm": 0.5132300853729248, + "layer_9_max_spectral_norm": 0.05568889528512955, + "layer_10_update_fnorm": 0.5126816034317017, + "layer_10_max_l1_linf_norm": 0.5342195630073547, + "layer_10_max_spectral_norm": 0.0550139844417572, + "layer_11_update_fnorm": 0.5157620906829834, + "layer_11_max_l1_linf_norm": 0.5434901714324951, + "layer_11_max_spectral_norm": 0.058040689677000046, + "layer_12_update_fnorm": 0.5039180517196655, + "layer_12_max_l1_linf_norm": 0.5217772126197815, + "layer_12_max_spectral_norm": 0.0901368111371994, + "block0_q_update_fnorm": 0.13604006171226501, + "block0_q_max_l1_linf_norm": 0.3462058901786804, + "block0_q_max_spectral_norm": 0.08116251975297928, + "block0_k_update_fnorm": 0.13069339096546173, + "block0_k_max_l1_linf_norm": 0.308743953704834, + "block0_k_max_spectral_norm": 0.08980712294578552, + "block0_v_update_fnorm": 0.1290629357099533, + "block0_v_max_l1_linf_norm": 0.17947563529014587, + "block0_v_max_spectral_norm": 0.057920191437006, + "block0_o_update_fnorm": 0.13747388124465942, + "block0_o_max_l1_linf_norm": 0.14506296813488007, + "block0_o_max_spectral_norm": 0.03315187245607376, + "block0_mlp_win_update_fnorm": 0.2608160078525543, + "block0_mlp_win_max_l1_linf_norm": 0.15708115696907043, + "block0_mlp_win_max_spectral_norm": 0.062097251415252686, + "block0_mlp_wout_update_fnorm": 0.23964323103427887, + "block0_mlp_wout_max_l1_linf_norm": 0.5675358772277832, + "block0_mlp_wout_max_spectral_norm": 0.0760171189904213, + "block3_q_update_fnorm": 0.13226163387298584, + "block3_q_max_l1_linf_norm": 0.1463460624217987, + "block3_q_max_spectral_norm": 0.04211627319455147, + "block3_k_update_fnorm": 0.12822532653808594, + "block3_k_max_l1_linf_norm": 0.2372729331254959, + "block3_k_max_spectral_norm": 0.036463089287281036, + "block3_v_update_fnorm": 0.10789583623409271, + "block3_v_max_l1_linf_norm": 0.13500183820724487, + "block3_v_max_spectral_norm": 0.03587619587779045, + "block3_o_update_fnorm": 0.11785177886486053, + "block3_o_max_l1_linf_norm": 0.1290331780910492, + "block3_o_max_spectral_norm": 0.0390356220304966, + "block3_mlp_win_update_fnorm": 0.2896668314933777, + "block3_mlp_win_max_l1_linf_norm": 0.19800592958927155, + "block3_mlp_win_max_spectral_norm": 0.06986693292856216, + "block3_mlp_wout_update_fnorm": 0.2466687113046646, + "block3_mlp_wout_max_l1_linf_norm": 0.5748071670532227, + "block3_mlp_wout_max_spectral_norm": 0.0862821564078331, + "block7_q_update_fnorm": 0.15300028026103973, + "block7_q_max_l1_linf_norm": 0.16025206446647644, + "block7_q_max_spectral_norm": 0.03216816484928131, + "block7_k_update_fnorm": 0.14302082359790802, + 
"block7_k_max_l1_linf_norm": 0.1550372987985611, + "block7_k_max_spectral_norm": 0.02641293592751026, + "block7_v_update_fnorm": 0.12001699954271317, + "block7_v_max_l1_linf_norm": 0.13581982254981995, + "block7_v_max_spectral_norm": 0.033353231847286224, + "block7_o_update_fnorm": 0.136564239859581, + "block7_o_max_l1_linf_norm": 0.1370096504688263, + "block7_o_max_spectral_norm": 0.03109130822122097, + "block7_mlp_win_update_fnorm": 0.2883926331996918, + "block7_mlp_win_max_l1_linf_norm": 0.1608424186706543, + "block7_mlp_win_max_spectral_norm": 0.05821816995739937, + "block7_mlp_wout_update_fnorm": 0.2728092670440674, + "block7_mlp_wout_max_l1_linf_norm": 0.5103834867477417, + "block7_mlp_wout_max_spectral_norm": 0.055926982313394547, + "block11_q_update_fnorm": 0.15479451417922974, + "block11_q_max_l1_linf_norm": 0.1768139898777008, + "block11_q_max_spectral_norm": 0.03383889049291611, + "block11_k_update_fnorm": 0.14515459537506104, + "block11_k_max_l1_linf_norm": 0.1658593714237213, + "block11_k_max_spectral_norm": 0.02668498456478119, + "block11_v_update_fnorm": 0.12627825140953064, + "block11_v_max_l1_linf_norm": 0.15322233736515045, + "block11_v_max_spectral_norm": 0.04594806581735611, + "block11_o_update_fnorm": 0.14865431189537048, + "block11_o_max_l1_linf_norm": 0.15954071283340454, + "block11_o_max_spectral_norm": 0.041459016501903534, + "block11_mlp_win_update_fnorm": 0.30594155192375183, + "block11_mlp_win_max_l1_linf_norm": 0.17853613197803497, + "block11_mlp_win_max_spectral_norm": 0.06316843628883362, + "block11_mlp_wout_update_fnorm": 0.2777605354785919, + "block11_mlp_wout_max_l1_linf_norm": 0.5217772126197815, + "block11_mlp_wout_max_spectral_norm": 0.0901368111371994, + "total_sharpness": 0.016846925020217896, + "block_total_sharpness": 0.025893516838550568, + "v_norm_block": 1.6390538215637207, + "v_T_H_v_block": 0.0695628672838211, + "v_norm": 2.1100969314575195, + "ip_v_neg_g_hvp": 0.0851113572716713, + "cos_v_neg_g_hvp": 0.10439679771661758, + "g_hvp_norm": 0.38636514544487, + "ip_v_neg_g_t": 0.08541024476289749, + "cos_v_neg_g_t": 0.1169130727648735, + "g_t_norm": 0.3462138772010803, + "g_norm": 0.38636514544487, + "hv_norm": 0.6011273860931396, + "cos_v_hv": 0.05913662910461426, + "hg_norm": 3.3264803886413574, + "cos_g_hg": 0.7244349718093872, + "v_parallel_norm": 0.013792537152767181, + "v_perp_norm": 2.1100518703460693, + "embed_lm_head_v_norm": 1.3289134502410889, + "embed_lm_head_cos_v_neg_g": 0.08330406993627548, + "layer_1_v_norm": 0.44348347187042236, + "layer_1_cos_v_neg_g": 0.21270513534545898, + "layer_2_v_norm": 0.4548477828502655, + "layer_2_cos_v_neg_g": 0.10528423637151718, + "layer_3_v_norm": 0.4578539729118347, + "layer_3_cos_v_neg_g": 0.08838809281587601, + "layer_4_v_norm": 0.4520277976989746, + "layer_4_cos_v_neg_g": 0.09783529490232468, + "layer_5_v_norm": 0.40821388363838196, + "layer_5_cos_v_neg_g": 0.0785817876458168, + "layer_6_v_norm": 0.46244749426841736, + "layer_6_cos_v_neg_g": 0.09232354164123535, + "layer_7_v_norm": 0.47383713722229004, + "layer_7_cos_v_neg_g": 0.11742084473371506, + "layer_8_v_norm": 0.4843768775463104, + "layer_8_cos_v_neg_g": 0.11072112619876862, + "layer_9_v_norm": 0.4965367913246155, + "layer_9_cos_v_neg_g": 0.10439682751893997, + "layer_10_v_norm": 0.5126816034317017, + "layer_10_cos_v_neg_g": 0.11753036826848984, + "layer_11_v_norm": 0.5157620906829834, + "layer_11_cos_v_neg_g": 0.1353950947523117, + "layer_12_v_norm": 0.5039180517196655, + "layer_12_cos_v_neg_g": 0.18559163808822632, + "block0_q_v_norm": 
0.13604006171226501, + "block0_q_cos_v_neg_g": 0.3214094042778015, + "block0_k_v_norm": 0.13069339096546173, + "block0_k_cos_v_neg_g": 0.3041665554046631, + "block0_v_v_norm": 0.1290629357099533, + "block0_v_cos_v_neg_g": 0.26327741146087646, + "block0_o_v_norm": 0.13747388124465942, + "block0_o_cos_v_neg_g": 0.2446107417345047, + "block0_mlp_win_v_norm": 0.2608160078525543, + "block0_mlp_win_cos_v_neg_g": 0.27315154671669006, + "block0_mlp_wout_v_norm": 0.23964323103427887, + "block0_mlp_wout_cos_v_neg_g": 0.2754480242729187, + "block3_q_v_norm": 0.13226163387298584, + "block3_q_cos_v_neg_g": 0.09597449749708176, + "block3_k_v_norm": 0.12822532653808594, + "block3_k_cos_v_neg_g": 0.11967069655656815, + "block3_v_v_norm": 0.10789583623409271, + "block3_v_cos_v_neg_g": 0.08146022260189056, + "block3_o_v_norm": 0.11785177886486053, + "block3_o_cos_v_neg_g": 0.20643845200538635, + "block3_mlp_win_v_norm": 0.2896668314933777, + "block3_mlp_win_cos_v_neg_g": 0.10453082621097565, + "block3_mlp_wout_v_norm": 0.2466687113046646, + "block3_mlp_wout_cos_v_neg_g": 0.2809959053993225, + "block7_q_v_norm": 0.15300028026103973, + "block7_q_cos_v_neg_g": 0.13210588693618774, + "block7_k_v_norm": 0.14302082359790802, + "block7_k_cos_v_neg_g": 0.23540134727954865, + "block7_v_v_norm": 0.12001699954271317, + "block7_v_cos_v_neg_g": 0.10113997757434845, + "block7_o_v_norm": 0.136564239859581, + "block7_o_cos_v_neg_g": 0.25998473167419434, + "block7_mlp_win_v_norm": 0.2883926331996918, + "block7_mlp_win_cos_v_neg_g": 0.15266475081443787, + "block7_mlp_wout_v_norm": 0.2728092670440674, + "block7_mlp_wout_cos_v_neg_g": 0.2564615309238434, + "block11_q_v_norm": 0.15479451417922974, + "block11_q_cos_v_neg_g": 0.18407244980335236, + "block11_k_v_norm": 0.14515459537506104, + "block11_k_cos_v_neg_g": 0.23375289142131805, + "block11_v_v_norm": 0.12627825140953064, + "block11_v_cos_v_neg_g": 0.1688770353794098, + "block11_o_v_norm": 0.14865431189537048, + "block11_o_cos_v_neg_g": 0.2625209093093872, + "block11_mlp_win_v_norm": 0.30594155192375183, + "block11_mlp_win_cos_v_neg_g": 0.19595374166965485, + "block11_mlp_wout_v_norm": 0.2777605354785919, + "block11_mlp_wout_cos_v_neg_g": 0.20237988233566284, + "embed_lm_head_sharpness": 0.0004365421482361853, + "layer_1_sharpness": 0.0255899615585804, + "layer_2_sharpness": 0.001177299185656011, + "layer_3_sharpness": 0.003120346460491419, + "layer_4_sharpness": 0.002742262091487646, + "layer_5_sharpness": 0.0034540982451289892, + "layer_6_sharpness": 0.002611203119158745, + "layer_7_sharpness": 0.003393758786842227, + "layer_8_sharpness": 0.0042994022369384766, + "layer_9_sharpness": 0.003961472772061825, + "layer_10_sharpness": 0.0023284335620701313, + "layer_11_sharpness": 0.0023233918473124504, + "layer_12_sharpness": 0.00646533677354455, + "block0_q_sharpness": 0.005741771310567856, + "block0_k_sharpness": 0.004332084208726883, + "block0_v_sharpness": 0.012268200516700745, + "block0_o_sharpness": 0.005408558528870344, + "block0_mlp_win_sharpness": 0.004511740989983082, + "block0_mlp_wout_sharpness": 0.0111961979418993, + "block3_q_sharpness": 0.0016642393311485648, + "block3_k_sharpness": 0.0009902447927743196, + "block3_v_sharpness": 0.003969849087297916, + "block3_o_sharpness": 0.0015890878858044744, + "block3_mlp_win_sharpness": 0.000254489277722314, + "block3_mlp_wout_sharpness": 0.0008310999255627394, + "block7_q_sharpness": 0.00027667387621477246, + "block7_k_sharpness": 0.0010949332499876618, + "block7_v_sharpness": 0.008122500963509083, + "block7_o_sharpness": 
0.0014273235574364662, + "block7_mlp_win_sharpness": 0.0014017719076946378, + "block7_mlp_wout_sharpness": 0.0008800532086752355, + "block11_q_sharpness": 0.0002195721463067457, + "block11_k_sharpness": 0.0007344020996242762, + "block11_v_sharpness": 0.003495912067592144, + "block11_o_sharpness": 0.0004689720517490059, + "block11_mlp_win_sharpness": 0.0017206653719767928, + "block11_mlp_wout_sharpness": 0.005109189543873072, + "sum_layer_numerators": 0.013244215451004666, + "block_diag_sharpness": 0.00492991927079203, + "cross_layer_sharpness": 0.02096359756775854 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..b52eceb38b09dda411a4083f1b941d266e95431a --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1571953296661377, + "total_l1_linf_norm": 19071.8671875, + "total_spectral_norm": 2.157195806503296, + "embed_lm_head_update_fnorm": 1.330280065536499, + "embed_lm_head_max_l1_linf_norm": 0.37500378489494324, + "embed_lm_head_max_spectral_norm": 0.22101831436157227, + "layer_1_update_fnorm": 0.4674645960330963, + "layer_1_max_l1_linf_norm": 0.5856213569641113, + "layer_1_max_spectral_norm": 0.08369995653629303, + "layer_2_update_fnorm": 0.48206639289855957, + "layer_2_max_l1_linf_norm": 0.6021900773048401, + "layer_2_max_spectral_norm": 0.08444845676422119, + "layer_3_update_fnorm": 0.48067864775657654, + "layer_3_max_l1_linf_norm": 0.5963025689125061, + "layer_3_max_spectral_norm": 0.091731958091259, + "layer_4_update_fnorm": 0.47369107604026794, + "layer_4_max_l1_linf_norm": 0.5958322286605835, + "layer_4_max_spectral_norm": 0.09690827876329422, + "layer_5_update_fnorm": 0.43800726532936096, + "layer_5_max_l1_linf_norm": 0.4474153518676758, + "layer_5_max_spectral_norm": 0.07015056908130646, + "layer_6_update_fnorm": 0.4776001274585724, + "layer_6_max_l1_linf_norm": 0.5950088500976562, + "layer_6_max_spectral_norm": 0.08297109603881836, + "layer_7_update_fnorm": 0.4879586398601532, + "layer_7_max_l1_linf_norm": 0.5797938108444214, + "layer_7_max_spectral_norm": 0.07210254669189453, + "layer_8_update_fnorm": 0.4982866644859314, + "layer_8_max_l1_linf_norm": 0.5546844005584717, + "layer_8_max_spectral_norm": 0.06288114190101624, + "layer_9_update_fnorm": 0.5074686408042908, + "layer_9_max_l1_linf_norm": 0.5274676084518433, + "layer_9_max_spectral_norm": 0.05240005627274513, + "layer_10_update_fnorm": 0.5216464996337891, + "layer_10_max_l1_linf_norm": 0.5388344526290894, + "layer_10_max_spectral_norm": 0.051057953387498856, + "layer_11_update_fnorm": 0.5229418873786926, + "layer_11_max_l1_linf_norm": 0.5484491586685181, + "layer_11_max_spectral_norm": 0.05370308831334114, + "layer_12_update_fnorm": 0.5177342295646667, + "layer_12_max_l1_linf_norm": 0.5597347021102905, + "layer_12_max_spectral_norm": 0.08018643409013748, + "block0_q_update_fnorm": 0.13992349803447723, + "block0_q_max_l1_linf_norm": 0.27575457096099854, + "block0_q_max_spectral_norm": 0.07128419727087021, + "block0_k_update_fnorm": 0.13652297854423523, + "block0_k_max_l1_linf_norm": 0.30440711975097656, + "block0_k_max_spectral_norm": 0.08369995653629303, + "block0_v_update_fnorm": 
0.13975252211093903, + "block0_v_max_l1_linf_norm": 0.22304095327854156, + "block0_v_max_spectral_norm": 0.06525560468435287, + "block0_o_update_fnorm": 0.14134052395820618, + "block0_o_max_l1_linf_norm": 0.14533057808876038, + "block0_o_max_spectral_norm": 0.04540518671274185, + "block0_mlp_win_update_fnorm": 0.27916067838668823, + "block0_mlp_win_max_l1_linf_norm": 0.16204304993152618, + "block0_mlp_win_max_spectral_norm": 0.060491256415843964, + "block0_mlp_wout_update_fnorm": 0.2505505084991455, + "block0_mlp_wout_max_l1_linf_norm": 0.5856213569641113, + "block0_mlp_wout_max_spectral_norm": 0.07682277262210846, + "block3_q_update_fnorm": 0.14195232093334198, + "block3_q_max_l1_linf_norm": 0.15412603318691254, + "block3_q_max_spectral_norm": 0.045127544552087784, + "block3_k_update_fnorm": 0.13703368604183197, + "block3_k_max_l1_linf_norm": 0.18461942672729492, + "block3_k_max_spectral_norm": 0.03483940660953522, + "block3_v_update_fnorm": 0.11785867810249329, + "block3_v_max_l1_linf_norm": 0.13494637608528137, + "block3_v_max_spectral_norm": 0.038972482085227966, + "block3_o_update_fnorm": 0.1258920282125473, + "block3_o_max_l1_linf_norm": 0.14911893010139465, + "block3_o_max_spectral_norm": 0.04489034414291382, + "block3_mlp_win_update_fnorm": 0.29948747158050537, + "block3_mlp_win_max_l1_linf_norm": 0.22688338160514832, + "block3_mlp_win_max_spectral_norm": 0.07226531207561493, + "block3_mlp_wout_update_fnorm": 0.2567216455936432, + "block3_mlp_wout_max_l1_linf_norm": 0.5958322286605835, + "block3_mlp_wout_max_spectral_norm": 0.09690827876329422, + "block7_q_update_fnorm": 0.15730157494544983, + "block7_q_max_l1_linf_norm": 0.1562500298023224, + "block7_q_max_spectral_norm": 0.028844337910413742, + "block7_k_update_fnorm": 0.14947345852851868, + "block7_k_max_l1_linf_norm": 0.1729409098625183, + "block7_k_max_spectral_norm": 0.029441971331834793, + "block7_v_update_fnorm": 0.12619787454605103, + "block7_v_max_l1_linf_norm": 0.14013230800628662, + "block7_v_max_spectral_norm": 0.03364362195134163, + "block7_o_update_fnorm": 0.14076699316501617, + "block7_o_max_l1_linf_norm": 0.1380922496318817, + "block7_o_max_spectral_norm": 0.03024314157664776, + "block7_mlp_win_update_fnorm": 0.297170490026474, + "block7_mlp_win_max_l1_linf_norm": 0.17425379157066345, + "block7_mlp_win_max_spectral_norm": 0.05460706725716591, + "block7_mlp_wout_update_fnorm": 0.27756285667419434, + "block7_mlp_wout_max_l1_linf_norm": 0.5546844005584717, + "block7_mlp_wout_max_spectral_norm": 0.06288114190101624, + "block11_q_update_fnorm": 0.16187599301338196, + "block11_q_max_l1_linf_norm": 0.1777813732624054, + "block11_q_max_spectral_norm": 0.035705436021089554, + "block11_k_update_fnorm": 0.15246304869651794, + "block11_k_max_l1_linf_norm": 0.17347152531147003, + "block11_k_max_spectral_norm": 0.02719898894429207, + "block11_v_update_fnorm": 0.13300129771232605, + "block11_v_max_l1_linf_norm": 0.14975181221961975, + "block11_v_max_spectral_norm": 0.042757827788591385, + "block11_o_update_fnorm": 0.15186205506324768, + "block11_o_max_l1_linf_norm": 0.15651655197143555, + "block11_o_max_spectral_norm": 0.041747067123651505, + "block11_mlp_win_update_fnorm": 0.3117331266403198, + "block11_mlp_win_max_l1_linf_norm": 0.19533640146255493, + "block11_mlp_win_max_spectral_norm": 0.059219807386398315, + "block11_mlp_wout_update_fnorm": 0.2838191092014313, + "block11_mlp_wout_max_l1_linf_norm": 0.5597347021102905, + "block11_mlp_wout_max_spectral_norm": 0.08018643409013748, + "total_sharpness": 0.012894455343484879, + 
"block_total_sharpness": 0.01928604021668434, + "v_norm_block": 1.6981897354125977, + "v_T_H_v_block": 0.05561801418662071, + "v_norm": 2.1571953296661377, + "ip_v_neg_g_hvp": 0.07740399241447449, + "cos_v_neg_g_hvp": 0.09316260367631912, + "g_hvp_norm": 0.38515210151672363, + "ip_v_neg_g_t": 0.07783927768468857, + "cos_v_neg_g_t": 0.10492230206727982, + "g_t_norm": 0.3439073860645294, + "g_norm": 0.38515210151672363, + "hv_norm": 0.5626689791679382, + "cos_v_hv": 0.0494355708360672, + "hg_norm": 3.472238302230835, + "cos_g_hg": 0.6975959539413452, + "v_parallel_norm": 0.01253454014658928, + "v_perp_norm": 2.1571590900421143, + "embed_lm_head_v_norm": 1.330280065536499, + "embed_lm_head_cos_v_neg_g": 0.07895409315824509, + "layer_1_v_norm": 0.4674645960330963, + "layer_1_cos_v_neg_g": 0.19743739068508148, + "layer_2_v_norm": 0.48206639289855957, + "layer_2_cos_v_neg_g": 0.09683883190155029, + "layer_3_v_norm": 0.4806786775588989, + "layer_3_cos_v_neg_g": 0.0746961161494255, + "layer_4_v_norm": 0.47369107604026794, + "layer_4_cos_v_neg_g": 0.08280377089977264, + "layer_5_v_norm": 0.43800726532936096, + "layer_5_cos_v_neg_g": 0.06453979760408401, + "layer_6_v_norm": 0.4776001274585724, + "layer_6_cos_v_neg_g": 0.08116012066602707, + "layer_7_v_norm": 0.4879586398601532, + "layer_7_cos_v_neg_g": 0.09495221823453903, + "layer_8_v_norm": 0.4982866942882538, + "layer_8_cos_v_neg_g": 0.09793158620595932, + "layer_9_v_norm": 0.5074686408042908, + "layer_9_cos_v_neg_g": 0.09087488055229187, + "layer_10_v_norm": 0.5216464996337891, + "layer_10_cos_v_neg_g": 0.10257212817668915, + "layer_11_v_norm": 0.5229418873786926, + "layer_11_cos_v_neg_g": 0.11892806738615036, + "layer_12_v_norm": 0.5177342295646667, + "layer_12_cos_v_neg_g": 0.18134728074073792, + "block0_q_v_norm": 0.13992349803447723, + "block0_q_cos_v_neg_g": 0.2429889738559723, + "block0_k_v_norm": 0.13652297854423523, + "block0_k_cos_v_neg_g": 0.23501865565776825, + "block0_v_v_norm": 0.13975252211093903, + "block0_v_cos_v_neg_g": 0.2576218247413635, + "block0_o_v_norm": 0.14134052395820618, + "block0_o_cos_v_neg_g": 0.25435230135917664, + "block0_mlp_win_v_norm": 0.27916067838668823, + "block0_mlp_win_cos_v_neg_g": 0.21296148002147675, + "block0_mlp_wout_v_norm": 0.2505505084991455, + "block0_mlp_wout_cos_v_neg_g": 0.2447064220905304, + "block3_q_v_norm": 0.14195232093334198, + "block3_q_cos_v_neg_g": 0.10167522728443146, + "block3_k_v_norm": 0.13703368604183197, + "block3_k_cos_v_neg_g": 0.12659215927124023, + "block3_v_v_norm": 0.11785867810249329, + "block3_v_cos_v_neg_g": 0.06866328418254852, + "block3_o_v_norm": 0.1258920282125473, + "block3_o_cos_v_neg_g": 0.20760303735733032, + "block3_mlp_win_v_norm": 0.29948747158050537, + "block3_mlp_win_cos_v_neg_g": 0.0874212235212326, + "block3_mlp_wout_v_norm": 0.2567216455936432, + "block3_mlp_wout_cos_v_neg_g": 0.2817263603210449, + "block7_q_v_norm": 0.15730157494544983, + "block7_q_cos_v_neg_g": 0.11211328953504562, + "block7_k_v_norm": 0.14947345852851868, + "block7_k_cos_v_neg_g": 0.22776828706264496, + "block7_v_v_norm": 0.12619787454605103, + "block7_v_cos_v_neg_g": 0.08731257170438766, + "block7_o_v_norm": 0.14076699316501617, + "block7_o_cos_v_neg_g": 0.23851647973060608, + "block7_mlp_win_v_norm": 0.297170490026474, + "block7_mlp_win_cos_v_neg_g": 0.1275804191827774, + "block7_mlp_wout_v_norm": 0.27756285667419434, + "block7_mlp_wout_cos_v_neg_g": 0.24122750759124756, + "block11_q_v_norm": 0.16187599301338196, + "block11_q_cos_v_neg_g": 0.17412663996219635, + "block11_k_v_norm": 
0.15246304869651794, + "block11_k_cos_v_neg_g": 0.22881540656089783, + "block11_v_v_norm": 0.13300129771232605, + "block11_v_cos_v_neg_g": 0.1422645002603531, + "block11_o_v_norm": 0.15186205506324768, + "block11_o_cos_v_neg_g": 0.24585770070552826, + "block11_mlp_win_v_norm": 0.3117331266403198, + "block11_mlp_win_cos_v_neg_g": 0.18795011937618256, + "block11_mlp_wout_v_norm": 0.2838191092014313, + "block11_mlp_wout_cos_v_neg_g": 0.22173768281936646, + "embed_lm_head_sharpness": 0.00043228728463873267, + "layer_1_sharpness": 0.020078860223293304, + "layer_2_sharpness": 0.0010930802673101425, + "layer_3_sharpness": 0.0021777197252959013, + "layer_4_sharpness": 0.002757501555606723, + "layer_5_sharpness": 0.0030269906856119633, + "layer_6_sharpness": 0.0019642941188067198, + "layer_7_sharpness": 0.0024242387153208256, + "layer_8_sharpness": 0.0032203695736825466, + "layer_9_sharpness": 0.0028791246004402637, + "layer_10_sharpness": 0.0017732756678014994, + "layer_11_sharpness": 0.001713064732030034, + "layer_12_sharpness": 0.0048701101914048195, + "block0_q_sharpness": 0.002048833528533578, + "block0_k_sharpness": 0.002716062357649207, + "block0_v_sharpness": 0.01229151152074337, + "block0_o_sharpness": 0.007122598122805357, + "block0_mlp_win_sharpness": 0.004831543657928705, + "block0_mlp_wout_sharpness": 0.008282876573503017, + "block3_q_sharpness": 0.0012476863339543343, + "block3_k_sharpness": 0.0008048894815146923, + "block3_v_sharpness": 0.004502379335463047, + "block3_o_sharpness": 0.002141508972272277, + "block3_mlp_win_sharpness": 0.0002558752312324941, + "block3_mlp_wout_sharpness": 0.0008889946038834751, + "block7_q_sharpness": 0.0002882343251258135, + "block7_k_sharpness": 0.0008200257434509695, + "block7_v_sharpness": 0.006893540732562542, + "block7_o_sharpness": 0.0009715344058349729, + "block7_mlp_win_sharpness": 0.00090366683434695, + "block7_mlp_wout_sharpness": 0.0009387772879563272, + "block11_q_sharpness": 0.00032952826586551964, + "block11_k_sharpness": 0.0008173524984158576, + "block11_v_sharpness": 0.0024254615418612957, + "block11_o_sharpness": 0.00045067022438161075, + "block11_mlp_win_sharpness": 0.0012246762635186315, + "block11_mlp_wout_sharpness": 0.004039207939058542, + "sum_layer_numerators": 0.011167086521058012, + "block_diag_sharpness": 0.00387228628534586, + "cross_layer_sharpness": 0.01541375393133848 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..905f5613c48aa2ae951d7ad0493cc34c722f68b5 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_3500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1352994441986084, + "total_l1_linf_norm": 18844.873046875, + "total_spectral_norm": 2.1352999210357666, + "embed_lm_head_update_fnorm": 1.3246617317199707, + "embed_lm_head_max_l1_linf_norm": 0.35847872495651245, + "embed_lm_head_max_spectral_norm": 0.21157336235046387, + "layer_1_update_fnorm": 0.44662871956825256, + "layer_1_max_l1_linf_norm": 0.5116063356399536, + "layer_1_max_spectral_norm": 0.07707773894071579, + "layer_2_update_fnorm": 0.4744322597980499, + "layer_2_max_l1_linf_norm": 0.5376400947570801, + "layer_2_max_spectral_norm": 0.08216404914855957, + 
"layer_3_update_fnorm": 0.47121065855026245, + "layer_3_max_l1_linf_norm": 0.6138707399368286, + "layer_3_max_spectral_norm": 0.07735877484083176, + "layer_4_update_fnorm": 0.4633129835128784, + "layer_4_max_l1_linf_norm": 0.5350746512413025, + "layer_4_max_spectral_norm": 0.07569902390241623, + "layer_5_update_fnorm": 0.4364733099937439, + "layer_5_max_l1_linf_norm": 0.47040045261383057, + "layer_5_max_spectral_norm": 0.05836787819862366, + "layer_6_update_fnorm": 0.4730973243713379, + "layer_6_max_l1_linf_norm": 0.5478395223617554, + "layer_6_max_spectral_norm": 0.07154068350791931, + "layer_7_update_fnorm": 0.48602166771888733, + "layer_7_max_l1_linf_norm": 0.5394322276115417, + "layer_7_max_spectral_norm": 0.0643559917807579, + "layer_8_update_fnorm": 0.4930630922317505, + "layer_8_max_l1_linf_norm": 0.49833351373672485, + "layer_8_max_spectral_norm": 0.05634787678718567, + "layer_9_update_fnorm": 0.5013819932937622, + "layer_9_max_l1_linf_norm": 0.5186892747879028, + "layer_9_max_spectral_norm": 0.04703427851200104, + "layer_10_update_fnorm": 0.5186288356781006, + "layer_10_max_l1_linf_norm": 0.5335279703140259, + "layer_10_max_spectral_norm": 0.04732391983270645, + "layer_11_update_fnorm": 0.5213342308998108, + "layer_11_max_l1_linf_norm": 0.5261121988296509, + "layer_11_max_spectral_norm": 0.049611859023571014, + "layer_12_update_fnorm": 0.5076491832733154, + "layer_12_max_l1_linf_norm": 0.56745445728302, + "layer_12_max_spectral_norm": 0.07237511873245239, + "block0_q_update_fnorm": 0.1319323480129242, + "block0_q_max_l1_linf_norm": 0.2814573645591736, + "block0_q_max_spectral_norm": 0.06740882247686386, + "block0_k_update_fnorm": 0.12996719777584076, + "block0_k_max_l1_linf_norm": 0.2937090992927551, + "block0_k_max_spectral_norm": 0.07707773894071579, + "block0_v_update_fnorm": 0.12786009907722473, + "block0_v_max_l1_linf_norm": 0.20273897051811218, + "block0_v_max_spectral_norm": 0.05156230181455612, + "block0_o_update_fnorm": 0.12359726428985596, + "block0_o_max_l1_linf_norm": 0.1385180801153183, + "block0_o_max_spectral_norm": 0.034445978701114655, + "block0_mlp_win_update_fnorm": 0.27646106481552124, + "block0_mlp_win_max_l1_linf_norm": 0.14734159409999847, + "block0_mlp_win_max_spectral_norm": 0.05182144418358803, + "block0_mlp_wout_update_fnorm": 0.2388399839401245, + "block0_mlp_wout_max_l1_linf_norm": 0.5116063356399536, + "block0_mlp_wout_max_spectral_norm": 0.06860858201980591, + "block3_q_update_fnorm": 0.13579846918582916, + "block3_q_max_l1_linf_norm": 0.1744174361228943, + "block3_q_max_spectral_norm": 0.04293343797326088, + "block3_k_update_fnorm": 0.1299634575843811, + "block3_k_max_l1_linf_norm": 0.17242741584777832, + "block3_k_max_spectral_norm": 0.03191252797842026, + "block3_v_update_fnorm": 0.11279002577066422, + "block3_v_max_l1_linf_norm": 0.14622007310390472, + "block3_v_max_spectral_norm": 0.034495752304792404, + "block3_o_update_fnorm": 0.11886846274137497, + "block3_o_max_l1_linf_norm": 0.1393699198961258, + "block3_o_max_spectral_norm": 0.03479425236582756, + "block3_mlp_win_update_fnorm": 0.2973766028881073, + "block3_mlp_win_max_l1_linf_norm": 0.20118054747581482, + "block3_mlp_win_max_spectral_norm": 0.06732916086912155, + "block3_mlp_wout_update_fnorm": 0.25285476446151733, + "block3_mlp_wout_max_l1_linf_norm": 0.5350746512413025, + "block3_mlp_wout_max_spectral_norm": 0.07569902390241623, + "block7_q_update_fnorm": 0.1566980928182602, + "block7_q_max_l1_linf_norm": 0.1544836163520813, + "block7_q_max_spectral_norm": 0.02935013175010681, + 
"block7_k_update_fnorm": 0.1470503807067871, + "block7_k_max_l1_linf_norm": 0.16106075048446655, + "block7_k_max_spectral_norm": 0.026854844763875008, + "block7_v_update_fnorm": 0.12648294866085052, + "block7_v_max_l1_linf_norm": 0.13483473658561707, + "block7_v_max_spectral_norm": 0.02738208882510662, + "block7_o_update_fnorm": 0.1399068385362625, + "block7_o_max_l1_linf_norm": 0.13396580517292023, + "block7_o_max_spectral_norm": 0.025175251066684723, + "block7_mlp_win_update_fnorm": 0.2968999445438385, + "block7_mlp_win_max_l1_linf_norm": 0.1702175885438919, + "block7_mlp_win_max_spectral_norm": 0.05078093335032463, + "block7_mlp_wout_update_fnorm": 0.27038902044296265, + "block7_mlp_wout_max_l1_linf_norm": 0.49833351373672485, + "block7_mlp_wout_max_spectral_norm": 0.05634787678718567, + "block11_q_update_fnorm": 0.1597764641046524, + "block11_q_max_l1_linf_norm": 0.16366665065288544, + "block11_q_max_spectral_norm": 0.030828220769762993, + "block11_k_update_fnorm": 0.15052075684070587, + "block11_k_max_l1_linf_norm": 0.165665403008461, + "block11_k_max_spectral_norm": 0.024412017315626144, + "block11_v_update_fnorm": 0.13011014461517334, + "block11_v_max_l1_linf_norm": 0.15795384347438812, + "block11_v_max_spectral_norm": 0.03595714643597603, + "block11_o_update_fnorm": 0.14602623879909515, + "block11_o_max_l1_linf_norm": 0.14367398619651794, + "block11_o_max_spectral_norm": 0.035136137157678604, + "block11_mlp_win_update_fnorm": 0.31010758876800537, + "block11_mlp_win_max_l1_linf_norm": 0.17550702393054962, + "block11_mlp_win_max_spectral_norm": 0.05581262335181236, + "block11_mlp_wout_update_fnorm": 0.27385205030441284, + "block11_mlp_wout_max_l1_linf_norm": 0.56745445728302, + "block11_mlp_wout_max_spectral_norm": 0.07237511873245239, + "total_sharpness": 0.007271277252584696, + "block_total_sharpness": 0.010752948932349682, + "v_norm_block": 1.6747466325759888, + "v_T_H_v_block": 0.030159616842865944, + "v_norm": 2.1352994441986084, + "ip_v_neg_g_hvp": 0.06271152943372726, + "cos_v_neg_g_hvp": 0.08575694262981415, + "g_hvp_norm": 0.3424674868583679, + "ip_v_neg_g_t": 0.06297754496335983, + "cos_v_neg_g_t": 0.10152609646320343, + "g_t_norm": 0.29050207138061523, + "g_norm": 0.3424674868583679, + "hv_norm": 0.3885328471660614, + "cos_v_hv": 0.03996150195598602, + "hg_norm": 3.303175449371338, + "cos_g_hg": 0.6629388332366943, + "v_parallel_norm": 0.013891633599996567, + "v_perp_norm": 2.135254383087158, + "embed_lm_head_v_norm": 1.3246617317199707, + "embed_lm_head_cos_v_neg_g": 0.079798623919487, + "layer_1_v_norm": 0.44662871956825256, + "layer_1_cos_v_neg_g": 0.15792222321033478, + "layer_2_v_norm": 0.4744322597980499, + "layer_2_cos_v_neg_g": 0.0791822150349617, + "layer_3_v_norm": 0.47121068835258484, + "layer_3_cos_v_neg_g": 0.06847267597913742, + "layer_4_v_norm": 0.4633129835128784, + "layer_4_cos_v_neg_g": 0.07619206607341766, + "layer_5_v_norm": 0.4364733099937439, + "layer_5_cos_v_neg_g": 0.05510693043470383, + "layer_6_v_norm": 0.4730973541736603, + "layer_6_cos_v_neg_g": 0.06935131549835205, + "layer_7_v_norm": 0.48602166771888733, + "layer_7_cos_v_neg_g": 0.08801822364330292, + "layer_8_v_norm": 0.4930630922317505, + "layer_8_cos_v_neg_g": 0.08859365433454514, + "layer_9_v_norm": 0.5013819932937622, + "layer_9_cos_v_neg_g": 0.08453591912984848, + "layer_10_v_norm": 0.5186288356781006, + "layer_10_cos_v_neg_g": 0.09981566667556763, + "layer_11_v_norm": 0.521334171295166, + "layer_11_cos_v_neg_g": 0.11608055979013443, + "layer_12_v_norm": 0.5076491832733154, + 
"layer_12_cos_v_neg_g": 0.1585799753665924, + "block0_q_v_norm": 0.1319323480129242, + "block0_q_cos_v_neg_g": 0.18020011484622955, + "block0_k_v_norm": 0.12996719777584076, + "block0_k_cos_v_neg_g": 0.15701636672019958, + "block0_v_v_norm": 0.12786009907722473, + "block0_v_cos_v_neg_g": 0.21004453301429749, + "block0_o_v_norm": 0.12359726428985596, + "block0_o_cos_v_neg_g": 0.206975057721138, + "block0_mlp_win_v_norm": 0.27646106481552124, + "block0_mlp_win_cos_v_neg_g": 0.1588355153799057, + "block0_mlp_wout_v_norm": 0.2388399839401245, + "block0_mlp_wout_cos_v_neg_g": 0.1987210363149643, + "block3_q_v_norm": 0.13579846918582916, + "block3_q_cos_v_neg_g": 0.10400176793336868, + "block3_k_v_norm": 0.1299634575843811, + "block3_k_cos_v_neg_g": 0.1117510125041008, + "block3_v_v_norm": 0.11279002577066422, + "block3_v_cos_v_neg_g": 0.04610390588641167, + "block3_o_v_norm": 0.11886846274137497, + "block3_o_cos_v_neg_g": 0.15817540884017944, + "block3_mlp_win_v_norm": 0.2973766028881073, + "block3_mlp_win_cos_v_neg_g": 0.08041854947805405, + "block3_mlp_wout_v_norm": 0.25285476446151733, + "block3_mlp_wout_cos_v_neg_g": 0.2342735081911087, + "block7_q_v_norm": 0.1566980928182602, + "block7_q_cos_v_neg_g": 0.10975159704685211, + "block7_k_v_norm": 0.1470503807067871, + "block7_k_cos_v_neg_g": 0.20678003132343292, + "block7_v_v_norm": 0.12648294866085052, + "block7_v_cos_v_neg_g": 0.06980042159557343, + "block7_o_v_norm": 0.1399068385362625, + "block7_o_cos_v_neg_g": 0.22127129137516022, + "block7_mlp_win_v_norm": 0.2968999445438385, + "block7_mlp_win_cos_v_neg_g": 0.11798452585935593, + "block7_mlp_wout_v_norm": 0.27038902044296265, + "block7_mlp_wout_cos_v_neg_g": 0.23114894330501556, + "block11_q_v_norm": 0.1597764641046524, + "block11_q_cos_v_neg_g": 0.15754525363445282, + "block11_k_v_norm": 0.15052075684070587, + "block11_k_cos_v_neg_g": 0.1901334524154663, + "block11_v_v_norm": 0.13011014461517334, + "block11_v_cos_v_neg_g": 0.10732821375131607, + "block11_o_v_norm": 0.14602623879909515, + "block11_o_cos_v_neg_g": 0.22177565097808838, + "block11_mlp_win_v_norm": 0.31010758876800537, + "block11_mlp_win_cos_v_neg_g": 0.16143715381622314, + "block11_mlp_wout_v_norm": 0.27385205030441284, + "block11_mlp_wout_cos_v_neg_g": 0.19065941870212555, + "embed_lm_head_sharpness": 0.00034090710687451065, + "layer_1_sharpness": 0.008061197586357594, + "layer_2_sharpness": 0.00044769467785954475, + "layer_3_sharpness": 0.0008227400248870254, + "layer_4_sharpness": 0.0013474751031026244, + "layer_5_sharpness": 0.0012252237647771835, + "layer_6_sharpness": 0.0011149442289024591, + "layer_7_sharpness": 0.001741659827530384, + "layer_8_sharpness": 0.0021062027662992477, + "layer_9_sharpness": 0.0019639187958091497, + "layer_10_sharpness": 0.0014804115053266287, + "layer_11_sharpness": 0.0014098603278398514, + "layer_12_sharpness": 0.003458836115896702, + "block0_q_sharpness": 0.0019347770139575005, + "block0_k_sharpness": 0.002320258878171444, + "block0_v_sharpness": 0.007757252547889948, + "block0_o_sharpness": 0.005859408061951399, + "block0_mlp_win_sharpness": 0.0020948355086147785, + "block0_mlp_wout_sharpness": 0.003893520450219512, + "block3_q_sharpness": 0.0011293302522972226, + "block3_k_sharpness": 0.00038869722629897296, + "block3_v_sharpness": 0.0017588784685358405, + "block3_o_sharpness": 0.001103076385334134, + "block3_mlp_win_sharpness": 0.00023524697462562472, + "block3_mlp_wout_sharpness": 0.0004040801140945405, + "block7_q_sharpness": 0.00016441976185888052, + "block7_k_sharpness": 
0.0004696255491580814, + "block7_v_sharpness": 0.0043723625130951405, + "block7_o_sharpness": 0.0006945753120817244, + "block7_mlp_win_sharpness": 0.0006306276191025972, + "block7_mlp_wout_sharpness": 0.0005909443134441972, + "block11_q_sharpness": 0.00020257706637494266, + "block11_k_sharpness": 0.0004671461647376418, + "block11_v_sharpness": 0.0013427464291453362, + "block11_o_sharpness": 0.0003605907841119915, + "block11_mlp_win_sharpness": 0.0010555913904681802, + "block11_mlp_wout_sharpness": 0.003013515379279852, + "sum_layer_numerators": 0.005753584983223663, + "block_diag_sharpness": 0.002051352550800846, + "cross_layer_sharpness": 0.008701596381548837 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..a27dd83c96ef0fa62cf863d5fa1abc629793f13f --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1891286373138428, + "total_l1_linf_norm": 19408.640625, + "total_spectral_norm": 2.189128875732422, + "embed_lm_head_update_fnorm": 1.3342341184616089, + "embed_lm_head_max_l1_linf_norm": 0.3744962513446808, + "embed_lm_head_max_spectral_norm": 0.21354739367961884, + "layer_1_update_fnorm": 0.49208304286003113, + "layer_1_max_l1_linf_norm": 0.57256019115448, + "layer_1_max_spectral_norm": 0.09121046960353851, + "layer_2_update_fnorm": 0.5092917084693909, + "layer_2_max_l1_linf_norm": 0.5563849210739136, + "layer_2_max_spectral_norm": 0.08266014605760574, + "layer_3_update_fnorm": 0.48899614810943604, + "layer_3_max_l1_linf_norm": 0.623583197593689, + "layer_3_max_spectral_norm": 0.07989451289176941, + "layer_4_update_fnorm": 0.4863094687461853, + "layer_4_max_l1_linf_norm": 0.5884454250335693, + "layer_4_max_spectral_norm": 0.07830683141946793, + "layer_5_update_fnorm": 0.47708189487457275, + "layer_5_max_l1_linf_norm": 0.526530385017395, + "layer_5_max_spectral_norm": 0.06398355215787888, + "layer_6_update_fnorm": 0.4920320212841034, + "layer_6_max_l1_linf_norm": 0.5784273743629456, + "layer_6_max_spectral_norm": 0.07167555391788483, + "layer_7_update_fnorm": 0.4955845773220062, + "layer_7_max_l1_linf_norm": 0.557025671005249, + "layer_7_max_spectral_norm": 0.06807978451251984, + "layer_8_update_fnorm": 0.5009586215019226, + "layer_8_max_l1_linf_norm": 0.5682758092880249, + "layer_8_max_spectral_norm": 0.06704764068126678, + "layer_9_update_fnorm": 0.5053191184997559, + "layer_9_max_l1_linf_norm": 0.6014889478683472, + "layer_9_max_spectral_norm": 0.05694830045104027, + "layer_10_update_fnorm": 0.5222657322883606, + "layer_10_max_l1_linf_norm": 0.5466157793998718, + "layer_10_max_spectral_norm": 0.045166220515966415, + "layer_11_update_fnorm": 0.5271266102790833, + "layer_11_max_l1_linf_norm": 0.5313611030578613, + "layer_11_max_spectral_norm": 0.04736759513616562, + "layer_12_update_fnorm": 0.5125707387924194, + "layer_12_max_l1_linf_norm": 0.5470181107521057, + "layer_12_max_spectral_norm": 0.0818348228931427, + "block0_q_update_fnorm": 0.16109304130077362, + "block0_q_max_l1_linf_norm": 0.2738548517227173, + "block0_q_max_spectral_norm": 0.09121046960353851, + "block0_k_update_fnorm": 0.15503792464733124, + "block0_k_max_l1_linf_norm": 
0.2997093200683594, + "block0_k_max_spectral_norm": 0.09120002388954163, + "block0_v_update_fnorm": 0.14617912471294403, + "block0_v_max_l1_linf_norm": 0.2069830596446991, + "block0_v_max_spectral_norm": 0.055980924516916275, + "block0_o_update_fnorm": 0.1458231657743454, + "block0_o_max_l1_linf_norm": 0.16398638486862183, + "block0_o_max_spectral_norm": 0.0416584312915802, + "block0_mlp_win_update_fnorm": 0.2938811182975769, + "block0_mlp_win_max_l1_linf_norm": 0.1746998131275177, + "block0_mlp_win_max_spectral_norm": 0.051365628838539124, + "block0_mlp_wout_update_fnorm": 0.25112468004226685, + "block0_mlp_wout_max_l1_linf_norm": 0.57256019115448, + "block0_mlp_wout_max_spectral_norm": 0.07466626167297363, + "block3_q_update_fnorm": 0.14940354228019714, + "block3_q_max_l1_linf_norm": 0.17772233486175537, + "block3_q_max_spectral_norm": 0.04580846428871155, + "block3_k_update_fnorm": 0.14370213449001312, + "block3_k_max_l1_linf_norm": 0.22454243898391724, + "block3_k_max_spectral_norm": 0.04051398113369942, + "block3_v_update_fnorm": 0.12411053478717804, + "block3_v_max_l1_linf_norm": 0.15610767900943756, + "block3_v_max_spectral_norm": 0.04053772985935211, + "block3_o_update_fnorm": 0.12936332821846008, + "block3_o_max_l1_linf_norm": 0.15044787526130676, + "block3_o_max_spectral_norm": 0.044912636280059814, + "block3_mlp_win_update_fnorm": 0.30643418431282043, + "block3_mlp_win_max_l1_linf_norm": 0.20296710729599, + "block3_mlp_win_max_spectral_norm": 0.07243499159812927, + "block3_mlp_wout_update_fnorm": 0.25953301787376404, + "block3_mlp_wout_max_l1_linf_norm": 0.5884454250335693, + "block3_mlp_wout_max_spectral_norm": 0.07830683141946793, + "block7_q_update_fnorm": 0.16072605550289154, + "block7_q_max_l1_linf_norm": 0.16835813224315643, + "block7_q_max_spectral_norm": 0.02701818384230137, + "block7_k_update_fnorm": 0.15302471816539764, + "block7_k_max_l1_linf_norm": 0.171339213848114, + "block7_k_max_spectral_norm": 0.031061740592122078, + "block7_v_update_fnorm": 0.12868058681488037, + "block7_v_max_l1_linf_norm": 0.13146600127220154, + "block7_v_max_spectral_norm": 0.0294073186814785, + "block7_o_update_fnorm": 0.14362263679504395, + "block7_o_max_l1_linf_norm": 0.13874131441116333, + "block7_o_max_spectral_norm": 0.028942624107003212, + "block7_mlp_win_update_fnorm": 0.30125072598457336, + "block7_mlp_win_max_l1_linf_norm": 0.16603076457977295, + "block7_mlp_win_max_spectral_norm": 0.05004073306918144, + "block7_mlp_wout_update_fnorm": 0.27140605449676514, + "block7_mlp_wout_max_l1_linf_norm": 0.5682758092880249, + "block7_mlp_wout_max_spectral_norm": 0.06704764068126678, + "block11_q_update_fnorm": 0.1582556515932083, + "block11_q_max_l1_linf_norm": 0.16084031760692596, + "block11_q_max_spectral_norm": 0.027464380487799644, + "block11_k_update_fnorm": 0.15162847936153412, + "block11_k_max_l1_linf_norm": 0.18303942680358887, + "block11_k_max_spectral_norm": 0.024675441905856133, + "block11_v_update_fnorm": 0.134083554148674, + "block11_v_max_l1_linf_norm": 0.15685579180717468, + "block11_v_max_spectral_norm": 0.03352329134941101, + "block11_o_update_fnorm": 0.1478126049041748, + "block11_o_max_l1_linf_norm": 0.14972999691963196, + "block11_o_max_spectral_norm": 0.033879950642585754, + "block11_mlp_win_update_fnorm": 0.31303659081459045, + "block11_mlp_win_max_l1_linf_norm": 0.1652635633945465, + "block11_mlp_win_max_spectral_norm": 0.05390956252813339, + "block11_mlp_wout_update_fnorm": 0.2770698070526123, + "block11_mlp_wout_max_l1_linf_norm": 0.5470181107521057, + 
"block11_mlp_wout_max_spectral_norm": 0.0818348228931427, + "total_sharpness": 0.007688366342335939, + "block_total_sharpness": 0.011367122642695904, + "v_norm_block": 1.735541582107544, + "v_T_H_v_block": 0.034238964319229126, + "v_norm": 2.1891286373138428, + "ip_v_neg_g_hvp": 0.06205519288778305, + "cos_v_neg_g_hvp": 0.07990709692239761, + "g_hvp_norm": 0.35474926233291626, + "ip_v_neg_g_t": 0.06237782537937164, + "cos_v_neg_g_t": 0.09451153874397278, + "g_t_norm": 0.301490843296051, + "g_norm": 0.35474926233291626, + "hv_norm": 0.43466469645500183, + "cos_v_hv": 0.038721390068531036, + "hg_norm": 3.731163501739502, + "cos_g_hg": 0.6819245219230652, + "v_parallel_norm": 0.011257987469434738, + "v_perp_norm": 2.1890995502471924, + "embed_lm_head_v_norm": 1.3342341184616089, + "embed_lm_head_cos_v_neg_g": 0.08292388916015625, + "layer_1_v_norm": 0.49208304286003113, + "layer_1_cos_v_neg_g": 0.14880162477493286, + "layer_2_v_norm": 0.5092917084693909, + "layer_2_cos_v_neg_g": 0.07606568187475204, + "layer_3_v_norm": 0.48899614810943604, + "layer_3_cos_v_neg_g": 0.06219722703099251, + "layer_4_v_norm": 0.4863094687461853, + "layer_4_cos_v_neg_g": 0.0707363709807396, + "layer_5_v_norm": 0.47708189487457275, + "layer_5_cos_v_neg_g": 0.05116119235754013, + "layer_6_v_norm": 0.4920320212841034, + "layer_6_cos_v_neg_g": 0.07133069634437561, + "layer_7_v_norm": 0.4955845773220062, + "layer_7_cos_v_neg_g": 0.08353083580732346, + "layer_8_v_norm": 0.5009586215019226, + "layer_8_cos_v_neg_g": 0.07992586493492126, + "layer_9_v_norm": 0.5053191184997559, + "layer_9_cos_v_neg_g": 0.07856655865907669, + "layer_10_v_norm": 0.5222657322883606, + "layer_10_cos_v_neg_g": 0.09060589969158173, + "layer_11_v_norm": 0.5271266102790833, + "layer_11_cos_v_neg_g": 0.09865335375070572, + "layer_12_v_norm": 0.5125707387924194, + "layer_12_cos_v_neg_g": 0.14164727926254272, + "block0_q_v_norm": 0.16109304130077362, + "block0_q_cos_v_neg_g": 0.22982968389987946, + "block0_k_v_norm": 0.15503792464733124, + "block0_k_cos_v_neg_g": 0.20256976783275604, + "block0_v_v_norm": 0.14617912471294403, + "block0_v_cos_v_neg_g": 0.19425728917121887, + "block0_o_v_norm": 0.1458231657743454, + "block0_o_cos_v_neg_g": 0.17850272357463837, + "block0_mlp_win_v_norm": 0.2938811182975769, + "block0_mlp_win_cos_v_neg_g": 0.1307239830493927, + "block0_mlp_wout_v_norm": 0.25112468004226685, + "block0_mlp_wout_cos_v_neg_g": 0.20061905682086945, + "block3_q_v_norm": 0.14940354228019714, + "block3_q_cos_v_neg_g": 0.09409365803003311, + "block3_k_v_norm": 0.14370213449001312, + "block3_k_cos_v_neg_g": 0.08953520655632019, + "block3_v_v_norm": 0.12411053478717804, + "block3_v_cos_v_neg_g": 0.046055883169174194, + "block3_o_v_norm": 0.12936332821846008, + "block3_o_cos_v_neg_g": 0.17058058083057404, + "block3_mlp_win_v_norm": 0.30643418431282043, + "block3_mlp_win_cos_v_neg_g": 0.07244936376810074, + "block3_mlp_wout_v_norm": 0.25953301787376404, + "block3_mlp_wout_cos_v_neg_g": 0.23295149207115173, + "block7_q_v_norm": 0.16072605550289154, + "block7_q_cos_v_neg_g": 0.0948965921998024, + "block7_k_v_norm": 0.15302471816539764, + "block7_k_cos_v_neg_g": 0.20689906179904938, + "block7_v_v_norm": 0.12868058681488037, + "block7_v_cos_v_neg_g": 0.06551281362771988, + "block7_o_v_norm": 0.14362263679504395, + "block7_o_cos_v_neg_g": 0.2231724113225937, + "block7_mlp_win_v_norm": 0.30125072598457336, + "block7_mlp_win_cos_v_neg_g": 0.11030875146389008, + "block7_mlp_wout_v_norm": 0.27140605449676514, + "block7_mlp_wout_cos_v_neg_g": 0.23696961998939514, 
+ "block11_q_v_norm": 0.1582556515932083, + "block11_q_cos_v_neg_g": 0.1470712423324585, + "block11_k_v_norm": 0.15162847936153412, + "block11_k_cos_v_neg_g": 0.19410394132137299, + "block11_v_v_norm": 0.134083554148674, + "block11_v_cos_v_neg_g": 0.08603288978338242, + "block11_o_v_norm": 0.1478126049041748, + "block11_o_cos_v_neg_g": 0.21162328124046326, + "block11_mlp_win_v_norm": 0.31303659081459045, + "block11_mlp_win_cos_v_neg_g": 0.15428394079208374, + "block11_mlp_wout_v_norm": 0.2770698070526123, + "block11_mlp_wout_cos_v_neg_g": 0.16990357637405396, + "embed_lm_head_sharpness": 0.0003170310810673982, + "layer_1_sharpness": 0.008991542272269726, + "layer_2_sharpness": 0.0006730868481099606, + "layer_3_sharpness": 0.0018111609388142824, + "layer_4_sharpness": 0.001484932261519134, + "layer_5_sharpness": 0.0012989894021302462, + "layer_6_sharpness": 0.0013485662639141083, + "layer_7_sharpness": 0.00183630280662328, + "layer_8_sharpness": 0.0027391721960157156, + "layer_9_sharpness": 0.0023789876140654087, + "layer_10_sharpness": 0.0013069488340988755, + "layer_11_sharpness": 0.0012349584139883518, + "layer_12_sharpness": 0.003396065440028906, + "block0_q_sharpness": 0.0035424549132585526, + "block0_k_sharpness": 0.0032603463623672724, + "block0_v_sharpness": 0.006013426464051008, + "block0_o_sharpness": 0.0050218719989061356, + "block0_mlp_win_sharpness": 0.0015085854101926088, + "block0_mlp_wout_sharpness": 0.004222501069307327, + "block3_q_sharpness": 0.0008526155143044889, + "block3_k_sharpness": 0.0007381511968560517, + "block3_v_sharpness": 0.0019782492890954018, + "block3_o_sharpness": 0.0015120991738513112, + "block3_mlp_win_sharpness": 0.00021900942374486476, + "block3_mlp_wout_sharpness": 0.0004185989673715085, + "block7_q_sharpness": 0.00015579810133203864, + "block7_k_sharpness": 0.00041756403516046703, + "block7_v_sharpness": 0.005239121615886688, + "block7_o_sharpness": 0.0006994017167016864, + "block7_mlp_win_sharpness": 0.0007936859037727118, + "block7_mlp_wout_sharpness": 0.0007705456810072064, + "block11_q_sharpness": 0.00019696236995514482, + "block11_k_sharpness": 0.0004293518722988665, + "block11_v_sharpness": 0.0010391444666311145, + "block11_o_sharpness": 0.00029301742324605584, + "block11_mlp_win_sharpness": 0.00100876041688025, + "block11_mlp_wout_sharpness": 0.003458264283835888, + "sum_layer_numerators": 0.007096021346368372, + "block_diag_sharpness": 0.0023558349819222807, + "cross_layer_sharpness": 0.009011287660773623 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..accd79be27c8b04794eaf327f87ce155c825a502 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_4500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1714046001434326, + "total_l1_linf_norm": 19227.578125, + "total_spectral_norm": 2.1714043617248535, + "embed_lm_head_update_fnorm": 1.328256607055664, + "embed_lm_head_max_l1_linf_norm": 0.3277861475944519, + "embed_lm_head_max_spectral_norm": 0.208640456199646, + "layer_1_update_fnorm": 0.4602302014827728, + "layer_1_max_l1_linf_norm": 0.5928559303283691, + "layer_1_max_spectral_norm": 0.0775192603468895, + "layer_2_update_fnorm": 0.48541468381881714, + 
"layer_2_max_l1_linf_norm": 0.579940915107727, + "layer_2_max_spectral_norm": 0.08244159817695618, + "layer_3_update_fnorm": 0.4899580776691437, + "layer_3_max_l1_linf_norm": 0.682616651058197, + "layer_3_max_spectral_norm": 0.08658310770988464, + "layer_4_update_fnorm": 0.48049190640449524, + "layer_4_max_l1_linf_norm": 0.593638002872467, + "layer_4_max_spectral_norm": 0.08646192401647568, + "layer_5_update_fnorm": 0.457284152507782, + "layer_5_max_l1_linf_norm": 0.4972172677516937, + "layer_5_max_spectral_norm": 0.06700348109006882, + "layer_6_update_fnorm": 0.4886195659637451, + "layer_6_max_l1_linf_norm": 0.5960761308670044, + "layer_6_max_spectral_norm": 0.0814308300614357, + "layer_7_update_fnorm": 0.4958381950855255, + "layer_7_max_l1_linf_norm": 0.6321161985397339, + "layer_7_max_spectral_norm": 0.07343313097953796, + "layer_8_update_fnorm": 0.4979635775089264, + "layer_8_max_l1_linf_norm": 0.5996477603912354, + "layer_8_max_spectral_norm": 0.06968145817518234, + "layer_9_update_fnorm": 0.5097531080245972, + "layer_9_max_l1_linf_norm": 0.534991979598999, + "layer_9_max_spectral_norm": 0.056283459067344666, + "layer_10_update_fnorm": 0.5274613499641418, + "layer_10_max_l1_linf_norm": 0.5596722364425659, + "layer_10_max_spectral_norm": 0.04699961841106415, + "layer_11_update_fnorm": 0.5314599275588989, + "layer_11_max_l1_linf_norm": 0.5569703578948975, + "layer_11_max_spectral_norm": 0.0483320876955986, + "layer_12_update_fnorm": 0.5197756290435791, + "layer_12_max_l1_linf_norm": 0.5646744966506958, + "layer_12_max_spectral_norm": 0.0780535340309143, + "block0_q_update_fnorm": 0.13045501708984375, + "block0_q_max_l1_linf_norm": 0.17689728736877441, + "block0_q_max_spectral_norm": 0.05051816999912262, + "block0_k_update_fnorm": 0.12522707879543304, + "block0_k_max_l1_linf_norm": 0.1768093705177307, + "block0_k_max_spectral_norm": 0.06039140745997429, + "block0_v_update_fnorm": 0.12934234738349915, + "block0_v_max_l1_linf_norm": 0.16123272478580475, + "block0_v_max_spectral_norm": 0.05200298875570297, + "block0_o_update_fnorm": 0.12987235188484192, + "block0_o_max_l1_linf_norm": 0.1423969864845276, + "block0_o_max_spectral_norm": 0.04127703234553337, + "block0_mlp_win_update_fnorm": 0.2886350750923157, + "block0_mlp_win_max_l1_linf_norm": 0.16508272290229797, + "block0_mlp_win_max_spectral_norm": 0.0522451177239418, + "block0_mlp_wout_update_fnorm": 0.2492254376411438, + "block0_mlp_wout_max_l1_linf_norm": 0.5928559303283691, + "block0_mlp_wout_max_spectral_norm": 0.0775192603468895, + "block3_q_update_fnorm": 0.1381329894065857, + "block3_q_max_l1_linf_norm": 0.15387415885925293, + "block3_q_max_spectral_norm": 0.03858722746372223, + "block3_k_update_fnorm": 0.13617156445980072, + "block3_k_max_l1_linf_norm": 0.22371989488601685, + "block3_k_max_spectral_norm": 0.032224997878074646, + "block3_v_update_fnorm": 0.12241804599761963, + "block3_v_max_l1_linf_norm": 0.17147023975849152, + "block3_v_max_spectral_norm": 0.04086342081427574, + "block3_o_update_fnorm": 0.1280902475118637, + "block3_o_max_l1_linf_norm": 0.14434798061847687, + "block3_o_max_spectral_norm": 0.04662386327981949, + "block3_mlp_win_update_fnorm": 0.30488964915275574, + "block3_mlp_win_max_l1_linf_norm": 0.225664883852005, + "block3_mlp_win_max_spectral_norm": 0.0712662860751152, + "block3_mlp_wout_update_fnorm": 0.2622353136539459, + "block3_mlp_wout_max_l1_linf_norm": 0.593638002872467, + "block3_mlp_wout_max_spectral_norm": 0.08646192401647568, + "block7_q_update_fnorm": 0.15607118606567383, + 
"block7_q_max_l1_linf_norm": 0.15132969617843628, + "block7_q_max_spectral_norm": 0.02556481398642063, + "block7_k_update_fnorm": 0.14752508699893951, + "block7_k_max_l1_linf_norm": 0.1635173112154007, + "block7_k_max_spectral_norm": 0.02544447034597397, + "block7_v_update_fnorm": 0.1309301257133484, + "block7_v_max_l1_linf_norm": 0.13563372194766998, + "block7_v_max_spectral_norm": 0.028869183734059334, + "block7_o_update_fnorm": 0.14350241422653198, + "block7_o_max_l1_linf_norm": 0.13527265191078186, + "block7_o_max_spectral_norm": 0.029354076832532883, + "block7_mlp_win_update_fnorm": 0.3015453815460205, + "block7_mlp_win_max_l1_linf_norm": 0.17108747363090515, + "block7_mlp_win_max_spectral_norm": 0.04847593978047371, + "block7_mlp_wout_update_fnorm": 0.27031785249710083, + "block7_mlp_wout_max_l1_linf_norm": 0.5996477603912354, + "block7_mlp_wout_max_spectral_norm": 0.06968145817518234, + "block11_q_update_fnorm": 0.1599021852016449, + "block11_q_max_l1_linf_norm": 0.15825098752975464, + "block11_q_max_spectral_norm": 0.03018535114824772, + "block11_k_update_fnorm": 0.15239639580249786, + "block11_k_max_l1_linf_norm": 0.19957002997398376, + "block11_k_max_spectral_norm": 0.026317385956645012, + "block11_v_update_fnorm": 0.13928453624248505, + "block11_v_max_l1_linf_norm": 0.15541860461235046, + "block11_v_max_spectral_norm": 0.035902414470911026, + "block11_o_update_fnorm": 0.1500517874956131, + "block11_o_max_l1_linf_norm": 0.15048348903656006, + "block11_o_max_spectral_norm": 0.036396004259586334, + "block11_mlp_win_update_fnorm": 0.3169000744819641, + "block11_mlp_win_max_l1_linf_norm": 0.18123260140419006, + "block11_mlp_win_max_spectral_norm": 0.0522601343691349, + "block11_mlp_wout_update_fnorm": 0.28091660141944885, + "block11_mlp_wout_max_l1_linf_norm": 0.5646744966506958, + "block11_mlp_wout_max_spectral_norm": 0.0780535340309143, + "total_sharpness": 0.007213624194264412, + "block_total_sharpness": 0.010646353475749493, + "v_norm_block": 1.7177695035934448, + "v_T_H_v_block": 0.031414538621902466, + "v_norm": 2.1714046001434326, + "ip_v_neg_g_hvp": 0.061302632093429565, + "cos_v_neg_g_hvp": 0.07842967659235, + "g_hvp_norm": 0.359963059425354, + "ip_v_neg_g_t": 0.06178080663084984, + "cos_v_neg_g_t": 0.09238758683204651, + "g_t_norm": 0.30796346068382263, + "g_norm": 0.359963059425354, + "hv_norm": 0.4360343813896179, + "cos_v_hv": 0.035923078656196594, + "hg_norm": 4.588783264160156, + "cos_g_hg": 0.6477633714675903, + "v_parallel_norm": 0.01171993836760521, + "v_perp_norm": 2.171373128890991, + "embed_lm_head_v_norm": 1.328256607055664, + "embed_lm_head_cos_v_neg_g": 0.08055899292230606, + "layer_1_v_norm": 0.4602302014827728, + "layer_1_cos_v_neg_g": 0.15918709337711334, + "layer_2_v_norm": 0.48541468381881714, + "layer_2_cos_v_neg_g": 0.07396949827671051, + "layer_3_v_norm": 0.48995810747146606, + "layer_3_cos_v_neg_g": 0.05986905097961426, + "layer_4_v_norm": 0.48049190640449524, + "layer_4_cos_v_neg_g": 0.06767325103282928, + "layer_5_v_norm": 0.457284152507782, + "layer_5_cos_v_neg_g": 0.04772544279694557, + "layer_6_v_norm": 0.4886195659637451, + "layer_6_cos_v_neg_g": 0.06314989179372787, + "layer_7_v_norm": 0.4958381950855255, + "layer_7_cos_v_neg_g": 0.07806608080863953, + "layer_8_v_norm": 0.4979635775089264, + "layer_8_cos_v_neg_g": 0.0745483785867691, + "layer_9_v_norm": 0.5097531080245972, + "layer_9_cos_v_neg_g": 0.07351768016815186, + "layer_10_v_norm": 0.5274613499641418, + "layer_10_cos_v_neg_g": 0.08582132309675217, + "layer_11_v_norm": 0.5314599275588989, 
+ "layer_11_cos_v_neg_g": 0.10101144015789032, + "layer_12_v_norm": 0.5197756290435791, + "layer_12_cos_v_neg_g": 0.1471850872039795, + "block0_q_v_norm": 0.13045501708984375, + "block0_q_cos_v_neg_g": 0.20247946679592133, + "block0_k_v_norm": 0.12522707879543304, + "block0_k_cos_v_neg_g": 0.19688791036605835, + "block0_v_v_norm": 0.12934234738349915, + "block0_v_cos_v_neg_g": 0.2835932672023773, + "block0_o_v_norm": 0.12987235188484192, + "block0_o_cos_v_neg_g": 0.21251004934310913, + "block0_mlp_win_v_norm": 0.2886350750923157, + "block0_mlp_win_cos_v_neg_g": 0.11569817364215851, + "block0_mlp_wout_v_norm": 0.2492254376411438, + "block0_mlp_wout_cos_v_neg_g": 0.20974047482013702, + "block3_q_v_norm": 0.1381329894065857, + "block3_q_cos_v_neg_g": 0.07988820970058441, + "block3_k_v_norm": 0.13617156445980072, + "block3_k_cos_v_neg_g": 0.10122432559728622, + "block3_v_v_norm": 0.12241804599761963, + "block3_v_cos_v_neg_g": 0.0483393520116806, + "block3_o_v_norm": 0.1280902475118637, + "block3_o_cos_v_neg_g": 0.1759917438030243, + "block3_mlp_win_v_norm": 0.30488964915275574, + "block3_mlp_win_cos_v_neg_g": 0.06644876301288605, + "block3_mlp_wout_v_norm": 0.2622353136539459, + "block3_mlp_wout_cos_v_neg_g": 0.23551331460475922, + "block7_q_v_norm": 0.15607118606567383, + "block7_q_cos_v_neg_g": 0.08871952444314957, + "block7_k_v_norm": 0.14752508699893951, + "block7_k_cos_v_neg_g": 0.20292195677757263, + "block7_v_v_norm": 0.1309301257133484, + "block7_v_cos_v_neg_g": 0.05814090743660927, + "block7_o_v_norm": 0.14350241422653198, + "block7_o_cos_v_neg_g": 0.22232723236083984, + "block7_mlp_win_v_norm": 0.3015453815460205, + "block7_mlp_win_cos_v_neg_g": 0.09602958709001541, + "block7_mlp_wout_v_norm": 0.27031785249710083, + "block7_mlp_wout_cos_v_neg_g": 0.23182116448879242, + "block11_q_v_norm": 0.1599021852016449, + "block11_q_cos_v_neg_g": 0.13948294520378113, + "block11_k_v_norm": 0.15239639580249786, + "block11_k_cos_v_neg_g": 0.18654198944568634, + "block11_v_v_norm": 0.13928453624248505, + "block11_v_cos_v_neg_g": 0.09768994152545929, + "block11_o_v_norm": 0.1500517874956131, + "block11_o_cos_v_neg_g": 0.23717890679836273, + "block11_mlp_win_v_norm": 0.3169000744819641, + "block11_mlp_win_cos_v_neg_g": 0.14534758031368256, + "block11_mlp_wout_v_norm": 0.28091660141944885, + "block11_mlp_wout_cos_v_neg_g": 0.19301900267601013, + "embed_lm_head_sharpness": 0.0003547882952261716, + "layer_1_sharpness": 0.011375944130122662, + "layer_2_sharpness": 0.0007091548177413642, + "layer_3_sharpness": 0.0014248951338231564, + "layer_4_sharpness": 0.001515766023658216, + "layer_5_sharpness": 0.0009693836909718812, + "layer_6_sharpness": 0.001312196720391512, + "layer_7_sharpness": 0.0017213383689522743, + "layer_8_sharpness": 0.002261198591440916, + "layer_9_sharpness": 0.0019524507224559784, + "layer_10_sharpness": 0.0010832739062607288, + "layer_11_sharpness": 0.0011924062855541706, + "layer_12_sharpness": 0.003330220002681017, + "block0_q_sharpness": 0.0007931244326755404, + "block0_k_sharpness": 0.0015225928509607911, + "block0_v_sharpness": 0.018206335604190826, + "block0_o_sharpness": 0.007187642622739077, + "block0_mlp_win_sharpness": 0.0016106012044474483, + "block0_mlp_wout_sharpness": 0.004503688309341669, + "block3_q_sharpness": 0.0006697451462969184, + "block3_k_sharpness": 0.0006007247720845044, + "block3_v_sharpness": 0.002237583277747035, + "block3_o_sharpness": 0.0015247836709022522, + "block3_mlp_win_sharpness": 0.00018496965640224516, + "block3_mlp_wout_sharpness": 
0.0004741017473861575, + "block7_q_sharpness": 0.0001722215674817562, + "block7_k_sharpness": 0.0003806043532676995, + "block7_v_sharpness": 0.003741338849067688, + "block7_o_sharpness": 0.00056642503477633, + "block7_mlp_win_sharpness": 0.0006870866054669023, + "block7_mlp_wout_sharpness": 0.0007697161636315286, + "block11_q_sharpness": 0.00024018669500946999, + "block11_k_sharpness": 0.00032997020753100514, + "block11_v_sharpness": 0.0012037535198032856, + "block11_o_sharpness": 0.0003403603914193809, + "block11_mlp_win_sharpness": 0.0009302624966949224, + "block11_mlp_wout_sharpness": 0.0032290176022797823, + "sum_layer_numerators": 0.0068137943878701975, + "block_diag_sharpness": 0.0023091877649533085, + "cross_layer_sharpness": 0.008337165710796185 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..bfa28629e2d2c7e5e607140ecadeb420aa48b7d5 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.1538681983947754, + "total_l1_linf_norm": 9460.552734375, + "total_spectral_norm": 1.1538680791854858, + "embed_lm_head_update_fnorm": 0.9410490393638611, + "embed_lm_head_max_l1_linf_norm": 0.2529723048210144, + "embed_lm_head_max_spectral_norm": 0.285024493932724, + "layer_1_update_fnorm": 0.17564398050308228, + "layer_1_max_l1_linf_norm": 0.2006392478942871, + "layer_1_max_spectral_norm": 0.041556958109140396, + "layer_2_update_fnorm": 0.17763946950435638, + "layer_2_max_l1_linf_norm": 0.22137153148651123, + "layer_2_max_spectral_norm": 0.04154994711279869, + "layer_3_update_fnorm": 0.18698838353157043, + "layer_3_max_l1_linf_norm": 0.22851359844207764, + "layer_3_max_spectral_norm": 0.0462174154818058, + "layer_4_update_fnorm": 0.191396102309227, + "layer_4_max_l1_linf_norm": 0.24451033771038055, + "layer_4_max_spectral_norm": 0.04505180940032005, + "layer_5_update_fnorm": 0.19065822660923004, + "layer_5_max_l1_linf_norm": 0.24559208750724792, + "layer_5_max_spectral_norm": 0.04096437245607376, + "layer_6_update_fnorm": 0.18919041752815247, + "layer_6_max_l1_linf_norm": 0.24030031263828278, + "layer_6_max_spectral_norm": 0.040544383227825165, + "layer_7_update_fnorm": 0.1977671980857849, + "layer_7_max_l1_linf_norm": 0.2586372494697571, + "layer_7_max_spectral_norm": 0.04011640325188637, + "layer_8_update_fnorm": 0.20145903527736664, + "layer_8_max_l1_linf_norm": 0.2725108861923218, + "layer_8_max_spectral_norm": 0.040870483964681625, + "layer_9_update_fnorm": 0.20761221647262573, + "layer_9_max_l1_linf_norm": 0.2645640969276428, + "layer_9_max_spectral_norm": 0.04096290469169617, + "layer_10_update_fnorm": 0.2088020145893097, + "layer_10_max_l1_linf_norm": 0.2764626741409302, + "layer_10_max_spectral_norm": 0.042024947702884674, + "layer_11_update_fnorm": 0.20117639005184174, + "layer_11_max_l1_linf_norm": 0.27702051401138306, + "layer_11_max_spectral_norm": 0.03914739191532135, + "layer_12_update_fnorm": 0.18116354942321777, + "layer_12_max_l1_linf_norm": 0.2681461274623871, + "layer_12_max_spectral_norm": 0.05556364357471466, + "block0_q_update_fnorm": 0.06679651886224747, + "block0_q_max_l1_linf_norm": 0.10276146233081818, + "block0_q_max_spectral_norm": 
0.030021511018276215, + "block0_k_update_fnorm": 0.060117948800325394, + "block0_k_max_l1_linf_norm": 0.13634729385375977, + "block0_k_max_spectral_norm": 0.041556958109140396, + "block0_v_update_fnorm": 0.029743928462266922, + "block0_v_max_l1_linf_norm": 0.07423052191734314, + "block0_v_max_spectral_norm": 0.0208964254707098, + "block0_o_update_fnorm": 0.04207603633403778, + "block0_o_max_l1_linf_norm": 0.0673215463757515, + "block0_o_max_spectral_norm": 0.017227111384272575, + "block0_mlp_win_update_fnorm": 0.1069423109292984, + "block0_mlp_win_max_l1_linf_norm": 0.08022233843803406, + "block0_mlp_win_max_spectral_norm": 0.0368657112121582, + "block0_mlp_wout_update_fnorm": 0.09285100549459457, + "block0_mlp_wout_max_l1_linf_norm": 0.2006392478942871, + "block0_mlp_wout_max_spectral_norm": 0.03024272993206978, + "block3_q_update_fnorm": 0.06958705931901932, + "block3_q_max_l1_linf_norm": 0.09224720299243927, + "block3_q_max_spectral_norm": 0.028940433636307716, + "block3_k_update_fnorm": 0.06058286502957344, + "block3_k_max_l1_linf_norm": 0.12106315046548843, + "block3_k_max_spectral_norm": 0.024871526286005974, + "block3_v_update_fnorm": 0.04510148987174034, + "block3_v_max_l1_linf_norm": 0.06066995859146118, + "block3_v_max_spectral_norm": 0.022187508642673492, + "block3_o_update_fnorm": 0.04903658479452133, + "block3_o_max_l1_linf_norm": 0.0701938271522522, + "block3_o_max_spectral_norm": 0.023209504783153534, + "block3_mlp_win_update_fnorm": 0.11183445155620575, + "block3_mlp_win_max_l1_linf_norm": 0.09062269330024719, + "block3_mlp_win_max_spectral_norm": 0.04505180940032005, + "block3_mlp_wout_update_fnorm": 0.10563614964485168, + "block3_mlp_wout_max_l1_linf_norm": 0.24451033771038055, + "block3_mlp_wout_max_spectral_norm": 0.04311978444457054, + "block7_q_update_fnorm": 0.07073264569044113, + "block7_q_max_l1_linf_norm": 0.10933011770248413, + "block7_q_max_spectral_norm": 0.02513817511498928, + "block7_k_update_fnorm": 0.057805899530649185, + "block7_k_max_l1_linf_norm": 0.09326235949993134, + "block7_k_max_spectral_norm": 0.021846961230039597, + "block7_v_update_fnorm": 0.04822782427072525, + "block7_v_max_l1_linf_norm": 0.07175010442733765, + "block7_v_max_spectral_norm": 0.02236437238752842, + "block7_o_update_fnorm": 0.057905346155166626, + "block7_o_max_l1_linf_norm": 0.08186528086662292, + "block7_o_max_spectral_norm": 0.02248983085155487, + "block7_mlp_win_update_fnorm": 0.11838364601135254, + "block7_mlp_win_max_l1_linf_norm": 0.09325426816940308, + "block7_mlp_win_max_spectral_norm": 0.040870483964681625, + "block7_mlp_wout_update_fnorm": 0.11193069070577621, + "block7_mlp_wout_max_l1_linf_norm": 0.2725108861923218, + "block7_mlp_wout_max_spectral_norm": 0.036048829555511475, + "block11_q_update_fnorm": 0.06512198597192764, + "block11_q_max_l1_linf_norm": 0.11121976375579834, + "block11_q_max_spectral_norm": 0.027774924412369728, + "block11_k_update_fnorm": 0.05110614001750946, + "block11_k_max_l1_linf_norm": 0.12287275493144989, + "block11_k_max_spectral_norm": 0.018738048151135445, + "block11_v_update_fnorm": 0.04262567684054375, + "block11_v_max_l1_linf_norm": 0.06609579920768738, + "block11_v_max_spectral_norm": 0.016834773123264313, + "block11_o_update_fnorm": 0.05349912494421005, + "block11_o_max_l1_linf_norm": 0.07959578931331635, + "block11_o_max_spectral_norm": 0.02061992883682251, + "block11_mlp_win_update_fnorm": 0.10437475889921188, + "block11_mlp_win_max_l1_linf_norm": 0.09275367110967636, + "block11_mlp_win_max_spectral_norm": 0.05556364357471466, + 
"block11_mlp_wout_update_fnorm": 0.10189522057771683, + "block11_mlp_wout_max_l1_linf_norm": 0.2681461274623871, + "block11_mlp_wout_max_spectral_norm": 0.0434965081512928, + "total_sharpness": 0.030402304604649544, + "block_total_sharpness": 0.08248845487833023, + "v_norm_block": 0.6677111387252808, + "v_T_H_v_block": 0.036776501685380936, + "v_norm": 1.1538681983947754, + "ip_v_neg_g_hvp": 0.051432304084300995, + "cos_v_neg_g_hvp": 0.09895605593919754, + "g_hvp_norm": 0.4504404664039612, + "ip_v_neg_g_t": 0.05165694281458855, + "cos_v_neg_g_t": 0.11456763744354248, + "g_t_norm": 0.3907603919506073, + "g_norm": 0.4504404664039612, + "hv_norm": 0.4272937774658203, + "cos_v_hv": 0.08209866285324097, + "hg_norm": 5.7117695808410645, + "cos_g_hg": 0.6482523083686829, + "v_parallel_norm": 0.018312130123376846, + "v_perp_norm": 1.153722882270813, + "embed_lm_head_v_norm": 0.9410490393638611, + "embed_lm_head_cos_v_neg_g": 0.07823708653450012, + "layer_1_v_norm": 0.17564398050308228, + "layer_1_cos_v_neg_g": 0.1422571837902069, + "layer_2_v_norm": 0.17763946950435638, + "layer_2_cos_v_neg_g": 0.14777764678001404, + "layer_3_v_norm": 0.18698838353157043, + "layer_3_cos_v_neg_g": 0.1641964614391327, + "layer_4_v_norm": 0.191396102309227, + "layer_4_cos_v_neg_g": 0.19829794764518738, + "layer_5_v_norm": 0.19065822660923004, + "layer_5_cos_v_neg_g": 0.21091537177562714, + "layer_6_v_norm": 0.18919041752815247, + "layer_6_cos_v_neg_g": 0.19368405640125275, + "layer_7_v_norm": 0.1977671980857849, + "layer_7_cos_v_neg_g": 0.20540276169776917, + "layer_8_v_norm": 0.20145903527736664, + "layer_8_cos_v_neg_g": 0.19924503564834595, + "layer_9_v_norm": 0.20761221647262573, + "layer_9_cos_v_neg_g": 0.20226585865020752, + "layer_10_v_norm": 0.2088020145893097, + "layer_10_cos_v_neg_g": 0.2031753957271576, + "layer_11_v_norm": 0.20117639005184174, + "layer_11_cos_v_neg_g": 0.2033512443304062, + "layer_12_v_norm": 0.18116354942321777, + "layer_12_cos_v_neg_g": 0.22994935512542725, + "block0_q_v_norm": 0.06679651886224747, + "block0_q_cos_v_neg_g": 0.21090708673000336, + "block0_k_v_norm": 0.060117948800325394, + "block0_k_cos_v_neg_g": 0.26476868987083435, + "block0_v_v_norm": 0.029743928462266922, + "block0_v_cos_v_neg_g": 0.15552636981010437, + "block0_o_v_norm": 0.04207603633403778, + "block0_o_cos_v_neg_g": 0.18593791127204895, + "block0_mlp_win_v_norm": 0.1069423109292984, + "block0_mlp_win_cos_v_neg_g": 0.2144005298614502, + "block0_mlp_wout_v_norm": 0.09285100549459457, + "block0_mlp_wout_cos_v_neg_g": 0.21715562045574188, + "block3_q_v_norm": 0.06958705931901932, + "block3_q_cos_v_neg_g": 0.30327123403549194, + "block3_k_v_norm": 0.06058286502957344, + "block3_k_cos_v_neg_g": 0.2696244716644287, + "block3_v_v_norm": 0.04510148987174034, + "block3_v_cos_v_neg_g": 0.19737714529037476, + "block3_o_v_norm": 0.04903658479452133, + "block3_o_cos_v_neg_g": 0.16700899600982666, + "block3_mlp_win_v_norm": 0.11183445155620575, + "block3_mlp_win_cos_v_neg_g": 0.2450781613588333, + "block3_mlp_wout_v_norm": 0.10563614964485168, + "block3_mlp_wout_cos_v_neg_g": 0.2299453616142273, + "block7_q_v_norm": 0.07073264569044113, + "block7_q_cos_v_neg_g": 0.2147141546010971, + "block7_k_v_norm": 0.057805899530649185, + "block7_k_cos_v_neg_g": 0.20485752820968628, + "block7_v_v_norm": 0.04822782427072525, + "block7_v_cos_v_neg_g": 0.16887862980365753, + "block7_o_v_norm": 0.057905346155166626, + "block7_o_cos_v_neg_g": 0.21697476506233215, + "block7_mlp_win_v_norm": 0.11838364601135254, + "block7_mlp_win_cos_v_neg_g": 
0.23939712345600128, + "block7_mlp_wout_v_norm": 0.11193069070577621, + "block7_mlp_wout_cos_v_neg_g": 0.24320083856582642, + "block11_q_v_norm": 0.06512198597192764, + "block11_q_cos_v_neg_g": 0.23193442821502686, + "block11_k_v_norm": 0.05110614001750946, + "block11_k_cos_v_neg_g": 0.20190097391605377, + "block11_v_v_norm": 0.04262567684054375, + "block11_v_cos_v_neg_g": 0.189165398478508, + "block11_o_v_norm": 0.05349912494421005, + "block11_o_cos_v_neg_g": 0.2040574997663498, + "block11_mlp_win_v_norm": 0.10437475889921188, + "block11_mlp_win_cos_v_neg_g": 0.2954693138599396, + "block11_mlp_wout_v_norm": 0.10189522057771683, + "block11_mlp_wout_cos_v_neg_g": 0.22471226751804352, + "embed_lm_head_sharpness": 0.000898602360393852, + "layer_1_sharpness": 0.10623829066753387, + "layer_2_sharpness": 0.014829994179308414, + "layer_3_sharpness": 0.008769461885094643, + "layer_4_sharpness": 0.010914522223174572, + "layer_5_sharpness": 0.010687652043998241, + "layer_6_sharpness": 0.008448190987110138, + "layer_7_sharpness": 0.007558693177998066, + "layer_8_sharpness": 0.004609005060046911, + "layer_9_sharpness": 0.004590086173266172, + "layer_10_sharpness": 0.004167275503277779, + "layer_11_sharpness": 0.005109856370836496, + "layer_12_sharpness": 0.03497932851314545, + "block0_q_sharpness": 0.031633295118808746, + "block0_k_sharpness": 0.0388767272233963, + "block0_v_sharpness": 0.1625358909368515, + "block0_o_sharpness": 0.2905043661594391, + "block0_mlp_win_sharpness": 0.011301043443381786, + "block0_mlp_wout_sharpness": 0.029259420931339264, + "block3_q_sharpness": 0.001207047258503735, + "block3_k_sharpness": 0.0016684014117345214, + "block3_v_sharpness": 0.006002156063914299, + "block3_o_sharpness": 0.004851722624152899, + "block3_mlp_win_sharpness": 0.002777335001155734, + "block3_mlp_wout_sharpness": 0.00503937341272831, + "block7_q_sharpness": 0.0008216265123337507, + "block7_k_sharpness": 0.008069741539657116, + "block7_v_sharpness": 0.002668266650289297, + "block7_o_sharpness": 0.0024337805807590485, + "block7_mlp_win_sharpness": 0.0007251654169522226, + "block7_mlp_wout_sharpness": 0.0018709090072661638, + "block11_q_sharpness": 0.0008473506895825267, + "block11_k_sharpness": 0.003118074731901288, + "block11_v_sharpness": 0.007654620334506035, + "block11_o_sharpness": 0.0023095242213457823, + "block11_mlp_win_sharpness": 0.05396059900522232, + "block11_mlp_wout_sharpness": 0.0045811934396624565, + "sum_layer_numerators": 0.007359906303463927, + "block_diag_sharpness": 0.01650802215896395, + "cross_layer_sharpness": 0.06598043271936628 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..26835674247903da450adf16c9b12a831ce40083 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.213982582092285, + "total_l1_linf_norm": 19677.25, + "total_spectral_norm": 2.2139830589294434, + "embed_lm_head_update_fnorm": 1.3397654294967651, + "embed_lm_head_max_l1_linf_norm": 0.347162127494812, + "embed_lm_head_max_spectral_norm": 0.20990274846553802, + "layer_1_update_fnorm": 0.5004397034645081, + "layer_1_max_l1_linf_norm": 0.6449532508850098, + 
"layer_1_max_spectral_norm": 0.08442419022321701, + "layer_2_update_fnorm": 0.5034364461898804, + "layer_2_max_l1_linf_norm": 0.5963749885559082, + "layer_2_max_spectral_norm": 0.08356235176324844, + "layer_3_update_fnorm": 0.4984531104564667, + "layer_3_max_l1_linf_norm": 0.6306765079498291, + "layer_3_max_spectral_norm": 0.08879195898771286, + "layer_4_update_fnorm": 0.49501341581344604, + "layer_4_max_l1_linf_norm": 0.6337766051292419, + "layer_4_max_spectral_norm": 0.08748781681060791, + "layer_5_update_fnorm": 0.47404736280441284, + "layer_5_max_l1_linf_norm": 0.5714223384857178, + "layer_5_max_spectral_norm": 0.06882839649915695, + "layer_6_update_fnorm": 0.497662216424942, + "layer_6_max_l1_linf_norm": 0.6249728202819824, + "layer_6_max_spectral_norm": 0.08419346809387207, + "layer_7_update_fnorm": 0.5065360069274902, + "layer_7_max_l1_linf_norm": 0.5940392017364502, + "layer_7_max_spectral_norm": 0.07959287613630295, + "layer_8_update_fnorm": 0.5081648826599121, + "layer_8_max_l1_linf_norm": 0.5875144600868225, + "layer_8_max_spectral_norm": 0.07629109174013138, + "layer_9_update_fnorm": 0.5166765451431274, + "layer_9_max_l1_linf_norm": 0.5522778630256653, + "layer_9_max_spectral_norm": 0.06082853302359581, + "layer_10_update_fnorm": 0.5325005054473877, + "layer_10_max_l1_linf_norm": 0.550285816192627, + "layer_10_max_spectral_norm": 0.04691718891263008, + "layer_11_update_fnorm": 0.5379926562309265, + "layer_11_max_l1_linf_norm": 0.5622991323471069, + "layer_11_max_spectral_norm": 0.05048074573278427, + "layer_12_update_fnorm": 0.5312445163726807, + "layer_12_max_l1_linf_norm": 0.5771653652191162, + "layer_12_max_spectral_norm": 0.07796559482812881, + "block0_q_update_fnorm": 0.15896455943584442, + "block0_q_max_l1_linf_norm": 0.27399349212646484, + "block0_q_max_spectral_norm": 0.0751919150352478, + "block0_k_update_fnorm": 0.15326163172721863, + "block0_k_max_l1_linf_norm": 0.2866848111152649, + "block0_k_max_spectral_norm": 0.07757112383842468, + "block0_v_update_fnorm": 0.14999130368232727, + "block0_v_max_l1_linf_norm": 0.19696828722953796, + "block0_v_max_spectral_norm": 0.061442941427230835, + "block0_o_update_fnorm": 0.15389259159564972, + "block0_o_max_l1_linf_norm": 0.1632968783378601, + "block0_o_max_spectral_norm": 0.054967205971479416, + "block0_mlp_win_update_fnorm": 0.29586347937583923, + "block0_mlp_win_max_l1_linf_norm": 0.15776152908802032, + "block0_mlp_win_max_spectral_norm": 0.05154173821210861, + "block0_mlp_wout_update_fnorm": 0.2604864239692688, + "block0_mlp_wout_max_l1_linf_norm": 0.6449532508850098, + "block0_mlp_wout_max_spectral_norm": 0.08442419022321701, + "block3_q_update_fnorm": 0.15375496447086334, + "block3_q_max_l1_linf_norm": 0.20351889729499817, + "block3_q_max_spectral_norm": 0.05575418099761009, + "block3_k_update_fnorm": 0.14518938958644867, + "block3_k_max_l1_linf_norm": 0.22757278382778168, + "block3_k_max_spectral_norm": 0.03767891228199005, + "block3_v_update_fnorm": 0.12769439816474915, + "block3_v_max_l1_linf_norm": 0.15140950679779053, + "block3_v_max_spectral_norm": 0.042803701013326645, + "block3_o_update_fnorm": 0.13303782045841217, + "block3_o_max_l1_linf_norm": 0.158437579870224, + "block3_o_max_spectral_norm": 0.046513743698596954, + "block3_mlp_win_update_fnorm": 0.30923551321029663, + "block3_mlp_win_max_l1_linf_norm": 0.19406741857528687, + "block3_mlp_win_max_spectral_norm": 0.07104148715734482, + "block3_mlp_wout_update_fnorm": 0.26560941338539124, + "block3_mlp_wout_max_l1_linf_norm": 0.6337766051292419, + 
"block3_mlp_wout_max_spectral_norm": 0.08748781681060791, + "block7_q_update_fnorm": 0.16044287383556366, + "block7_q_max_l1_linf_norm": 0.17223581671714783, + "block7_q_max_spectral_norm": 0.026474669575691223, + "block7_k_update_fnorm": 0.15272951126098633, + "block7_k_max_l1_linf_norm": 0.16757455468177795, + "block7_k_max_spectral_norm": 0.028538774698972702, + "block7_v_update_fnorm": 0.13445907831192017, + "block7_v_max_l1_linf_norm": 0.13729678094387054, + "block7_v_max_spectral_norm": 0.027476387098431587, + "block7_o_update_fnorm": 0.14646179974079132, + "block7_o_max_l1_linf_norm": 0.1365547627210617, + "block7_o_max_spectral_norm": 0.028255624696612358, + "block7_mlp_win_update_fnorm": 0.30636197328567505, + "block7_mlp_win_max_l1_linf_norm": 0.17328372597694397, + "block7_mlp_win_max_spectral_norm": 0.047728657722473145, + "block7_mlp_wout_update_fnorm": 0.27506154775619507, + "block7_mlp_wout_max_l1_linf_norm": 0.5875144600868225, + "block7_mlp_wout_max_spectral_norm": 0.07629109174013138, + "block11_q_update_fnorm": 0.1648419350385666, + "block11_q_max_l1_linf_norm": 0.1678633987903595, + "block11_q_max_spectral_norm": 0.02956724911928177, + "block11_k_update_fnorm": 0.15677890181541443, + "block11_k_max_l1_linf_norm": 0.16456860303878784, + "block11_k_max_spectral_norm": 0.023607317358255386, + "block11_v_update_fnorm": 0.14539946615695953, + "block11_v_max_l1_linf_norm": 0.149316668510437, + "block11_v_max_spectral_norm": 0.036389753222465515, + "block11_o_update_fnorm": 0.15481458604335785, + "block11_o_max_l1_linf_norm": 0.1593034565448761, + "block11_o_max_spectral_norm": 0.03890617936849594, + "block11_mlp_win_update_fnorm": 0.32092443108558655, + "block11_mlp_win_max_l1_linf_norm": 0.21605801582336426, + "block11_mlp_win_max_spectral_norm": 0.05234043300151825, + "block11_mlp_wout_update_fnorm": 0.28677451610565186, + "block11_mlp_wout_max_l1_linf_norm": 0.5771653652191162, + "block11_mlp_wout_max_spectral_norm": 0.07796559482812881, + "total_sharpness": 0.008819415234029293, + "block_total_sharpness": 0.012906252406537533, + "v_norm_block": 1.7625973224639893, + "v_T_H_v_block": 0.04009649157524109, + "v_norm": 2.213982582092285, + "ip_v_neg_g_hvp": 0.06689560413360596, + "cos_v_neg_g_hvp": 0.07479442656040192, + "g_hvp_norm": 0.4039746820926666, + "ip_v_neg_g_t": 0.06715840846300125, + "cos_v_neg_g_t": 0.08681213110685349, + "g_t_norm": 0.34941840171813965, + "g_norm": 0.4039746820926666, + "hv_norm": 0.5992497801780701, + "cos_v_hv": 0.03258412703871727, + "hg_norm": 6.515480041503906, + "cos_g_hg": 0.740831732749939, + "v_parallel_norm": 0.009282022714614868, + "v_perp_norm": 2.213963031768799, + "embed_lm_head_v_norm": 1.3397654294967651, + "embed_lm_head_cos_v_neg_g": 0.08796928077936172, + "layer_1_v_norm": 0.5004397034645081, + "layer_1_cos_v_neg_g": 0.15658041834831238, + "layer_2_v_norm": 0.5034364461898804, + "layer_2_cos_v_neg_g": 0.06551506370306015, + "layer_3_v_norm": 0.4984530806541443, + "layer_3_cos_v_neg_g": 0.053099799901247025, + "layer_4_v_norm": 0.49501341581344604, + "layer_4_cos_v_neg_g": 0.06128452718257904, + "layer_5_v_norm": 0.47404736280441284, + "layer_5_cos_v_neg_g": 0.042641568928956985, + "layer_6_v_norm": 0.497662216424942, + "layer_6_cos_v_neg_g": 0.05956611782312393, + "layer_7_v_norm": 0.5065360069274902, + "layer_7_cos_v_neg_g": 0.07303903251886368, + "layer_8_v_norm": 0.5081648826599121, + "layer_8_cos_v_neg_g": 0.07141325622797012, + "layer_9_v_norm": 0.5166765451431274, + "layer_9_cos_v_neg_g": 0.06852056086063385, + 
"layer_10_v_norm": 0.5325005054473877, + "layer_10_cos_v_neg_g": 0.08644132316112518, + "layer_11_v_norm": 0.5379926562309265, + "layer_11_cos_v_neg_g": 0.10280448943376541, + "layer_12_v_norm": 0.5312445163726807, + "layer_12_cos_v_neg_g": 0.160126730799675, + "block0_q_v_norm": 0.15896455943584442, + "block0_q_cos_v_neg_g": 0.1971835196018219, + "block0_k_v_norm": 0.15326163172721863, + "block0_k_cos_v_neg_g": 0.18517553806304932, + "block0_v_v_norm": 0.14999130368232727, + "block0_v_cos_v_neg_g": 0.2601005733013153, + "block0_o_v_norm": 0.15389259159564972, + "block0_o_cos_v_neg_g": 0.20845282077789307, + "block0_mlp_win_v_norm": 0.29586347937583923, + "block0_mlp_win_cos_v_neg_g": 0.10837244242429733, + "block0_mlp_wout_v_norm": 0.2604864239692688, + "block0_mlp_wout_cos_v_neg_g": 0.21245567500591278, + "block3_q_v_norm": 0.15375496447086334, + "block3_q_cos_v_neg_g": 0.08208102732896805, + "block3_k_v_norm": 0.14518938958644867, + "block3_k_cos_v_neg_g": 0.08636419475078583, + "block3_v_v_norm": 0.12769439816474915, + "block3_v_cos_v_neg_g": 0.049688465893268585, + "block3_o_v_norm": 0.13303782045841217, + "block3_o_cos_v_neg_g": 0.20240965485572815, + "block3_mlp_win_v_norm": 0.30923551321029663, + "block3_mlp_win_cos_v_neg_g": 0.06433375179767609, + "block3_mlp_wout_v_norm": 0.26560941338539124, + "block3_mlp_wout_cos_v_neg_g": 0.24724815785884857, + "block7_q_v_norm": 0.16044287383556366, + "block7_q_cos_v_neg_g": 0.08480922877788544, + "block7_k_v_norm": 0.15272951126098633, + "block7_k_cos_v_neg_g": 0.1994147151708603, + "block7_v_v_norm": 0.13445907831192017, + "block7_v_cos_v_neg_g": 0.05565647408366203, + "block7_o_v_norm": 0.14646179974079132, + "block7_o_cos_v_neg_g": 0.23191460967063904, + "block7_mlp_win_v_norm": 0.30636197328567505, + "block7_mlp_win_cos_v_neg_g": 0.0963812842965126, + "block7_mlp_wout_v_norm": 0.27506154775619507, + "block7_mlp_wout_cos_v_neg_g": 0.24175475537776947, + "block11_q_v_norm": 0.1648419350385666, + "block11_q_cos_v_neg_g": 0.14238028228282928, + "block11_k_v_norm": 0.15677890181541443, + "block11_k_cos_v_neg_g": 0.20522144436836243, + "block11_v_v_norm": 0.14539946615695953, + "block11_v_cos_v_neg_g": 0.10395971685647964, + "block11_o_v_norm": 0.15481458604335785, + "block11_o_cos_v_neg_g": 0.2570221424102783, + "block11_mlp_win_v_norm": 0.32092443108558655, + "block11_mlp_win_cos_v_neg_g": 0.16363771259784698, + "block11_mlp_wout_v_norm": 0.28677451610565186, + "block11_mlp_wout_cos_v_neg_g": 0.20902714133262634, + "embed_lm_head_sharpness": 0.0003726955910678953, + "layer_1_sharpness": 0.010977819561958313, + "layer_2_sharpness": 0.0005955335218459368, + "layer_3_sharpness": 0.0015996047295629978, + "layer_4_sharpness": 0.002685880521312356, + "layer_5_sharpness": 0.0012405127054080367, + "layer_6_sharpness": 0.0013755514519289136, + "layer_7_sharpness": 0.0017730026738718152, + "layer_8_sharpness": 0.002447512000799179, + "layer_9_sharpness": 0.002439488423988223, + "layer_10_sharpness": 0.0012695271288976073, + "layer_11_sharpness": 0.0013531411532312632, + "layer_12_sharpness": 0.004509300924837589, + "block0_q_sharpness": 0.0014582108706235886, + "block0_k_sharpness": 0.0014188190689310431, + "block0_v_sharpness": 0.013209360651671886, + "block0_o_sharpness": 0.0069570597261190414, + "block0_mlp_win_sharpness": 0.0015628400724381208, + "block0_mlp_wout_sharpness": 0.004195878282189369, + "block3_q_sharpness": 0.0017542087007313967, + "block3_k_sharpness": 0.000670517620164901, + "block3_v_sharpness": 0.004349255468696356, + 
"block3_o_sharpness": 0.002805163385346532, + "block3_mlp_win_sharpness": 0.00020010296429973096, + "block3_mlp_wout_sharpness": 0.0004499125061556697, + "block7_q_sharpness": 0.00015577285375911742, + "block7_k_sharpness": 0.0003522828046698123, + "block7_v_sharpness": 0.00397111102938652, + "block7_o_sharpness": 0.0005125759053044021, + "block7_mlp_win_sharpness": 0.0006702229147776961, + "block7_mlp_wout_sharpness": 0.0010391119867563248, + "block11_q_sharpness": 0.0002967668988276273, + "block11_k_sharpness": 0.0003724902926478535, + "block11_v_sharpness": 0.0011802389053627849, + "block11_o_sharpness": 0.00043575980816967785, + "block11_mlp_win_sharpness": 0.0010668367613106966, + "block11_mlp_wout_sharpness": 0.005187862552702427, + "sum_layer_numerators": 0.008337665324701402, + "block_diag_sharpness": 0.0026837264493537343, + "cross_layer_sharpness": 0.010222525957183799 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..987ee38c6e55544aa3c1d67da7aaa0d103c95ddf --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_5500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.223526954650879, + "total_l1_linf_norm": 19741.90234375, + "total_spectral_norm": 2.223526954650879, + "embed_lm_head_update_fnorm": 1.3391499519348145, + "embed_lm_head_max_l1_linf_norm": 0.3722645044326782, + "embed_lm_head_max_spectral_norm": 0.2115383893251419, + "layer_1_update_fnorm": 0.49925750494003296, + "layer_1_max_l1_linf_norm": 0.6286246180534363, + "layer_1_max_spectral_norm": 0.08503750711679459, + "layer_2_update_fnorm": 0.5107975006103516, + "layer_2_max_l1_linf_norm": 0.5713009238243103, + "layer_2_max_spectral_norm": 0.082081638276577, + "layer_3_update_fnorm": 0.49842774868011475, + "layer_3_max_l1_linf_norm": 0.6072043776512146, + "layer_3_max_spectral_norm": 0.07861301302909851, + "layer_4_update_fnorm": 0.49917474389076233, + "layer_4_max_l1_linf_norm": 0.5732427835464478, + "layer_4_max_spectral_norm": 0.08526032418012619, + "layer_5_update_fnorm": 0.48339807987213135, + "layer_5_max_l1_linf_norm": 0.5019540786743164, + "layer_5_max_spectral_norm": 0.07068219780921936, + "layer_6_update_fnorm": 0.505154550075531, + "layer_6_max_l1_linf_norm": 0.5709835290908813, + "layer_6_max_spectral_norm": 0.08363517373800278, + "layer_7_update_fnorm": 0.5070257782936096, + "layer_7_max_l1_linf_norm": 0.5321084856987, + "layer_7_max_spectral_norm": 0.07459418475627899, + "layer_8_update_fnorm": 0.5064424872398376, + "layer_8_max_l1_linf_norm": 0.5577678084373474, + "layer_8_max_spectral_norm": 0.07290522009134293, + "layer_9_update_fnorm": 0.5184158086776733, + "layer_9_max_l1_linf_norm": 0.5358471870422363, + "layer_9_max_spectral_norm": 0.058568671345710754, + "layer_10_update_fnorm": 0.5406181216239929, + "layer_10_max_l1_linf_norm": 0.5646947622299194, + "layer_10_max_spectral_norm": 0.04931570217013359, + "layer_11_update_fnorm": 0.5444998741149902, + "layer_11_max_l1_linf_norm": 0.5574600696563721, + "layer_11_max_spectral_norm": 0.05278974771499634, + "layer_12_update_fnorm": 0.5320286750793457, + "layer_12_max_l1_linf_norm": 0.5644204616546631, + "layer_12_max_spectral_norm": 0.06834577023983002, + 
"block0_q_update_fnorm": 0.15321142971515656, + "block0_q_max_l1_linf_norm": 0.20566146075725555, + "block0_q_max_spectral_norm": 0.06088883802294731, + "block0_k_update_fnorm": 0.14945684373378754, + "block0_k_max_l1_linf_norm": 0.21959397196769714, + "block0_k_max_spectral_norm": 0.06784020364284515, + "block0_v_update_fnorm": 0.1553776115179062, + "block0_v_max_l1_linf_norm": 0.20342259109020233, + "block0_v_max_spectral_norm": 0.05366229638457298, + "block0_o_update_fnorm": 0.14804747700691223, + "block0_o_max_l1_linf_norm": 0.15494945645332336, + "block0_o_max_spectral_norm": 0.04188643395900726, + "block0_mlp_win_update_fnorm": 0.2988348603248596, + "block0_mlp_win_max_l1_linf_norm": 0.19115914404392242, + "block0_mlp_win_max_spectral_norm": 0.04578501358628273, + "block0_mlp_wout_update_fnorm": 0.2607227861881256, + "block0_mlp_wout_max_l1_linf_norm": 0.6286246180534363, + "block0_mlp_wout_max_spectral_norm": 0.08503750711679459, + "block3_q_update_fnorm": 0.1517992615699768, + "block3_q_max_l1_linf_norm": 0.1804385483264923, + "block3_q_max_spectral_norm": 0.048367805778980255, + "block3_k_update_fnorm": 0.14622414112091064, + "block3_k_max_l1_linf_norm": 0.21786488592624664, + "block3_k_max_spectral_norm": 0.035571616142988205, + "block3_v_update_fnorm": 0.13068358600139618, + "block3_v_max_l1_linf_norm": 0.16445839405059814, + "block3_v_max_spectral_norm": 0.04123830795288086, + "block3_o_update_fnorm": 0.1343085914850235, + "block3_o_max_l1_linf_norm": 0.14326030015945435, + "block3_o_max_spectral_norm": 0.04273827373981476, + "block3_mlp_win_update_fnorm": 0.3133746087551117, + "block3_mlp_win_max_l1_linf_norm": 0.19138070940971375, + "block3_mlp_win_max_spectral_norm": 0.07257882505655289, + "block3_mlp_wout_update_fnorm": 0.26701101660728455, + "block3_mlp_wout_max_l1_linf_norm": 0.5732427835464478, + "block3_mlp_wout_max_spectral_norm": 0.08526032418012619, + "block7_q_update_fnorm": 0.16254295408725739, + "block7_q_max_l1_linf_norm": 0.16351819038391113, + "block7_q_max_spectral_norm": 0.025662627071142197, + "block7_k_update_fnorm": 0.1560686081647873, + "block7_k_max_l1_linf_norm": 0.1611725389957428, + "block7_k_max_spectral_norm": 0.027914201840758324, + "block7_v_update_fnorm": 0.13581563532352448, + "block7_v_max_l1_linf_norm": 0.14082609117031097, + "block7_v_max_spectral_norm": 0.02827412448823452, + "block7_o_update_fnorm": 0.1466568410396576, + "block7_o_max_l1_linf_norm": 0.1361762285232544, + "block7_o_max_spectral_norm": 0.028139881789684296, + "block7_mlp_win_update_fnorm": 0.306122750043869, + "block7_mlp_win_max_l1_linf_norm": 0.16869476437568665, + "block7_mlp_win_max_spectral_norm": 0.04547102376818657, + "block7_mlp_wout_update_fnorm": 0.268189400434494, + "block7_mlp_wout_max_l1_linf_norm": 0.5577678084373474, + "block7_mlp_wout_max_spectral_norm": 0.07290522009134293, + "block11_q_update_fnorm": 0.16430598497390747, + "block11_q_max_l1_linf_norm": 0.17076048254966736, + "block11_q_max_spectral_norm": 0.02622387744486332, + "block11_k_update_fnorm": 0.15883129835128784, + "block11_k_max_l1_linf_norm": 0.16812175512313843, + "block11_k_max_spectral_norm": 0.02421712689101696, + "block11_v_update_fnorm": 0.14617086946964264, + "block11_v_max_l1_linf_norm": 0.15571781992912292, + "block11_v_max_spectral_norm": 0.034328024834394455, + "block11_o_update_fnorm": 0.15254195034503937, + "block11_o_max_l1_linf_norm": 0.1575433611869812, + "block11_o_max_spectral_norm": 0.03736602142453194, + "block11_mlp_win_update_fnorm": 0.3232326805591583, + 
"block11_mlp_win_max_l1_linf_norm": 0.2142481803894043, + "block11_mlp_win_max_spectral_norm": 0.048651162534952164, + "block11_mlp_wout_update_fnorm": 0.2856416702270508, + "block11_mlp_wout_max_l1_linf_norm": 0.5644204616546631, + "block11_mlp_wout_max_spectral_norm": 0.06834577023983002, + "total_sharpness": 0.010995634831488132, + "block_total_sharpness": 0.01574038155376911, + "v_norm_block": 1.77503502368927, + "v_T_H_v_block": 0.04959399625658989, + "v_norm": 2.223526954650879, + "ip_v_neg_g_hvp": 0.06500579416751862, + "cos_v_neg_g_hvp": 0.07875929027795792, + "g_hvp_norm": 0.37119993567466736, + "ip_v_neg_g_t": 0.06543108820915222, + "cos_v_neg_g_t": 0.09455907344818115, + "g_t_norm": 0.31119924783706665, + "g_norm": 0.37119993567466736, + "hv_norm": 0.6810928583145142, + "cos_v_hv": 0.035896848887205124, + "hg_norm": 13.405330657958984, + "cos_g_hg": 0.33222466707229614, + "v_parallel_norm": 0.010471079498529434, + "v_perp_norm": 2.2235021591186523, + "embed_lm_head_v_norm": 1.3391499519348145, + "embed_lm_head_cos_v_neg_g": 0.08398764580488205, + "layer_1_v_norm": 0.49925750494003296, + "layer_1_cos_v_neg_g": 0.16535478830337524, + "layer_2_v_norm": 0.5107975006103516, + "layer_2_cos_v_neg_g": 0.08167655766010284, + "layer_3_v_norm": 0.49842774868011475, + "layer_3_cos_v_neg_g": 0.059740543365478516, + "layer_4_v_norm": 0.49917474389076233, + "layer_4_cos_v_neg_g": 0.07006777077913284, + "layer_5_v_norm": 0.48339807987213135, + "layer_5_cos_v_neg_g": 0.048058975487947464, + "layer_6_v_norm": 0.505154550075531, + "layer_6_cos_v_neg_g": 0.06643416732549667, + "layer_7_v_norm": 0.5070257782936096, + "layer_7_cos_v_neg_g": 0.07878777384757996, + "layer_8_v_norm": 0.5064424872398376, + "layer_8_cos_v_neg_g": 0.07510830461978912, + "layer_9_v_norm": 0.5184158086776733, + "layer_9_cos_v_neg_g": 0.0734676793217659, + "layer_10_v_norm": 0.5406181216239929, + "layer_10_cos_v_neg_g": 0.0890364795923233, + "layer_11_v_norm": 0.5444998741149902, + "layer_11_cos_v_neg_g": 0.09334468841552734, + "layer_12_v_norm": 0.5320286750793457, + "layer_12_cos_v_neg_g": 0.14181099832057953, + "block0_q_v_norm": 0.15321142971515656, + "block0_q_cos_v_neg_g": 0.2269284576177597, + "block0_k_v_norm": 0.14945684373378754, + "block0_k_cos_v_neg_g": 0.20397186279296875, + "block0_v_v_norm": 0.1553776115179062, + "block0_v_cos_v_neg_g": 0.2774944007396698, + "block0_o_v_norm": 0.14804747700691223, + "block0_o_cos_v_neg_g": 0.22102408111095428, + "block0_mlp_win_v_norm": 0.2988348603248596, + "block0_mlp_win_cos_v_neg_g": 0.11221729218959808, + "block0_mlp_wout_v_norm": 0.2607227861881256, + "block0_mlp_wout_cos_v_neg_g": 0.21092794835567474, + "block3_q_v_norm": 0.1517992615699768, + "block3_q_cos_v_neg_g": 0.09508860111236572, + "block3_k_v_norm": 0.14622414112091064, + "block3_k_cos_v_neg_g": 0.11709482222795486, + "block3_v_v_norm": 0.13068358600139618, + "block3_v_cos_v_neg_g": 0.04401784390211105, + "block3_o_v_norm": 0.1343085914850235, + "block3_o_cos_v_neg_g": 0.18272095918655396, + "block3_mlp_win_v_norm": 0.3133746087551117, + "block3_mlp_win_cos_v_neg_g": 0.07112838327884674, + "block3_mlp_wout_v_norm": 0.26701101660728455, + "block3_mlp_wout_cos_v_neg_g": 0.23464471101760864, + "block7_q_v_norm": 0.16254295408725739, + "block7_q_cos_v_neg_g": 0.08589573204517365, + "block7_k_v_norm": 0.1560686081647873, + "block7_k_cos_v_neg_g": 0.21820001304149628, + "block7_v_v_norm": 0.13581563532352448, + "block7_v_cos_v_neg_g": 0.057988397777080536, + "block7_o_v_norm": 0.1466568410396576, + 
"block7_o_cos_v_neg_g": 0.2446964681148529, + "block7_mlp_win_v_norm": 0.306122750043869, + "block7_mlp_win_cos_v_neg_g": 0.09953173249959946, + "block7_mlp_wout_v_norm": 0.268189400434494, + "block7_mlp_wout_cos_v_neg_g": 0.2582826316356659, + "block11_q_v_norm": 0.16430598497390747, + "block11_q_cos_v_neg_g": 0.14335492253303528, + "block11_k_v_norm": 0.15883129835128784, + "block11_k_cos_v_neg_g": 0.20267543196678162, + "block11_v_v_norm": 0.14617086946964264, + "block11_v_cos_v_neg_g": 0.08045065402984619, + "block11_o_v_norm": 0.15254195034503937, + "block11_o_cos_v_neg_g": 0.2332690805196762, + "block11_mlp_win_v_norm": 0.3232326805591583, + "block11_mlp_win_cos_v_neg_g": 0.1491207480430603, + "block11_mlp_wout_v_norm": 0.2856416702270508, + "block11_mlp_wout_cos_v_neg_g": 0.18981808423995972, + "embed_lm_head_sharpness": 0.00035569164901971817, + "layer_1_sharpness": 0.03786707669496536, + "layer_2_sharpness": 0.0019264004658907652, + "layer_3_sharpness": 0.0011548834154382348, + "layer_4_sharpness": 0.0012254391331225634, + "layer_5_sharpness": 0.0014019072987139225, + "layer_6_sharpness": 0.0013246408198028803, + "layer_7_sharpness": 0.0016473712166771293, + "layer_8_sharpness": 0.0026105584111064672, + "layer_9_sharpness": 0.002280738903209567, + "layer_10_sharpness": 0.0015400659758597612, + "layer_11_sharpness": 0.0014712214469909668, + "layer_12_sharpness": 0.0023260361049324274, + "block0_q_sharpness": 0.0029379206243902445, + "block0_k_sharpness": 0.0036380591336637735, + "block0_v_sharpness": 0.04112723842263222, + "block0_o_sharpness": 0.038821861147880554, + "block0_mlp_win_sharpness": 0.003097651293501258, + "block0_mlp_wout_sharpness": 0.00437830900773406, + "block3_q_sharpness": 0.0009063932811841369, + "block3_k_sharpness": 0.00036209181416779757, + "block3_v_sharpness": 0.001402652938850224, + "block3_o_sharpness": 0.0008721062913537025, + "block3_mlp_win_sharpness": 0.00020172478980384767, + "block3_mlp_wout_sharpness": 0.0003887164930347353, + "block7_q_sharpness": 0.00010648484021658078, + "block7_k_sharpness": 0.00025341883883811533, + "block7_v_sharpness": 0.004165952559560537, + "block7_o_sharpness": 0.0006239469512365758, + "block7_mlp_win_sharpness": 0.0007710768841207027, + "block7_mlp_wout_sharpness": 0.0008759066113270819, + "block11_q_sharpness": 0.0001856643648352474, + "block11_k_sharpness": 0.000277896411716938, + "block11_v_sharpness": 0.0008849120931699872, + "block11_o_sharpness": 0.0002736962051130831, + "block11_mlp_win_sharpness": 0.0007345235790126026, + "block11_mlp_wout_sharpness": 0.0022585818078368902, + "sum_layer_numerators": 0.014449887753289923, + "block_diag_sharpness": 0.0045861749747242225, + "cross_layer_sharpness": 0.011154206579044888 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..79939a0089bf88ddca786d7214331d3be69664dd --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.217339038848877, + "total_l1_linf_norm": 19683.83984375, + "total_spectral_norm": 2.217339038848877, + "embed_lm_head_update_fnorm": 1.3386458158493042, + "embed_lm_head_max_l1_linf_norm": 0.38426673412323, + 
"embed_lm_head_max_spectral_norm": 0.20352891087532043, + "layer_1_update_fnorm": 0.5013406872749329, + "layer_1_max_l1_linf_norm": 0.6443464756011963, + "layer_1_max_spectral_norm": 0.08184801042079926, + "layer_2_update_fnorm": 0.5004140734672546, + "layer_2_max_l1_linf_norm": 0.590834379196167, + "layer_2_max_spectral_norm": 0.07999025285243988, + "layer_3_update_fnorm": 0.5006722807884216, + "layer_3_max_l1_linf_norm": 0.6491334438323975, + "layer_3_max_spectral_norm": 0.0856153815984726, + "layer_4_update_fnorm": 0.4994470775127411, + "layer_4_max_l1_linf_norm": 0.5934309959411621, + "layer_4_max_spectral_norm": 0.087660051882267, + "layer_5_update_fnorm": 0.4907112717628479, + "layer_5_max_l1_linf_norm": 0.523743212223053, + "layer_5_max_spectral_norm": 0.07091940939426422, + "layer_6_update_fnorm": 0.5020566582679749, + "layer_6_max_l1_linf_norm": 0.5824118256568909, + "layer_6_max_spectral_norm": 0.08371053636074066, + "layer_7_update_fnorm": 0.5075281262397766, + "layer_7_max_l1_linf_norm": 0.6011346578598022, + "layer_7_max_spectral_norm": 0.08062849938869476, + "layer_8_update_fnorm": 0.5077552795410156, + "layer_8_max_l1_linf_norm": 0.5838119387626648, + "layer_8_max_spectral_norm": 0.07344378530979156, + "layer_9_update_fnorm": 0.5131144523620605, + "layer_9_max_l1_linf_norm": 0.5339251756668091, + "layer_9_max_spectral_norm": 0.06352346390485764, + "layer_10_update_fnorm": 0.5333570241928101, + "layer_10_max_l1_linf_norm": 0.5496654510498047, + "layer_10_max_spectral_norm": 0.04969879239797592, + "layer_11_update_fnorm": 0.5379388928413391, + "layer_11_max_l1_linf_norm": 0.5702822208404541, + "layer_11_max_spectral_norm": 0.05379687622189522, + "layer_12_update_fnorm": 0.526639997959137, + "layer_12_max_l1_linf_norm": 0.595016598701477, + "layer_12_max_spectral_norm": 0.07462047785520554, + "block0_q_update_fnorm": 0.15488271415233612, + "block0_q_max_l1_linf_norm": 0.21752172708511353, + "block0_q_max_spectral_norm": 0.056161634624004364, + "block0_k_update_fnorm": 0.15438096225261688, + "block0_k_max_l1_linf_norm": 0.22085663676261902, + "block0_k_max_spectral_norm": 0.07195450365543365, + "block0_v_update_fnorm": 0.15114638209342957, + "block0_v_max_l1_linf_norm": 0.16981761157512665, + "block0_v_max_spectral_norm": 0.04928966239094734, + "block0_o_update_fnorm": 0.1533326357603073, + "block0_o_max_l1_linf_norm": 0.16891592741012573, + "block0_o_max_spectral_norm": 0.04174916073679924, + "block0_mlp_win_update_fnorm": 0.2971337139606476, + "block0_mlp_win_max_l1_linf_norm": 0.2405289262533188, + "block0_mlp_win_max_spectral_norm": 0.04902208223938942, + "block0_mlp_wout_update_fnorm": 0.26222214102745056, + "block0_mlp_wout_max_l1_linf_norm": 0.6443464756011963, + "block0_mlp_wout_max_spectral_norm": 0.08184801042079926, + "block3_q_update_fnorm": 0.1528804898262024, + "block3_q_max_l1_linf_norm": 0.18125715851783752, + "block3_q_max_spectral_norm": 0.052522797137498856, + "block3_k_update_fnorm": 0.1454106718301773, + "block3_k_max_l1_linf_norm": 0.22249791026115417, + "block3_k_max_spectral_norm": 0.0342082604765892, + "block3_v_update_fnorm": 0.13200832903385162, + "block3_v_max_l1_linf_norm": 0.18111512064933777, + "block3_v_max_spectral_norm": 0.04552336409687996, + "block3_o_update_fnorm": 0.136912003159523, + "block3_o_max_l1_linf_norm": 0.15799389779567719, + "block3_o_max_spectral_norm": 0.0478409081697464, + "block3_mlp_win_update_fnorm": 0.312543123960495, + "block3_mlp_win_max_l1_linf_norm": 0.22954992949962616, + "block3_mlp_win_max_spectral_norm": 
0.07041589915752411, + "block3_mlp_wout_update_fnorm": 0.2663262188434601, + "block3_mlp_wout_max_l1_linf_norm": 0.5934309959411621, + "block3_mlp_wout_max_spectral_norm": 0.087660051882267, + "block7_q_update_fnorm": 0.1630471646785736, + "block7_q_max_l1_linf_norm": 0.18649904429912567, + "block7_q_max_spectral_norm": 0.029191847890615463, + "block7_k_update_fnorm": 0.1577160358428955, + "block7_k_max_l1_linf_norm": 0.18371346592903137, + "block7_k_max_spectral_norm": 0.03598303720355034, + "block7_v_update_fnorm": 0.13644741475582123, + "block7_v_max_l1_linf_norm": 0.1426295042037964, + "block7_v_max_spectral_norm": 0.028850605711340904, + "block7_o_update_fnorm": 0.14726229012012482, + "block7_o_max_l1_linf_norm": 0.14602446556091309, + "block7_o_max_spectral_norm": 0.02944803237915039, + "block7_mlp_win_update_fnorm": 0.3066627085208893, + "block7_mlp_win_max_l1_linf_norm": 0.17860281467437744, + "block7_mlp_win_max_spectral_norm": 0.045752622187137604, + "block7_mlp_wout_update_fnorm": 0.26811233162879944, + "block7_mlp_wout_max_l1_linf_norm": 0.5838119387626648, + "block7_mlp_wout_max_spectral_norm": 0.07344378530979156, + "block11_q_update_fnorm": 0.162613183259964, + "block11_q_max_l1_linf_norm": 0.15907981991767883, + "block11_q_max_spectral_norm": 0.028662007302045822, + "block11_k_update_fnorm": 0.1565878689289093, + "block11_k_max_l1_linf_norm": 0.159951314330101, + "block11_k_max_spectral_norm": 0.02284620702266693, + "block11_v_update_fnorm": 0.1437406688928604, + "block11_v_max_l1_linf_norm": 0.15789644420146942, + "block11_v_max_spectral_norm": 0.037341661751270294, + "block11_o_update_fnorm": 0.1496685892343521, + "block11_o_max_l1_linf_norm": 0.17195627093315125, + "block11_o_max_spectral_norm": 0.04167122766375542, + "block11_mlp_win_update_fnorm": 0.32069137692451477, + "block11_mlp_win_max_l1_linf_norm": 0.17839840054512024, + "block11_mlp_win_max_spectral_norm": 0.050472233444452286, + "block11_mlp_wout_update_fnorm": 0.28345826268196106, + "block11_mlp_wout_max_l1_linf_norm": 0.595016598701477, + "block11_mlp_wout_max_spectral_norm": 0.07462047785520554, + "total_sharpness": 0.006007582414895296, + "block_total_sharpness": 0.008567442186176777, + "v_norm_block": 1.7676591873168945, + "v_T_H_v_block": 0.02676999196410179, + "v_norm": 2.217339038848877, + "ip_v_neg_g_hvp": 0.05437803640961647, + "cos_v_neg_g_hvp": 0.0662970244884491, + "g_hvp_norm": 0.3699111044406891, + "ip_v_neg_g_t": 0.05481124669313431, + "cos_v_neg_g_t": 0.07917129993438721, + "g_t_norm": 0.3122265338897705, + "g_norm": 0.3699111044406891, + "hv_norm": 0.4636220932006836, + "cos_v_hv": 0.028732120990753174, + "hg_norm": 5.0337419509887695, + "cos_g_hg": 0.6262719631195068, + "v_parallel_norm": 0.009186781011521816, + "v_perp_norm": 2.217319965362549, + "embed_lm_head_v_norm": 1.3386458158493042, + "embed_lm_head_cos_v_neg_g": 0.07998976111412048, + "layer_1_v_norm": 0.5013406872749329, + "layer_1_cos_v_neg_g": 0.12764176726341248, + "layer_2_v_norm": 0.5004140734672546, + "layer_2_cos_v_neg_g": 0.058230210095644, + "layer_3_v_norm": 0.5006722807884216, + "layer_3_cos_v_neg_g": 0.04889313504099846, + "layer_4_v_norm": 0.4994470775127411, + "layer_4_cos_v_neg_g": 0.05821433290839195, + "layer_5_v_norm": 0.4907112717628479, + "layer_5_cos_v_neg_g": 0.03807416185736656, + "layer_6_v_norm": 0.5020566582679749, + "layer_6_cos_v_neg_g": 0.05349274352192879, + "layer_7_v_norm": 0.5075281262397766, + "layer_7_cos_v_neg_g": 0.06397667527198792, + "layer_8_v_norm": 0.5077552795410156, + 
"layer_8_cos_v_neg_g": 0.0647282674908638, + "layer_9_v_norm": 0.5131144523620605, + "layer_9_cos_v_neg_g": 0.062413476407527924, + "layer_10_v_norm": 0.5333570241928101, + "layer_10_cos_v_neg_g": 0.07296877354383469, + "layer_11_v_norm": 0.5379389524459839, + "layer_11_cos_v_neg_g": 0.08497528731822968, + "layer_12_v_norm": 0.526639997959137, + "layer_12_cos_v_neg_g": 0.1363629698753357, + "block0_q_v_norm": 0.15488271415233612, + "block0_q_cos_v_neg_g": 0.17398066818714142, + "block0_k_v_norm": 0.15438096225261688, + "block0_k_cos_v_neg_g": 0.17034214735031128, + "block0_v_v_norm": 0.15114638209342957, + "block0_v_cos_v_neg_g": 0.18499243259429932, + "block0_o_v_norm": 0.1533326357603073, + "block0_o_cos_v_neg_g": 0.14198607206344604, + "block0_mlp_win_v_norm": 0.2971337139606476, + "block0_mlp_win_cos_v_neg_g": 0.09852365404367447, + "block0_mlp_wout_v_norm": 0.26222214102745056, + "block0_mlp_wout_cos_v_neg_g": 0.17264801263809204, + "block3_q_v_norm": 0.1528804898262024, + "block3_q_cos_v_neg_g": 0.08762107044458389, + "block3_k_v_norm": 0.1454106718301773, + "block3_k_cos_v_neg_g": 0.09783408045768738, + "block3_v_v_norm": 0.13200832903385162, + "block3_v_cos_v_neg_g": 0.03784312307834625, + "block3_o_v_norm": 0.136912003159523, + "block3_o_cos_v_neg_g": 0.1568453311920166, + "block3_mlp_win_v_norm": 0.312543123960495, + "block3_mlp_win_cos_v_neg_g": 0.05892181396484375, + "block3_mlp_wout_v_norm": 0.2663262188434601, + "block3_mlp_wout_cos_v_neg_g": 0.22535011172294617, + "block7_q_v_norm": 0.1630471646785736, + "block7_q_cos_v_neg_g": 0.07410230487585068, + "block7_k_v_norm": 0.1577160358428955, + "block7_k_cos_v_neg_g": 0.1932368278503418, + "block7_v_v_norm": 0.13644741475582123, + "block7_v_cos_v_neg_g": 0.04947865009307861, + "block7_o_v_norm": 0.14726229012012482, + "block7_o_cos_v_neg_g": 0.21540865302085876, + "block7_mlp_win_v_norm": 0.3066627085208893, + "block7_mlp_win_cos_v_neg_g": 0.08739850670099258, + "block7_mlp_wout_v_norm": 0.26811233162879944, + "block7_mlp_wout_cos_v_neg_g": 0.22964948415756226, + "block11_q_v_norm": 0.162613183259964, + "block11_q_cos_v_neg_g": 0.12442637979984283, + "block11_k_v_norm": 0.1565878689289093, + "block11_k_cos_v_neg_g": 0.18372105062007904, + "block11_v_v_norm": 0.1437406688928604, + "block11_v_cos_v_neg_g": 0.07602231204509735, + "block11_o_v_norm": 0.1496685892343521, + "block11_o_cos_v_neg_g": 0.22326569259166718, + "block11_mlp_win_v_norm": 0.32069137692451477, + "block11_mlp_win_cos_v_neg_g": 0.13855278491973877, + "block11_mlp_wout_v_norm": 0.28345826268196106, + "block11_mlp_wout_cos_v_neg_g": 0.19249463081359863, + "embed_lm_head_sharpness": 0.00032533056219108403, + "layer_1_sharpness": 0.005301015917211771, + "layer_2_sharpness": 0.0005757338367402554, + "layer_3_sharpness": 0.0013996422057971358, + "layer_4_sharpness": 0.0013244414003565907, + "layer_5_sharpness": 0.0007898631156422198, + "layer_6_sharpness": 0.0009490009397268295, + "layer_7_sharpness": 0.0013315511168912053, + "layer_8_sharpness": 0.0019969376735389233, + "layer_9_sharpness": 0.0019315675599500537, + "layer_10_sharpness": 0.0010961474617943168, + "layer_11_sharpness": 0.0010159790981560946, + "layer_12_sharpness": 0.0034157000482082367, + "block0_q_sharpness": 0.000784708303399384, + "block0_k_sharpness": 0.0012217439943924546, + "block0_v_sharpness": 0.0030696261674165726, + "block0_o_sharpness": 0.002750994870439172, + "block0_mlp_win_sharpness": 0.0012419124832376838, + "block0_mlp_wout_sharpness": 0.002963701728731394, + "block3_q_sharpness": 
0.0008883870323188603, + "block3_k_sharpness": 0.0003771119227167219, + "block3_v_sharpness": 0.0017713546985760331, + "block3_o_sharpness": 0.0008261086768470705, + "block3_mlp_win_sharpness": 0.0001798961020540446, + "block3_mlp_wout_sharpness": 0.000658894598018378, + "block7_q_sharpness": 0.0001231709902640432, + "block7_k_sharpness": 0.00027713319286704063, + "block7_v_sharpness": 0.0032843269873410463, + "block7_o_sharpness": 0.0004383784835226834, + "block7_mlp_win_sharpness": 0.0006554179126396775, + "block7_mlp_wout_sharpness": 0.0006945537752471864, + "block11_q_sharpness": 0.00021671157446689904, + "block11_k_sharpness": 0.0002547599724493921, + "block11_v_sharpness": 0.0009211965370923281, + "block11_o_sharpness": 0.00032144971191883087, + "block11_mlp_win_sharpness": 0.0007575477357022464, + "block11_mlp_wout_sharpness": 0.003924777265638113, + "sum_layer_numerators": 0.005506726795428239, + "block_diag_sharpness": 0.0017623674409622601, + "cross_layer_sharpness": 0.006805074745214516 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..8bf40619dbd927bce20f3261205f31e5568715a8 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_6500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.229010581970215, + "total_l1_linf_norm": 19836.1328125, + "total_spectral_norm": 2.229010581970215, + "embed_lm_head_update_fnorm": 1.331573724746704, + "embed_lm_head_max_l1_linf_norm": 0.3210420310497284, + "embed_lm_head_max_spectral_norm": 0.20458748936653137, + "layer_1_update_fnorm": 0.5079761743545532, + "layer_1_max_l1_linf_norm": 0.5798329710960388, + "layer_1_max_spectral_norm": 0.08238840103149414, + "layer_2_update_fnorm": 0.5084821581840515, + "layer_2_max_l1_linf_norm": 0.5898669958114624, + "layer_2_max_spectral_norm": 0.07826954871416092, + "layer_3_update_fnorm": 0.5019353628158569, + "layer_3_max_l1_linf_norm": 0.7109944820404053, + "layer_3_max_spectral_norm": 0.07864481210708618, + "layer_4_update_fnorm": 0.5017579793930054, + "layer_4_max_l1_linf_norm": 0.6709742546081543, + "layer_4_max_spectral_norm": 0.0797409787774086, + "layer_5_update_fnorm": 0.4989226758480072, + "layer_5_max_l1_linf_norm": 0.5424638986587524, + "layer_5_max_spectral_norm": 0.0696067065000534, + "layer_6_update_fnorm": 0.5078877210617065, + "layer_6_max_l1_linf_norm": 0.6546940207481384, + "layer_6_max_spectral_norm": 0.08170211315155029, + "layer_7_update_fnorm": 0.5144587755203247, + "layer_7_max_l1_linf_norm": 0.5837185382843018, + "layer_7_max_spectral_norm": 0.08086982369422913, + "layer_8_update_fnorm": 0.5134971141815186, + "layer_8_max_l1_linf_norm": 0.5795949697494507, + "layer_8_max_spectral_norm": 0.07663246244192123, + "layer_9_update_fnorm": 0.5217167139053345, + "layer_9_max_l1_linf_norm": 0.5812820196151733, + "layer_9_max_spectral_norm": 0.06385970115661621, + "layer_10_update_fnorm": 0.5380802750587463, + "layer_10_max_l1_linf_norm": 0.5585290789604187, + "layer_10_max_spectral_norm": 0.0461420901119709, + "layer_11_update_fnorm": 0.5417086482048035, + "layer_11_max_l1_linf_norm": 0.5617146492004395, + "layer_11_max_spectral_norm": 0.05445749685168266, + "layer_12_update_fnorm": 0.5335888266563416, + 
"layer_12_max_l1_linf_norm": 0.606985330581665, + "layer_12_max_spectral_norm": 0.08846121281385422, + "block0_q_update_fnorm": 0.1569102704524994, + "block0_q_max_l1_linf_norm": 0.22843337059020996, + "block0_q_max_spectral_norm": 0.0666264146566391, + "block0_k_update_fnorm": 0.15888097882270813, + "block0_k_max_l1_linf_norm": 0.27133387327194214, + "block0_k_max_spectral_norm": 0.0776621401309967, + "block0_v_update_fnorm": 0.15225382149219513, + "block0_v_max_l1_linf_norm": 0.17802152037620544, + "block0_v_max_spectral_norm": 0.04872382804751396, + "block0_o_update_fnorm": 0.1536075323820114, + "block0_o_max_l1_linf_norm": 0.1477181762456894, + "block0_o_max_spectral_norm": 0.03526854142546654, + "block0_mlp_win_update_fnorm": 0.3005877137184143, + "block0_mlp_win_max_l1_linf_norm": 0.15582258999347687, + "block0_mlp_win_max_spectral_norm": 0.043851159512996674, + "block0_mlp_wout_update_fnorm": 0.26631614565849304, + "block0_mlp_wout_max_l1_linf_norm": 0.5798329710960388, + "block0_mlp_wout_max_spectral_norm": 0.08238840103149414, + "block3_q_update_fnorm": 0.15395908057689667, + "block3_q_max_l1_linf_norm": 0.19720971584320068, + "block3_q_max_spectral_norm": 0.05238369479775429, + "block3_k_update_fnorm": 0.14921319484710693, + "block3_k_max_l1_linf_norm": 0.22810733318328857, + "block3_k_max_spectral_norm": 0.035917796194553375, + "block3_v_update_fnorm": 0.12878836691379547, + "block3_v_max_l1_linf_norm": 0.1596430540084839, + "block3_v_max_spectral_norm": 0.03724522143602371, + "block3_o_update_fnorm": 0.13446494936943054, + "block3_o_max_l1_linf_norm": 0.16563832759857178, + "block3_o_max_spectral_norm": 0.04322345182299614, + "block3_mlp_win_update_fnorm": 0.3132965564727783, + "block3_mlp_win_max_l1_linf_norm": 0.1947663277387619, + "block3_mlp_win_max_spectral_norm": 0.06834250688552856, + "block3_mlp_wout_update_fnorm": 0.2698831260204315, + "block3_mlp_wout_max_l1_linf_norm": 0.6709742546081543, + "block3_mlp_wout_max_spectral_norm": 0.0797409787774086, + "block7_q_update_fnorm": 0.16335414350032806, + "block7_q_max_l1_linf_norm": 0.18678073585033417, + "block7_q_max_spectral_norm": 0.024963706731796265, + "block7_k_update_fnorm": 0.15685361623764038, + "block7_k_max_l1_linf_norm": 0.18852749466896057, + "block7_k_max_spectral_norm": 0.028773225843906403, + "block7_v_update_fnorm": 0.13953953981399536, + "block7_v_max_l1_linf_norm": 0.15911710262298584, + "block7_v_max_spectral_norm": 0.030465496703982353, + "block7_o_update_fnorm": 0.14844080805778503, + "block7_o_max_l1_linf_norm": 0.1416352242231369, + "block7_o_max_spectral_norm": 0.028684306889772415, + "block7_mlp_win_update_fnorm": 0.31020253896713257, + "block7_mlp_win_max_l1_linf_norm": 0.16093198955059052, + "block7_mlp_win_max_spectral_norm": 0.04466969892382622, + "block7_mlp_wout_update_fnorm": 0.27300605177879333, + "block7_mlp_wout_max_l1_linf_norm": 0.5795949697494507, + "block7_mlp_wout_max_spectral_norm": 0.07663246244192123, + "block11_q_update_fnorm": 0.16400212049484253, + "block11_q_max_l1_linf_norm": 0.1562063992023468, + "block11_q_max_spectral_norm": 0.026420610025525093, + "block11_k_update_fnorm": 0.15980610251426697, + "block11_k_max_l1_linf_norm": 0.17262843251228333, + "block11_k_max_spectral_norm": 0.02404968813061714, + "block11_v_update_fnorm": 0.1483052372932434, + "block11_v_max_l1_linf_norm": 0.15734849870204926, + "block11_v_max_spectral_norm": 0.03422149643301964, + "block11_o_update_fnorm": 0.15310846269130707, + "block11_o_max_l1_linf_norm": 0.16159799695014954, + 
"block11_o_max_spectral_norm": 0.04176774248480797, + "block11_mlp_win_update_fnorm": 0.3215703070163727, + "block11_mlp_win_max_l1_linf_norm": 0.1880897581577301, + "block11_mlp_win_max_spectral_norm": 0.05082595720887184, + "block11_mlp_wout_update_fnorm": 0.28861483931541443, + "block11_mlp_wout_max_l1_linf_norm": 0.606985330581665, + "block11_mlp_wout_max_spectral_norm": 0.08846121281385422, + "total_sharpness": 0.006534964311867952, + "block_total_sharpness": 0.009228927083313465, + "v_norm_block": 1.7875679731369019, + "v_T_H_v_block": 0.02949010580778122, + "v_norm": 2.229010581970215, + "ip_v_neg_g_hvp": 0.05675709992647171, + "cos_v_neg_g_hvp": 0.0649350956082344, + "g_hvp_norm": 0.3921286463737488, + "ip_v_neg_g_t": 0.05720440670847893, + "cos_v_neg_g_t": 0.07683596760034561, + "g_t_norm": 0.33400487899780273, + "g_norm": 0.3921286463737488, + "hv_norm": 0.5270490646362305, + "cos_v_hv": 0.02763785421848297, + "hg_norm": 5.650628089904785, + "cos_g_hg": 0.6968814134597778, + "v_parallel_norm": 0.0092155234888196, + "v_perp_norm": 2.2289915084838867, + "embed_lm_head_v_norm": 1.331573724746704, + "embed_lm_head_cos_v_neg_g": 0.0704425796866417, + "layer_1_v_norm": 0.5079761743545532, + "layer_1_cos_v_neg_g": 0.12657222151756287, + "layer_2_v_norm": 0.5084821581840515, + "layer_2_cos_v_neg_g": 0.06409954279661179, + "layer_3_v_norm": 0.5019353032112122, + "layer_3_cos_v_neg_g": 0.052182599902153015, + "layer_4_v_norm": 0.5017579793930054, + "layer_4_cos_v_neg_g": 0.05934176221489906, + "layer_5_v_norm": 0.4989226758480072, + "layer_5_cos_v_neg_g": 0.042722851037979126, + "layer_6_v_norm": 0.5078877210617065, + "layer_6_cos_v_neg_g": 0.05370986461639404, + "layer_7_v_norm": 0.5144587755203247, + "layer_7_cos_v_neg_g": 0.06475399434566498, + "layer_8_v_norm": 0.5134971737861633, + "layer_8_cos_v_neg_g": 0.06287555396556854, + "layer_9_v_norm": 0.5217167139053345, + "layer_9_cos_v_neg_g": 0.05966747924685478, + "layer_10_v_norm": 0.5380802750587463, + "layer_10_cos_v_neg_g": 0.06958784908056259, + "layer_11_v_norm": 0.5417087078094482, + "layer_11_cos_v_neg_g": 0.08425288647413254, + "layer_12_v_norm": 0.5335888266563416, + "layer_12_cos_v_neg_g": 0.12321565300226212, + "block0_q_v_norm": 0.1569102704524994, + "block0_q_cos_v_neg_g": 0.15191715955734253, + "block0_k_v_norm": 0.15888097882270813, + "block0_k_cos_v_neg_g": 0.15437398850917816, + "block0_v_v_norm": 0.15225382149219513, + "block0_v_cos_v_neg_g": 0.20797185599803925, + "block0_o_v_norm": 0.1536075323820114, + "block0_o_cos_v_neg_g": 0.15818634629249573, + "block0_mlp_win_v_norm": 0.3005877137184143, + "block0_mlp_win_cos_v_neg_g": 0.08982224762439728, + "block0_mlp_wout_v_norm": 0.26631614565849304, + "block0_mlp_wout_cos_v_neg_g": 0.17798472940921783, + "block3_q_v_norm": 0.15395908057689667, + "block3_q_cos_v_neg_g": 0.07291131466627121, + "block3_k_v_norm": 0.14921319484710693, + "block3_k_cos_v_neg_g": 0.07504066824913025, + "block3_v_v_norm": 0.12878836691379547, + "block3_v_cos_v_neg_g": 0.03766045346856117, + "block3_o_v_norm": 0.13446494936943054, + "block3_o_cos_v_neg_g": 0.17375798523426056, + "block3_mlp_win_v_norm": 0.3132965564727783, + "block3_mlp_win_cos_v_neg_g": 0.05794786289334297, + "block3_mlp_wout_v_norm": 0.2698831260204315, + "block3_mlp_wout_cos_v_neg_g": 0.22584743797779083, + "block7_q_v_norm": 0.16335414350032806, + "block7_q_cos_v_neg_g": 0.0747927874326706, + "block7_k_v_norm": 0.15685361623764038, + "block7_k_cos_v_neg_g": 0.20394080877304077, + "block7_v_v_norm": 0.13953953981399536, + 
"block7_v_cos_v_neg_g": 0.0526985265314579, + "block7_o_v_norm": 0.14844080805778503, + "block7_o_cos_v_neg_g": 0.22897297143936157, + "block7_mlp_win_v_norm": 0.31020253896713257, + "block7_mlp_win_cos_v_neg_g": 0.0799451693892479, + "block7_mlp_wout_v_norm": 0.27300605177879333, + "block7_mlp_wout_cos_v_neg_g": 0.23999688029289246, + "block11_q_v_norm": 0.16400212049484253, + "block11_q_cos_v_neg_g": 0.1234888955950737, + "block11_k_v_norm": 0.15980610251426697, + "block11_k_cos_v_neg_g": 0.1890074610710144, + "block11_v_v_norm": 0.1483052372932434, + "block11_v_cos_v_neg_g": 0.0719592273235321, + "block11_o_v_norm": 0.15310846269130707, + "block11_o_cos_v_neg_g": 0.23631800711154938, + "block11_mlp_win_v_norm": 0.3215703070163727, + "block11_mlp_win_cos_v_neg_g": 0.10350711643695831, + "block11_mlp_wout_v_norm": 0.28861483931541443, + "block11_mlp_wout_cos_v_neg_g": 0.1765591949224472, + "embed_lm_head_sharpness": 0.0003340939583722502, + "layer_1_sharpness": 0.004717163275927305, + "layer_2_sharpness": 0.000412069377489388, + "layer_3_sharpness": 0.0009100271854549646, + "layer_4_sharpness": 0.001120269182138145, + "layer_5_sharpness": 0.000982134835794568, + "layer_6_sharpness": 0.0010470375418663025, + "layer_7_sharpness": 0.0015888223424553871, + "layer_8_sharpness": 0.002140501281246543, + "layer_9_sharpness": 0.0019377004355192184, + "layer_10_sharpness": 0.0011340855853632092, + "layer_11_sharpness": 0.0011600223369896412, + "layer_12_sharpness": 0.006677898578345776, + "block0_q_sharpness": 0.0006717077922075987, + "block0_k_sharpness": 0.0009667929261922836, + "block0_v_sharpness": 0.003321796888485551, + "block0_o_sharpness": 0.0027037248946726322, + "block0_mlp_win_sharpness": 0.0008051027543842793, + "block0_mlp_wout_sharpness": 0.002481991657987237, + "block3_q_sharpness": 0.0007552984170615673, + "block3_k_sharpness": 0.00034959655022248626, + "block3_v_sharpness": 0.0016524267848581076, + "block3_o_sharpness": 0.0009299491066485643, + "block3_mlp_win_sharpness": 0.000194291424122639, + "block3_mlp_wout_sharpness": 0.0003877256822306663, + "block7_q_sharpness": 0.00012859473645221442, + "block7_k_sharpness": 0.00026815300225280225, + "block7_v_sharpness": 0.004275074694305658, + "block7_o_sharpness": 0.00042827214929275215, + "block7_mlp_win_sharpness": 0.0005629349616356194, + "block7_mlp_wout_sharpness": 0.0006939226877875626, + "block11_q_sharpness": 0.00017669623775873333, + "block11_k_sharpness": 0.00032576106605120003, + "block11_v_sharpness": 0.0008262705523520708, + "block11_o_sharpness": 0.00039768905844539404, + "block11_mlp_win_sharpness": 0.0010660967091098428, + "block11_mlp_wout_sharpness": 0.010085190646350384, + "sum_layer_numerators": 0.006432035582058337, + "block_diag_sharpness": 0.0020129051369020654, + "cross_layer_sharpness": 0.0072160219464114 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..44687eb002783ae7ee7b12aae9249efd9c72e77c --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.07906436920166, + "total_l1_linf_norm": 18069.0078125, + "total_spectral_norm": 2.079064130783081, + 
"embed_lm_head_update_fnorm": 1.3210724592208862, + "embed_lm_head_max_l1_linf_norm": 0.3603051006793976, + "embed_lm_head_max_spectral_norm": 0.20218074321746826, + "layer_1_update_fnorm": 0.33327794075012207, + "layer_1_max_l1_linf_norm": 0.4653246998786926, + "layer_1_max_spectral_norm": 0.06014063209295273, + "layer_2_update_fnorm": 0.36299851536750793, + "layer_2_max_l1_linf_norm": 0.492123007774353, + "layer_2_max_spectral_norm": 0.07169654220342636, + "layer_3_update_fnorm": 0.40196865797042847, + "layer_3_max_l1_linf_norm": 0.5239568948745728, + "layer_3_max_spectral_norm": 0.0679628923535347, + "layer_4_update_fnorm": 0.4284496605396271, + "layer_4_max_l1_linf_norm": 0.5573267936706543, + "layer_4_max_spectral_norm": 0.07293298840522766, + "layer_5_update_fnorm": 0.40932396054267883, + "layer_5_max_l1_linf_norm": 0.44006460905075073, + "layer_5_max_spectral_norm": 0.06247397139668465, + "layer_6_update_fnorm": 0.46519097685813904, + "layer_6_max_l1_linf_norm": 0.5618972778320312, + "layer_6_max_spectral_norm": 0.07966496795415878, + "layer_7_update_fnorm": 0.4904032051563263, + "layer_7_max_l1_linf_norm": 0.6093286275863647, + "layer_7_max_spectral_norm": 0.0770382285118103, + "layer_8_update_fnorm": 0.4991031289100647, + "layer_8_max_l1_linf_norm": 0.5333664417266846, + "layer_8_max_spectral_norm": 0.069851815700531, + "layer_9_update_fnorm": 0.5122613906860352, + "layer_9_max_l1_linf_norm": 0.5600900650024414, + "layer_9_max_spectral_norm": 0.05791785940527916, + "layer_10_update_fnorm": 0.532191812992096, + "layer_10_max_l1_linf_norm": 0.5422117710113525, + "layer_10_max_spectral_norm": 0.053796641528606415, + "layer_11_update_fnorm": 0.5375680327415466, + "layer_11_max_l1_linf_norm": 0.5505867004394531, + "layer_11_max_spectral_norm": 0.05036581680178642, + "layer_12_update_fnorm": 0.5302058458328247, + "layer_12_max_l1_linf_norm": 0.6126577854156494, + "layer_12_max_spectral_norm": 0.06564687192440033, + "block0_q_update_fnorm": 0.09897658973932266, + "block0_q_max_l1_linf_norm": 0.13976742327213287, + "block0_q_max_spectral_norm": 0.032860126346349716, + "block0_k_update_fnorm": 0.09565834701061249, + "block0_k_max_l1_linf_norm": 0.15058927237987518, + "block0_k_max_spectral_norm": 0.037443142384290695, + "block0_v_update_fnorm": 0.0675181970000267, + "block0_v_max_l1_linf_norm": 0.11161710321903229, + "block0_v_max_spectral_norm": 0.023859094828367233, + "block0_o_update_fnorm": 0.07328235357999802, + "block0_o_max_l1_linf_norm": 0.08911594748497009, + "block0_o_max_spectral_norm": 0.02214137278497219, + "block0_mlp_win_update_fnorm": 0.2163749635219574, + "block0_mlp_win_max_l1_linf_norm": 0.14533460140228271, + "block0_mlp_win_max_spectral_norm": 0.03769730404019356, + "block0_mlp_wout_update_fnorm": 0.18795007467269897, + "block0_mlp_wout_max_l1_linf_norm": 0.4653246998786926, + "block0_mlp_wout_max_spectral_norm": 0.06014063209295273, + "block3_q_update_fnorm": 0.11924704909324646, + "block3_q_max_l1_linf_norm": 0.151433527469635, + "block3_q_max_spectral_norm": 0.03915172442793846, + "block3_k_update_fnorm": 0.11144572496414185, + "block3_k_max_l1_linf_norm": 0.17106592655181885, + "block3_k_max_spectral_norm": 0.030827907845377922, + "block3_v_update_fnorm": 0.08229048550128937, + "block3_v_max_l1_linf_norm": 0.1172318309545517, + "block3_v_max_spectral_norm": 0.031075233593583107, + "block3_o_update_fnorm": 0.08666274696588516, + "block3_o_max_l1_linf_norm": 0.11275089532136917, + "block3_o_max_spectral_norm": 0.03373980149626732, + "block3_mlp_win_update_fnorm": 
0.2879928946495056, + "block3_mlp_win_max_l1_linf_norm": 0.1832066774368286, + "block3_mlp_win_max_spectral_norm": 0.06962815672159195, + "block3_mlp_wout_update_fnorm": 0.2440822273492813, + "block3_mlp_wout_max_l1_linf_norm": 0.5573267936706543, + "block3_mlp_wout_max_spectral_norm": 0.07293298840522766, + "block7_q_update_fnorm": 0.1533644050359726, + "block7_q_max_l1_linf_norm": 0.1486297845840454, + "block7_q_max_spectral_norm": 0.02302185818552971, + "block7_k_update_fnorm": 0.14703652262687683, + "block7_k_max_l1_linf_norm": 0.14940997958183289, + "block7_k_max_spectral_norm": 0.026607008650898933, + "block7_v_update_fnorm": 0.13248690962791443, + "block7_v_max_l1_linf_norm": 0.13428710401058197, + "block7_v_max_spectral_norm": 0.024577714502811432, + "block7_o_update_fnorm": 0.14093726873397827, + "block7_o_max_l1_linf_norm": 0.13681799173355103, + "block7_o_max_spectral_norm": 0.022874748334288597, + "block7_mlp_win_update_fnorm": 0.3044328987598419, + "block7_mlp_win_max_l1_linf_norm": 0.16910320520401, + "block7_mlp_win_max_spectral_norm": 0.04224685952067375, + "block7_mlp_wout_update_fnorm": 0.2715578079223633, + "block7_mlp_wout_max_l1_linf_norm": 0.5333664417266846, + "block7_mlp_wout_max_spectral_norm": 0.069851815700531, + "block11_q_update_fnorm": 0.1645379364490509, + "block11_q_max_l1_linf_norm": 0.16438868641853333, + "block11_q_max_spectral_norm": 0.029789019376039505, + "block11_k_update_fnorm": 0.1590651124715805, + "block11_k_max_l1_linf_norm": 0.16290491819381714, + "block11_k_max_spectral_norm": 0.02498444728553295, + "block11_v_update_fnorm": 0.1470697969198227, + "block11_v_max_l1_linf_norm": 0.15530690550804138, + "block11_v_max_spectral_norm": 0.03192494064569473, + "block11_o_update_fnorm": 0.15031376481056213, + "block11_o_max_l1_linf_norm": 0.15771876275539398, + "block11_o_max_spectral_norm": 0.03556997701525688, + "block11_mlp_win_update_fnorm": 0.3229210078716278, + "block11_mlp_win_max_l1_linf_norm": 0.18493086099624634, + "block11_mlp_win_max_spectral_norm": 0.047946151345968246, + "block11_mlp_wout_update_fnorm": 0.28304094076156616, + "block11_mlp_wout_max_l1_linf_norm": 0.6126577854156494, + "block11_mlp_wout_max_spectral_norm": 0.06564687192440033, + "total_sharpness": 0.004745089448988438, + "block_total_sharpness": 0.007185859605669975, + "v_norm_block": 1.6053897142410278, + "v_T_H_v_block": 0.018519943580031395, + "v_norm": 2.07906436920166, + "ip_v_neg_g_hvp": 0.04785316810011864, + "cos_v_neg_g_hvp": 0.06443098187446594, + "g_hvp_norm": 0.3572300672531128, + "ip_v_neg_g_t": 0.048313140869140625, + "cos_v_neg_g_t": 0.07845748215913773, + "g_t_norm": 0.2961849570274353, + "g_norm": 0.3572300672531128, + "hv_norm": 0.40435758233070374, + "cos_v_hv": 0.024397579953074455, + "hg_norm": 5.4172043800354, + "cos_g_hg": 0.6252729892730713, + "v_parallel_norm": 0.009179232642054558, + "v_perp_norm": 2.0790441036224365, + "embed_lm_head_v_norm": 1.3210724592208862, + "embed_lm_head_cos_v_neg_g": 0.08471161127090454, + "layer_1_v_norm": 0.33327794075012207, + "layer_1_cos_v_neg_g": 0.11329490691423416, + "layer_2_v_norm": 0.36299851536750793, + "layer_2_cos_v_neg_g": 0.05747565999627113, + "layer_3_v_norm": 0.4019686281681061, + "layer_3_cos_v_neg_g": 0.04567243158817291, + "layer_4_v_norm": 0.4284496605396271, + "layer_4_cos_v_neg_g": 0.056603699922561646, + "layer_5_v_norm": 0.40932396054267883, + "layer_5_cos_v_neg_g": 0.04164101928472519, + "layer_6_v_norm": 0.46519097685813904, + "layer_6_cos_v_neg_g": 0.05195733159780502, + "layer_7_v_norm": 
0.4904032051563263, + "layer_7_cos_v_neg_g": 0.06222185865044594, + "layer_8_v_norm": 0.4991031289100647, + "layer_8_cos_v_neg_g": 0.06269217282533646, + "layer_9_v_norm": 0.5122613906860352, + "layer_9_cos_v_neg_g": 0.061446402221918106, + "layer_10_v_norm": 0.532191812992096, + "layer_10_cos_v_neg_g": 0.07264096289873123, + "layer_11_v_norm": 0.5375680327415466, + "layer_11_cos_v_neg_g": 0.08619457483291626, + "layer_12_v_norm": 0.5302058458328247, + "layer_12_cos_v_neg_g": 0.12255025655031204, + "block0_q_v_norm": 0.09897658973932266, + "block0_q_cos_v_neg_g": 0.13236966729164124, + "block0_k_v_norm": 0.09565834701061249, + "block0_k_cos_v_neg_g": 0.15171298384666443, + "block0_v_v_norm": 0.0675181970000267, + "block0_v_cos_v_neg_g": 0.19885556399822235, + "block0_o_v_norm": 0.07328235357999802, + "block0_o_cos_v_neg_g": 0.15532249212265015, + "block0_mlp_win_v_norm": 0.2163749635219574, + "block0_mlp_win_cos_v_neg_g": 0.08375544100999832, + "block0_mlp_wout_v_norm": 0.18795007467269897, + "block0_mlp_wout_cos_v_neg_g": 0.16236643493175507, + "block3_q_v_norm": 0.11924704909324646, + "block3_q_cos_v_neg_g": 0.06334288418292999, + "block3_k_v_norm": 0.11144572496414185, + "block3_k_cos_v_neg_g": 0.08220367878675461, + "block3_v_v_norm": 0.08229048550128937, + "block3_v_cos_v_neg_g": 0.0331055112183094, + "block3_o_v_norm": 0.08666274696588516, + "block3_o_cos_v_neg_g": 0.18051475286483765, + "block3_mlp_win_v_norm": 0.2879928946495056, + "block3_mlp_win_cos_v_neg_g": 0.05672730505466461, + "block3_mlp_wout_v_norm": 0.2440822273492813, + "block3_mlp_wout_cos_v_neg_g": 0.21299144625663757, + "block7_q_v_norm": 0.1533644050359726, + "block7_q_cos_v_neg_g": 0.07068505883216858, + "block7_k_v_norm": 0.14703652262687683, + "block7_k_cos_v_neg_g": 0.20035549998283386, + "block7_v_v_norm": 0.13248690962791443, + "block7_v_cos_v_neg_g": 0.04873734712600708, + "block7_o_v_norm": 0.14093726873397827, + "block7_o_cos_v_neg_g": 0.220381960272789, + "block7_mlp_win_v_norm": 0.3044328987598419, + "block7_mlp_win_cos_v_neg_g": 0.08048604428768158, + "block7_mlp_wout_v_norm": 0.2715578079223633, + "block7_mlp_wout_cos_v_neg_g": 0.22003118693828583, + "block11_q_v_norm": 0.1645379364490509, + "block11_q_cos_v_neg_g": 0.11516931653022766, + "block11_k_v_norm": 0.1590651124715805, + "block11_k_cos_v_neg_g": 0.17274807393550873, + "block11_v_v_norm": 0.1470697969198227, + "block11_v_cos_v_neg_g": 0.06666059046983719, + "block11_o_v_norm": 0.15031376481056213, + "block11_o_cos_v_neg_g": 0.21549288928508759, + "block11_mlp_win_v_norm": 0.3229210078716278, + "block11_mlp_win_cos_v_neg_g": 0.12176443636417389, + "block11_mlp_wout_v_norm": 0.28304094076156616, + "block11_mlp_wout_cos_v_neg_g": 0.1641853004693985, + "embed_lm_head_sharpness": 0.000277822749922052, + "layer_1_sharpness": 0.004732322879135609, + "layer_2_sharpness": 0.0004915245808660984, + "layer_3_sharpness": 0.0009599002660252154, + "layer_4_sharpness": 0.001294457702897489, + "layer_5_sharpness": 0.0009255973272956908, + "layer_6_sharpness": 0.0010318898130208254, + "layer_7_sharpness": 0.0012742978287860751, + "layer_8_sharpness": 0.0018659663619473577, + "layer_9_sharpness": 0.0015957321738824248, + "layer_10_sharpness": 0.0008550658822059631, + "layer_11_sharpness": 0.0008942050626501441, + "layer_12_sharpness": 0.00244277436286211, + "block0_q_sharpness": 0.0004654224612750113, + "block0_k_sharpness": 0.0007340694428421557, + "block0_v_sharpness": 0.009089909493923187, + "block0_o_sharpness": 0.004910292569547892, + 
"block0_mlp_win_sharpness": 0.0007282855222001672, + "block0_mlp_wout_sharpness": 0.0022476972080767155, + "block3_q_sharpness": 0.0006908983341418207, + "block3_k_sharpness": 0.0007412417908199131, + "block3_v_sharpness": 0.0026983837597072124, + "block3_o_sharpness": 0.0013199280947446823, + "block3_mlp_win_sharpness": 0.00018493649258743972, + "block3_mlp_wout_sharpness": 0.00029221895965747535, + "block7_q_sharpness": 9.228945418726653e-05, + "block7_k_sharpness": 0.00020628234778996557, + "block7_v_sharpness": 0.0036801444366574287, + "block7_o_sharpness": 0.0003707469440996647, + "block7_mlp_win_sharpness": 0.0005457352381199598, + "block7_mlp_wout_sharpness": 0.0006109839887358248, + "block11_q_sharpness": 0.00016374030383303761, + "block11_k_sharpness": 0.000234040868235752, + "block11_v_sharpness": 0.0006823392468504608, + "block11_o_sharpness": 0.0002375216718064621, + "block11_mlp_win_sharpness": 0.0006455864640884101, + "block11_mlp_wout_sharpness": 0.0027636555023491383, + "sum_layer_numerators": 0.0037388269727595324, + "block_diag_sharpness": 0.00145068932373171, + "cross_layer_sharpness": 0.005735170281938266 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..146bdeb8fa5c12770014d0b189a6d8cab262c4e5 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_7500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.284933567047119, + "total_l1_linf_norm": 20390.619140625, + "total_spectral_norm": 2.284933090209961, + "embed_lm_head_update_fnorm": 1.3353402614593506, + "embed_lm_head_max_l1_linf_norm": 0.3742252290248871, + "embed_lm_head_max_spectral_norm": 0.2105090618133545, + "layer_1_update_fnorm": 0.5218959450721741, + "layer_1_max_l1_linf_norm": 0.7064106464385986, + "layer_1_max_spectral_norm": 0.0971963182091713, + "layer_2_update_fnorm": 0.533101499080658, + "layer_2_max_l1_linf_norm": 0.6545923948287964, + "layer_2_max_spectral_norm": 0.0802641361951828, + "layer_3_update_fnorm": 0.5279412865638733, + "layer_3_max_l1_linf_norm": 0.6689751744270325, + "layer_3_max_spectral_norm": 0.10166189074516296, + "layer_4_update_fnorm": 0.5341486930847168, + "layer_4_max_l1_linf_norm": 0.6616289019584656, + "layer_4_max_spectral_norm": 0.10130149126052856, + "layer_5_update_fnorm": 0.5206738114356995, + "layer_5_max_l1_linf_norm": 0.7101402282714844, + "layer_5_max_spectral_norm": 0.08797828108072281, + "layer_6_update_fnorm": 0.5292352437973022, + "layer_6_max_l1_linf_norm": 0.6553069353103638, + "layer_6_max_spectral_norm": 0.0985066145658493, + "layer_7_update_fnorm": 0.5352304577827454, + "layer_7_max_l1_linf_norm": 0.6208641529083252, + "layer_7_max_spectral_norm": 0.09311681985855103, + "layer_8_update_fnorm": 0.5318943858146667, + "layer_8_max_l1_linf_norm": 0.5935789942741394, + "layer_8_max_spectral_norm": 0.08716367185115814, + "layer_9_update_fnorm": 0.5378791093826294, + "layer_9_max_l1_linf_norm": 0.5623480081558228, + "layer_9_max_spectral_norm": 0.06777819991111755, + "layer_10_update_fnorm": 0.5523667335510254, + "layer_10_max_l1_linf_norm": 0.5870633721351624, + "layer_10_max_spectral_norm": 0.05593142285943031, + "layer_11_update_fnorm": 0.5523045063018799, + 
"layer_11_max_l1_linf_norm": 0.5957237482070923, + "layer_11_max_spectral_norm": 0.06627408415079117, + "layer_12_update_fnorm": 0.5451041460037231, + "layer_12_max_l1_linf_norm": 0.6146879196166992, + "layer_12_max_spectral_norm": 0.09472596645355225, + "block0_q_update_fnorm": 0.15957675874233246, + "block0_q_max_l1_linf_norm": 0.27739858627319336, + "block0_q_max_spectral_norm": 0.05828137323260307, + "block0_k_update_fnorm": 0.1666557937860489, + "block0_k_max_l1_linf_norm": 0.27232950925827026, + "block0_k_max_spectral_norm": 0.0836201012134552, + "block0_v_update_fnorm": 0.15345773100852966, + "block0_v_max_l1_linf_norm": 0.21186846494674683, + "block0_v_max_spectral_norm": 0.05705823376774788, + "block0_o_update_fnorm": 0.15384656190872192, + "block0_o_max_l1_linf_norm": 0.21297764778137207, + "block0_o_max_spectral_norm": 0.055837441235780716, + "block0_mlp_win_update_fnorm": 0.3066079914569855, + "block0_mlp_win_max_l1_linf_norm": 0.2934854030609131, + "block0_mlp_win_max_spectral_norm": 0.06299096345901489, + "block0_mlp_wout_update_fnorm": 0.27883103489875793, + "block0_mlp_wout_max_l1_linf_norm": 0.7064106464385986, + "block0_mlp_wout_max_spectral_norm": 0.0971963182091713, + "block3_q_update_fnorm": 0.16648852825164795, + "block3_q_max_l1_linf_norm": 0.24343684315681458, + "block3_q_max_spectral_norm": 0.07828789204359055, + "block3_k_update_fnorm": 0.1626710742712021, + "block3_k_max_l1_linf_norm": 0.2848093509674072, + "block3_k_max_spectral_norm": 0.051756322383880615, + "block3_v_update_fnorm": 0.14834950864315033, + "block3_v_max_l1_linf_norm": 0.202008917927742, + "block3_v_max_spectral_norm": 0.06556592881679535, + "block3_o_update_fnorm": 0.1465587615966797, + "block3_o_max_l1_linf_norm": 0.16618430614471436, + "block3_o_max_spectral_norm": 0.05211080610752106, + "block3_mlp_win_update_fnorm": 0.3239566683769226, + "block3_mlp_win_max_l1_linf_norm": 0.20370234549045563, + "block3_mlp_win_max_spectral_norm": 0.07274346798658371, + "block3_mlp_wout_update_fnorm": 0.2872600555419922, + "block3_mlp_wout_max_l1_linf_norm": 0.6616289019584656, + "block3_mlp_wout_max_spectral_norm": 0.10130149126052856, + "block7_q_update_fnorm": 0.16716434061527252, + "block7_q_max_l1_linf_norm": 0.19995896518230438, + "block7_q_max_spectral_norm": 0.045000430196523666, + "block7_k_update_fnorm": 0.16175171732902527, + "block7_k_max_l1_linf_norm": 0.19820359349250793, + "block7_k_max_spectral_norm": 0.043129343539476395, + "block7_v_update_fnorm": 0.1461862176656723, + "block7_v_max_l1_linf_norm": 0.15896359086036682, + "block7_v_max_spectral_norm": 0.036403242498636246, + "block7_o_update_fnorm": 0.153374582529068, + "block7_o_max_l1_linf_norm": 0.1459050327539444, + "block7_o_max_spectral_norm": 0.03629960119724274, + "block7_mlp_win_update_fnorm": 0.3193095624446869, + "block7_mlp_win_max_l1_linf_norm": 0.24098441004753113, + "block7_mlp_win_max_spectral_norm": 0.06093969941139221, + "block7_mlp_wout_update_fnorm": 0.28600209951400757, + "block7_mlp_wout_max_l1_linf_norm": 0.5935789942741394, + "block7_mlp_wout_max_spectral_norm": 0.08716367185115814, + "block11_q_update_fnorm": 0.16625748574733734, + "block11_q_max_l1_linf_norm": 0.2080129235982895, + "block11_q_max_spectral_norm": 0.039098288863897324, + "block11_k_update_fnorm": 0.16090533137321472, + "block11_k_max_l1_linf_norm": 0.2109394669532776, + "block11_k_max_spectral_norm": 0.036379218101501465, + "block11_v_update_fnorm": 0.15392565727233887, + "block11_v_max_l1_linf_norm": 0.19882503151893616, + 
"block11_v_max_spectral_norm": 0.05065561830997467, + "block11_o_update_fnorm": 0.15472815930843353, + "block11_o_max_l1_linf_norm": 0.17153939604759216, + "block11_o_max_spectral_norm": 0.05527110770344734, + "block11_mlp_win_update_fnorm": 0.327231228351593, + "block11_mlp_win_max_l1_linf_norm": 0.21701008081436157, + "block11_mlp_win_max_spectral_norm": 0.06020413711667061, + "block11_mlp_wout_update_fnorm": 0.2978818416595459, + "block11_mlp_wout_max_l1_linf_norm": 0.6146879196166992, + "block11_mlp_wout_max_spectral_norm": 0.09472596645355225, + "total_sharpness": 0.020022381097078323, + "block_total_sharpness": 0.02866162545979023, + "v_norm_block": 1.8541265726089478, + "v_T_H_v_block": 0.09853252023458481, + "v_norm": 2.284933567047119, + "ip_v_neg_g_hvp": 0.08448782563209534, + "cos_v_neg_g_hvp": 0.07306984812021255, + "g_hvp_norm": 0.5060370564460754, + "ip_v_neg_g_t": 0.0846753939986229, + "cos_v_neg_g_t": 0.0862860232591629, + "g_t_norm": 0.42948025465011597, + "g_norm": 0.5060370564460754, + "hv_norm": 1.0209904909133911, + "cos_v_hv": 0.0448092445731163, + "hg_norm": 14.799643516540527, + "cos_g_hg": 0.7396323680877686, + "v_parallel_norm": 0.008421391248703003, + "v_perp_norm": 2.2849180698394775, + "embed_lm_head_v_norm": 1.3353402614593506, + "embed_lm_head_cos_v_neg_g": 0.05725867301225662, + "layer_1_v_norm": 0.5218959450721741, + "layer_1_cos_v_neg_g": 0.15907731652259827, + "layer_2_v_norm": 0.533101499080658, + "layer_2_cos_v_neg_g": 0.05970107391476631, + "layer_3_v_norm": 0.5279412865638733, + "layer_3_cos_v_neg_g": 0.0546468086540699, + "layer_4_v_norm": 0.5341486930847168, + "layer_4_cos_v_neg_g": 0.07694481313228607, + "layer_5_v_norm": 0.5206738114356995, + "layer_5_cos_v_neg_g": 0.05497054010629654, + "layer_6_v_norm": 0.5292352437973022, + "layer_6_cos_v_neg_g": 0.07069913297891617, + "layer_7_v_norm": 0.5352304577827454, + "layer_7_cos_v_neg_g": 0.07488004863262177, + "layer_8_v_norm": 0.5318943858146667, + "layer_8_cos_v_neg_g": 0.07833075523376465, + "layer_9_v_norm": 0.5378791093826294, + "layer_9_cos_v_neg_g": 0.07337617129087448, + "layer_10_v_norm": 0.5523667335510254, + "layer_10_cos_v_neg_g": 0.07960522919893265, + "layer_11_v_norm": 0.5523045063018799, + "layer_11_cos_v_neg_g": 0.08604606240987778, + "layer_12_v_norm": 0.5451041460037231, + "layer_12_cos_v_neg_g": 0.13785183429718018, + "block0_q_v_norm": 0.15957675874233246, + "block0_q_cos_v_neg_g": 0.19085732102394104, + "block0_k_v_norm": 0.1666557937860489, + "block0_k_cos_v_neg_g": 0.1792519986629486, + "block0_v_v_norm": 0.15345773100852966, + "block0_v_cos_v_neg_g": 0.2829653322696686, + "block0_o_v_norm": 0.15384656190872192, + "block0_o_cos_v_neg_g": 0.21515041589736938, + "block0_mlp_win_v_norm": 0.3066079914569855, + "block0_mlp_win_cos_v_neg_g": 0.09625038504600525, + "block0_mlp_wout_v_norm": 0.27883103489875793, + "block0_mlp_wout_cos_v_neg_g": 0.21251530945301056, + "block3_q_v_norm": 0.16648852825164795, + "block3_q_cos_v_neg_g": 0.1453702598810196, + "block3_k_v_norm": 0.1626710742712021, + "block3_k_cos_v_neg_g": 0.11535949259996414, + "block3_v_v_norm": 0.14834950864315033, + "block3_v_cos_v_neg_g": 0.05731305480003357, + "block3_o_v_norm": 0.1465587615966797, + "block3_o_cos_v_neg_g": 0.25769034028053284, + "block3_mlp_win_v_norm": 0.3239566683769226, + "block3_mlp_win_cos_v_neg_g": 0.06448805332183838, + "block3_mlp_wout_v_norm": 0.2872600555419922, + "block3_mlp_wout_cos_v_neg_g": 0.2638208270072937, + "block7_q_v_norm": 0.16716434061527252, + "block7_q_cos_v_neg_g": 
0.09653711318969727, + "block7_k_v_norm": 0.16175171732902527, + "block7_k_cos_v_neg_g": 0.29607266187667847, + "block7_v_v_norm": 0.1461862176656723, + "block7_v_cos_v_neg_g": 0.05865839496254921, + "block7_o_v_norm": 0.153374582529068, + "block7_o_cos_v_neg_g": 0.27720069885253906, + "block7_mlp_win_v_norm": 0.3193095624446869, + "block7_mlp_win_cos_v_neg_g": 0.09094707667827606, + "block7_mlp_wout_v_norm": 0.28600209951400757, + "block7_mlp_wout_cos_v_neg_g": 0.28917938470840454, + "block11_q_v_norm": 0.16625748574733734, + "block11_q_cos_v_neg_g": 0.119853176176548, + "block11_k_v_norm": 0.16090533137321472, + "block11_k_cos_v_neg_g": 0.21814650297164917, + "block11_v_v_norm": 0.15392565727233887, + "block11_v_cos_v_neg_g": 0.0884556919336319, + "block11_o_v_norm": 0.15472815930843353, + "block11_o_cos_v_neg_g": 0.2979552447795868, + "block11_mlp_win_v_norm": 0.327231228351593, + "block11_mlp_win_cos_v_neg_g": 0.11236010491847992, + "block11_mlp_wout_v_norm": 0.2978818416595459, + "block11_mlp_wout_cos_v_neg_g": 0.21175122261047363, + "embed_lm_head_sharpness": 0.00038711202796548605, + "layer_1_sharpness": 0.020026065409183502, + "layer_2_sharpness": 0.0014969204785302281, + "layer_3_sharpness": 0.0032275626435875893, + "layer_4_sharpness": 0.005281706806272268, + "layer_5_sharpness": 0.0026431505102664232, + "layer_6_sharpness": 0.0024068800266832113, + "layer_7_sharpness": 0.004042173735797405, + "layer_8_sharpness": 0.0050781904719769955, + "layer_9_sharpness": 0.004467848688364029, + "layer_10_sharpness": 0.0023802684154361486, + "layer_11_sharpness": 0.0018055138643831015, + "layer_12_sharpness": 0.006732754874974489, + "block0_q_sharpness": 0.0015640690689906478, + "block0_k_sharpness": 0.0025852711405605078, + "block0_v_sharpness": 0.016225699335336685, + "block0_o_sharpness": 0.014035506173968315, + "block0_mlp_win_sharpness": 0.002800100017338991, + "block0_mlp_wout_sharpness": 0.005281427409499884, + "block3_q_sharpness": 0.003306132275611162, + "block3_k_sharpness": 0.0017306703375652432, + "block3_v_sharpness": 0.004738297313451767, + "block3_o_sharpness": 0.002145218662917614, + "block3_mlp_win_sharpness": 0.00035854714224115014, + "block3_mlp_wout_sharpness": 0.0008096342789940536, + "block7_q_sharpness": 0.00014049133460503072, + "block7_k_sharpness": 5.563961894949898e-05, + "block7_v_sharpness": 0.005163274239748716, + "block7_o_sharpness": 0.0006649139686487615, + "block7_mlp_win_sharpness": 0.0015749263111501932, + "block7_mlp_wout_sharpness": 0.0016553514869883657, + "block11_q_sharpness": 0.0005984152085147798, + "block11_k_sharpness": 0.000928967259824276, + "block11_v_sharpness": 0.001512565417215228, + "block11_o_sharpness": 0.0007688956684432924, + "block11_mlp_win_sharpness": 0.0011975178495049477, + "block11_mlp_wout_sharpness": 0.00728684151545167, + "sum_layer_numerators": 0.01684208882035056, + "block_diag_sharpness": 0.0048991100720702525, + "cross_layer_sharpness": 0.02376251538771998 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..e21eff15f5aa8ea05d0ebf2bf59923f378f34a9c --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8000.json @@ -0,0 +1,248 @@ +{ + 
"total_update_fnorm": 2.2460553646087646, + "total_l1_linf_norm": 19958.875, + "total_spectral_norm": 2.2460556030273438, + "embed_lm_head_update_fnorm": 1.3363263607025146, + "embed_lm_head_max_l1_linf_norm": 0.38301825523376465, + "embed_lm_head_max_spectral_norm": 0.2008640468120575, + "layer_1_update_fnorm": 0.48097988963127136, + "layer_1_max_l1_linf_norm": 0.5882872343063354, + "layer_1_max_spectral_norm": 0.08530718833208084, + "layer_2_update_fnorm": 0.49991574883461, + "layer_2_max_l1_linf_norm": 0.5776621699333191, + "layer_2_max_spectral_norm": 0.07863865047693253, + "layer_3_update_fnorm": 0.5128495097160339, + "layer_3_max_l1_linf_norm": 0.651952862739563, + "layer_3_max_spectral_norm": 0.08810558915138245, + "layer_4_update_fnorm": 0.5163832306861877, + "layer_4_max_l1_linf_norm": 0.6732165217399597, + "layer_4_max_spectral_norm": 0.08624082803726196, + "layer_5_update_fnorm": 0.5027012228965759, + "layer_5_max_l1_linf_norm": 0.6087028980255127, + "layer_5_max_spectral_norm": 0.0806816816329956, + "layer_6_update_fnorm": 0.5170060396194458, + "layer_6_max_l1_linf_norm": 0.6282179951667786, + "layer_6_max_spectral_norm": 0.08430665731430054, + "layer_7_update_fnorm": 0.5191576480865479, + "layer_7_max_l1_linf_norm": 0.5958786010742188, + "layer_7_max_spectral_norm": 0.07571160048246384, + "layer_8_update_fnorm": 0.5184198021888733, + "layer_8_max_l1_linf_norm": 0.5619698166847229, + "layer_8_max_spectral_norm": 0.07354167848825455, + "layer_9_update_fnorm": 0.5286908745765686, + "layer_9_max_l1_linf_norm": 0.5807837247848511, + "layer_9_max_spectral_norm": 0.0645776018500328, + "layer_10_update_fnorm": 0.5492725968360901, + "layer_10_max_l1_linf_norm": 0.5779099464416504, + "layer_10_max_spectral_norm": 0.051154449582099915, + "layer_11_update_fnorm": 0.5562312602996826, + "layer_11_max_l1_linf_norm": 0.5734052658081055, + "layer_11_max_spectral_norm": 0.05563277006149292, + "layer_12_update_fnorm": 0.5469728708267212, + "layer_12_max_l1_linf_norm": 0.598198652267456, + "layer_12_max_spectral_norm": 0.0743483304977417, + "block0_q_update_fnorm": 0.15084148943424225, + "block0_q_max_l1_linf_norm": 0.2037157416343689, + "block0_q_max_spectral_norm": 0.04378029704093933, + "block0_k_update_fnorm": 0.14720246195793152, + "block0_k_max_l1_linf_norm": 0.1965322196483612, + "block0_k_max_spectral_norm": 0.051413848996162415, + "block0_v_update_fnorm": 0.10846047103404999, + "block0_v_max_l1_linf_norm": 0.1455857753753662, + "block0_v_max_spectral_norm": 0.03835447132587433, + "block0_o_update_fnorm": 0.1263514757156372, + "block0_o_max_l1_linf_norm": 0.15227098762989044, + "block0_o_max_spectral_norm": 0.030054647475481033, + "block0_mlp_win_update_fnorm": 0.29360291361808777, + "block0_mlp_win_max_l1_linf_norm": 0.17226198315620422, + "block0_mlp_win_max_spectral_norm": 0.044030871242284775, + "block0_mlp_wout_update_fnorm": 0.26993995904922485, + "block0_mlp_wout_max_l1_linf_norm": 0.5882872343063354, + "block0_mlp_wout_max_spectral_norm": 0.08530718833208084, + "block3_q_update_fnorm": 0.1563827395439148, + "block3_q_max_l1_linf_norm": 0.18403542041778564, + "block3_q_max_spectral_norm": 0.048560731112957, + "block3_k_update_fnorm": 0.15293097496032715, + "block3_k_max_l1_linf_norm": 0.24911803007125854, + "block3_k_max_spectral_norm": 0.03773832321166992, + "block3_v_update_fnorm": 0.13765473663806915, + "block3_v_max_l1_linf_norm": 0.18649223446846008, + "block3_v_max_spectral_norm": 0.04676257073879242, + "block3_o_update_fnorm": 0.13970595598220825, + 
"block3_o_max_l1_linf_norm": 0.15935209393501282, + "block3_o_max_spectral_norm": 0.04871878772974014, + "block3_mlp_win_update_fnorm": 0.3215758502483368, + "block3_mlp_win_max_l1_linf_norm": 0.2091180831193924, + "block3_mlp_win_max_spectral_norm": 0.06905428320169449, + "block3_mlp_wout_update_fnorm": 0.2770695686340332, + "block3_mlp_wout_max_l1_linf_norm": 0.6732165217399597, + "block3_mlp_wout_max_spectral_norm": 0.08624082803726196, + "block7_q_update_fnorm": 0.16449488699436188, + "block7_q_max_l1_linf_norm": 0.15324218571186066, + "block7_q_max_spectral_norm": 0.0240174550563097, + "block7_k_update_fnorm": 0.15779033303260803, + "block7_k_max_l1_linf_norm": 0.16057655215263367, + "block7_k_max_spectral_norm": 0.02633938379585743, + "block7_v_update_fnorm": 0.14344710111618042, + "block7_v_max_l1_linf_norm": 0.1429421305656433, + "block7_v_max_spectral_norm": 0.028553204610943794, + "block7_o_update_fnorm": 0.15096093714237213, + "block7_o_max_l1_linf_norm": 0.14053770899772644, + "block7_o_max_spectral_norm": 0.03094099462032318, + "block7_mlp_win_update_fnorm": 0.3144840598106384, + "block7_mlp_win_max_l1_linf_norm": 0.16811059415340424, + "block7_mlp_win_max_spectral_norm": 0.043231379240751266, + "block7_mlp_wout_update_fnorm": 0.2727576792240143, + "block7_mlp_wout_max_l1_linf_norm": 0.5619698166847229, + "block7_mlp_wout_max_spectral_norm": 0.07354167848825455, + "block11_q_update_fnorm": 0.16656658053398132, + "block11_q_max_l1_linf_norm": 0.16181617975234985, + "block11_q_max_spectral_norm": 0.025051705539226532, + "block11_k_update_fnorm": 0.16194836795330048, + "block11_k_max_l1_linf_norm": 0.16242799162864685, + "block11_k_max_spectral_norm": 0.023020079359412193, + "block11_v_update_fnorm": 0.15642648935317993, + "block11_v_max_l1_linf_norm": 0.1668781340122223, + "block11_v_max_spectral_norm": 0.03440377116203308, + "block11_o_update_fnorm": 0.15679049491882324, + "block11_o_max_l1_linf_norm": 0.1718161255121231, + "block11_o_max_spectral_norm": 0.04102914780378342, + "block11_mlp_win_update_fnorm": 0.33016064763069153, + "block11_mlp_win_max_l1_linf_norm": 0.18560919165611267, + "block11_mlp_win_max_spectral_norm": 0.04934721440076828, + "block11_mlp_wout_update_fnorm": 0.2949717938899994, + "block11_mlp_wout_max_l1_linf_norm": 0.598198652267456, + "block11_mlp_wout_max_spectral_norm": 0.0743483304977417, + "total_sharpness": 0.005202736239880323, + "block_total_sharpness": 0.007452594581991434, + "v_norm_block": 1.8052698373794556, + "v_T_H_v_block": 0.02428799867630005, + "v_norm": 2.2460553646087646, + "ip_v_neg_g_hvp": 0.053768083453178406, + "cos_v_neg_g_hvp": 0.059138257056474686, + "g_hvp_norm": 0.40479540824890137, + "ip_v_neg_g_t": 0.054145775735378265, + "cos_v_neg_g_t": 0.07082928717136383, + "g_t_norm": 0.3403542935848236, + "g_norm": 0.40479540824890137, + "hv_norm": 0.4831251800060272, + "cos_v_hv": 0.024187589064240456, + "hg_norm": 7.321966648101807, + "cos_g_hg": 0.6841437816619873, + "v_parallel_norm": 0.006536509841680527, + "v_perp_norm": 2.2460458278656006, + "embed_lm_head_v_norm": 1.3363263607025146, + "embed_lm_head_cos_v_neg_g": 0.08869461715221405, + "layer_1_v_norm": 0.48097988963127136, + "layer_1_cos_v_neg_g": 0.11424378305673599, + "layer_2_v_norm": 0.49991574883461, + "layer_2_cos_v_neg_g": 0.05621061101555824, + "layer_3_v_norm": 0.5128494501113892, + "layer_3_cos_v_neg_g": 0.04514707997441292, + "layer_4_v_norm": 0.5163832306861877, + "layer_4_cos_v_neg_g": 0.05523865669965744, + "layer_5_v_norm": 0.5027012228965759, + 
"layer_5_cos_v_neg_g": 0.03651510179042816, + "layer_6_v_norm": 0.5170060396194458, + "layer_6_cos_v_neg_g": 0.0478859543800354, + "layer_7_v_norm": 0.5191576480865479, + "layer_7_cos_v_neg_g": 0.05745238438248634, + "layer_8_v_norm": 0.5184198021888733, + "layer_8_cos_v_neg_g": 0.05477934703230858, + "layer_9_v_norm": 0.5286908745765686, + "layer_9_cos_v_neg_g": 0.054399631917476654, + "layer_10_v_norm": 0.5492725968360901, + "layer_10_cos_v_neg_g": 0.06474029272794724, + "layer_11_v_norm": 0.5562313199043274, + "layer_11_cos_v_neg_g": 0.07293903827667236, + "layer_12_v_norm": 0.5469728708267212, + "layer_12_cos_v_neg_g": 0.11486431956291199, + "block0_q_v_norm": 0.15084148943424225, + "block0_q_cos_v_neg_g": 0.15524737536907196, + "block0_k_v_norm": 0.14720246195793152, + "block0_k_cos_v_neg_g": 0.15877828001976013, + "block0_v_v_norm": 0.10846047103404999, + "block0_v_cos_v_neg_g": 0.18963675200939178, + "block0_o_v_norm": 0.1263514757156372, + "block0_o_cos_v_neg_g": 0.1458367109298706, + "block0_mlp_win_v_norm": 0.29360291361808777, + "block0_mlp_win_cos_v_neg_g": 0.08070077002048492, + "block0_mlp_wout_v_norm": 0.26993995904922485, + "block0_mlp_wout_cos_v_neg_g": 0.16328157484531403, + "block3_q_v_norm": 0.1563827395439148, + "block3_q_cos_v_neg_g": 0.07781731337308884, + "block3_k_v_norm": 0.15293097496032715, + "block3_k_cos_v_neg_g": 0.09907384216785431, + "block3_v_v_norm": 0.13765473663806915, + "block3_v_cos_v_neg_g": 0.03818260878324509, + "block3_o_v_norm": 0.13970595598220825, + "block3_o_cos_v_neg_g": 0.17325912415981293, + "block3_mlp_win_v_norm": 0.3215758502483368, + "block3_mlp_win_cos_v_neg_g": 0.05675033852458, + "block3_mlp_wout_v_norm": 0.2770695686340332, + "block3_mlp_wout_cos_v_neg_g": 0.23223121464252472, + "block7_q_v_norm": 0.16449488699436188, + "block7_q_cos_v_neg_g": 0.07477270066738129, + "block7_k_v_norm": 0.15779033303260803, + "block7_k_cos_v_neg_g": 0.2035657912492752, + "block7_v_v_norm": 0.14344710111618042, + "block7_v_cos_v_neg_g": 0.04379947483539581, + "block7_o_v_norm": 0.15096093714237213, + "block7_o_cos_v_neg_g": 0.22914180159568787, + "block7_mlp_win_v_norm": 0.3144840598106384, + "block7_mlp_win_cos_v_neg_g": 0.07221802324056625, + "block7_mlp_wout_v_norm": 0.2727576792240143, + "block7_mlp_wout_cos_v_neg_g": 0.2286733090877533, + "block11_q_v_norm": 0.16656658053398132, + "block11_q_cos_v_neg_g": 0.11317270249128342, + "block11_k_v_norm": 0.16194836795330048, + "block11_k_cos_v_neg_g": 0.17774181067943573, + "block11_v_v_norm": 0.15642648935317993, + "block11_v_cos_v_neg_g": 0.0635790228843689, + "block11_o_v_norm": 0.15679049491882324, + "block11_o_cos_v_neg_g": 0.22211505472660065, + "block11_mlp_win_v_norm": 0.33016064763069153, + "block11_mlp_win_cos_v_neg_g": 0.12259487062692642, + "block11_mlp_wout_v_norm": 0.2949717938899994, + "block11_mlp_wout_cos_v_neg_g": 0.1601681411266327, + "embed_lm_head_sharpness": 0.0002524168521631509, + "layer_1_sharpness": 0.00435398006811738, + "layer_2_sharpness": 0.00044630796764977276, + "layer_3_sharpness": 0.001096819993108511, + "layer_4_sharpness": 0.0012371032498776913, + "layer_5_sharpness": 0.0007388914818875492, + "layer_6_sharpness": 0.0008759286138229072, + "layer_7_sharpness": 0.0012972920667380095, + "layer_8_sharpness": 0.001979018794372678, + "layer_9_sharpness": 0.0018222312210127711, + "layer_10_sharpness": 0.0009989477694034576, + "layer_11_sharpness": 0.0009065052145160735, + "layer_12_sharpness": 0.002062313724309206, + "block0_q_sharpness": 0.00041841305210255086, + 
"block0_k_sharpness": 0.001315886969678104, + "block0_v_sharpness": 0.004407982341945171, + "block0_o_sharpness": 0.002698010066524148, + "block0_mlp_win_sharpness": 0.0010135711636394262, + "block0_mlp_wout_sharpness": 0.0022232846822589636, + "block3_q_sharpness": 0.0005524924490600824, + "block3_k_sharpness": 0.0004837552842218429, + "block3_v_sharpness": 0.002471792744472623, + "block3_o_sharpness": 0.0006693097529932857, + "block3_mlp_win_sharpness": 0.00016601369134150445, + "block3_mlp_wout_sharpness": 0.00036682983045466244, + "block7_q_sharpness": 0.0001142362380051054, + "block7_k_sharpness": 0.00020415404287632555, + "block7_v_sharpness": 0.003384212264791131, + "block7_o_sharpness": 0.00038561251130886376, + "block7_mlp_win_sharpness": 0.0005900032119825482, + "block7_mlp_wout_sharpness": 0.0006492318934760988, + "block11_q_sharpness": 0.00014721848128829151, + "block11_k_sharpness": 0.00020460282394196838, + "block11_v_sharpness": 0.0007583589758723974, + "block11_o_sharpness": 0.00031662994297221303, + "block11_mlp_win_sharpness": 0.0005474807694554329, + "block11_mlp_wout_sharpness": 0.0021820145193487406, + "sum_layer_numerators": 0.004747729191539121, + "block_diag_sharpness": 0.001456805884547509, + "cross_layer_sharpness": 0.005995788697443925 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..b5baa9218b8bdfe13058d6150f82f43efb1feae0 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_8500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.7811188697814941, + "total_l1_linf_norm": 15923.1015625, + "total_spectral_norm": 1.781118631362915, + "embed_lm_head_update_fnorm": 1.0150508880615234, + "embed_lm_head_max_l1_linf_norm": 0.2853557765483856, + "embed_lm_head_max_spectral_norm": 0.17556893825531006, + "layer_1_update_fnorm": 0.4192734956741333, + "layer_1_max_l1_linf_norm": 0.5664173364639282, + "layer_1_max_spectral_norm": 0.08292204886674881, + "layer_2_update_fnorm": 0.42259344458580017, + "layer_2_max_l1_linf_norm": 0.49513503909111023, + "layer_2_max_spectral_norm": 0.06460566073656082, + "layer_3_update_fnorm": 0.41888102889060974, + "layer_3_max_l1_linf_norm": 0.585796594619751, + "layer_3_max_spectral_norm": 0.08181001245975494, + "layer_4_update_fnorm": 0.4162987768650055, + "layer_4_max_l1_linf_norm": 0.5136622786521912, + "layer_4_max_spectral_norm": 0.08229994028806686, + "layer_5_update_fnorm": 0.41185271739959717, + "layer_5_max_l1_linf_norm": 0.48867470026016235, + "layer_5_max_spectral_norm": 0.07238064706325531, + "layer_6_update_fnorm": 0.4206346273422241, + "layer_6_max_l1_linf_norm": 0.5328063368797302, + "layer_6_max_spectral_norm": 0.08267351984977722, + "layer_7_update_fnorm": 0.4171932637691498, + "layer_7_max_l1_linf_norm": 0.5293845534324646, + "layer_7_max_spectral_norm": 0.07891134917736053, + "layer_8_update_fnorm": 0.41510242223739624, + "layer_8_max_l1_linf_norm": 0.4947868287563324, + "layer_8_max_spectral_norm": 0.07576848566532135, + "layer_9_update_fnorm": 0.41967537999153137, + "layer_9_max_l1_linf_norm": 0.48448166251182556, + "layer_9_max_spectral_norm": 0.06129889190196991, + "layer_10_update_fnorm": 0.43759846687316895, + 
"layer_10_max_l1_linf_norm": 0.45165500044822693, + "layer_10_max_spectral_norm": 0.047409363090991974, + "layer_11_update_fnorm": 0.4382607042789459, + "layer_11_max_l1_linf_norm": 0.4596805274486542, + "layer_11_max_spectral_norm": 0.05925225466489792, + "layer_12_update_fnorm": 0.43163108825683594, + "layer_12_max_l1_linf_norm": 0.4902591407299042, + "layer_12_max_spectral_norm": 0.07101274281740189, + "block0_q_update_fnorm": 0.12393135577440262, + "block0_q_max_l1_linf_norm": 0.14904282987117767, + "block0_q_max_spectral_norm": 0.038917917758226395, + "block0_k_update_fnorm": 0.12346208095550537, + "block0_k_max_l1_linf_norm": 0.1599428653717041, + "block0_k_max_spectral_norm": 0.04229208454489708, + "block0_v_update_fnorm": 0.12416084110736847, + "block0_v_max_l1_linf_norm": 0.15336601436138153, + "block0_v_max_spectral_norm": 0.03970075398683548, + "block0_o_update_fnorm": 0.12648150324821472, + "block0_o_max_l1_linf_norm": 0.13473744690418243, + "block0_o_max_spectral_norm": 0.040281642228364944, + "block0_mlp_win_update_fnorm": 0.24673399329185486, + "block0_mlp_win_max_l1_linf_norm": 0.1606678068637848, + "block0_mlp_win_max_spectral_norm": 0.04633350297808647, + "block0_mlp_wout_update_fnorm": 0.22973522543907166, + "block0_mlp_wout_max_l1_linf_norm": 0.5664173364639282, + "block0_mlp_wout_max_spectral_norm": 0.08292204886674881, + "block3_q_update_fnorm": 0.12583081424236298, + "block3_q_max_l1_linf_norm": 0.16566841304302216, + "block3_q_max_spectral_norm": 0.04523517191410065, + "block3_k_update_fnorm": 0.12099288403987885, + "block3_k_max_l1_linf_norm": 0.1646905243396759, + "block3_k_max_spectral_norm": 0.030681440606713295, + "block3_v_update_fnorm": 0.12003184854984283, + "block3_v_max_l1_linf_norm": 0.16619928181171417, + "block3_v_max_spectral_norm": 0.0420692041516304, + "block3_o_update_fnorm": 0.11806678771972656, + "block3_o_max_l1_linf_norm": 0.13715487718582153, + "block3_o_max_spectral_norm": 0.04239227622747421, + "block3_mlp_win_update_fnorm": 0.25270769000053406, + "block3_mlp_win_max_l1_linf_norm": 0.17579400539398193, + "block3_mlp_win_max_spectral_norm": 0.05578486993908882, + "block3_mlp_wout_update_fnorm": 0.22475332021713257, + "block3_mlp_wout_max_l1_linf_norm": 0.5136622786521912, + "block3_mlp_wout_max_spectral_norm": 0.08229994028806686, + "block7_q_update_fnorm": 0.13146336376667023, + "block7_q_max_l1_linf_norm": 0.1484418660402298, + "block7_q_max_spectral_norm": 0.025411881506443024, + "block7_k_update_fnorm": 0.1276145577430725, + "block7_k_max_l1_linf_norm": 0.1451970934867859, + "block7_k_max_spectral_norm": 0.027206161990761757, + "block7_v_update_fnorm": 0.11680641025304794, + "block7_v_max_l1_linf_norm": 0.11921349167823792, + "block7_v_max_spectral_norm": 0.025756515562534332, + "block7_o_update_fnorm": 0.12059193104505539, + "block7_o_max_l1_linf_norm": 0.12055715173482895, + "block7_o_max_spectral_norm": 0.02516188472509384, + "block7_mlp_win_update_fnorm": 0.2491663694381714, + "block7_mlp_win_max_l1_linf_norm": 0.20580588281154633, + "block7_mlp_win_max_spectral_norm": 0.0393403097987175, + "block7_mlp_wout_update_fnorm": 0.21995426714420319, + "block7_mlp_wout_max_l1_linf_norm": 0.4947868287563324, + "block7_mlp_wout_max_spectral_norm": 0.07576848566532135, + "block11_q_update_fnorm": 0.12829817831516266, + "block11_q_max_l1_linf_norm": 0.14831838011741638, + "block11_q_max_spectral_norm": 0.02342858910560608, + "block11_k_update_fnorm": 0.12566563487052917, + "block11_k_max_l1_linf_norm": 0.14306719601154327, + 
"block11_k_max_spectral_norm": 0.020250199362635612, + "block11_v_update_fnorm": 0.1278230845928192, + "block11_v_max_l1_linf_norm": 0.18249455094337463, + "block11_v_max_spectral_norm": 0.047912418842315674, + "block11_o_update_fnorm": 0.12688569724559784, + "block11_o_max_l1_linf_norm": 0.1406276822090149, + "block11_o_max_spectral_norm": 0.04835382103919983, + "block11_mlp_win_update_fnorm": 0.25693055987358093, + "block11_mlp_win_max_l1_linf_norm": 0.17726123332977295, + "block11_mlp_win_max_spectral_norm": 0.06159484386444092, + "block11_mlp_wout_update_fnorm": 0.23557829856872559, + "block11_mlp_wout_max_l1_linf_norm": 0.4902591407299042, + "block11_mlp_wout_max_spectral_norm": 0.07101274281740189, + "total_sharpness": 0.009813000448048115, + "block_total_sharpness": 0.013623201288282871, + "v_norm_block": 1.4635765552520752, + "v_T_H_v_block": 0.02918166294693947, + "v_norm": 1.7811188697814941, + "ip_v_neg_g_hvp": 0.05254343897104263, + "cos_v_neg_g_hvp": 0.05624094977974892, + "g_hvp_norm": 0.5245331525802612, + "ip_v_neg_g_t": 0.05263838917016983, + "cos_v_neg_g_t": 0.06674745678901672, + "g_t_norm": 0.4427666962146759, + "g_norm": 0.5245331525802612, + "hv_norm": 0.5247725248336792, + "cos_v_hv": 0.03330609202384949, + "hg_norm": 17.70653533935547, + "cos_g_hg": 0.733293890953064, + "v_parallel_norm": 0.0038862235378473997, + "v_perp_norm": 1.7811146974563599, + "embed_lm_head_v_norm": 1.0150508880615234, + "embed_lm_head_cos_v_neg_g": 0.0851300060749054, + "layer_1_v_norm": 0.4192734956741333, + "layer_1_cos_v_neg_g": 0.11718365550041199, + "layer_2_v_norm": 0.42259344458580017, + "layer_2_cos_v_neg_g": 0.04634140431880951, + "layer_3_v_norm": 0.41888102889060974, + "layer_3_cos_v_neg_g": 0.04731745645403862, + "layer_4_v_norm": 0.4162987768650055, + "layer_4_cos_v_neg_g": 0.05172022059559822, + "layer_5_v_norm": 0.41185271739959717, + "layer_5_cos_v_neg_g": 0.03203963115811348, + "layer_6_v_norm": 0.4206346273422241, + "layer_6_cos_v_neg_g": 0.041844215244054794, + "layer_7_v_norm": 0.4171932637691498, + "layer_7_cos_v_neg_g": 0.05129965394735336, + "layer_8_v_norm": 0.41510242223739624, + "layer_8_cos_v_neg_g": 0.049440477043390274, + "layer_9_v_norm": 0.41967537999153137, + "layer_9_cos_v_neg_g": 0.051808491349220276, + "layer_10_v_norm": 0.43759846687316895, + "layer_10_cos_v_neg_g": 0.058338653296232224, + "layer_11_v_norm": 0.4382607340812683, + "layer_11_cos_v_neg_g": 0.06957947462797165, + "layer_12_v_norm": 0.43163108825683594, + "layer_12_cos_v_neg_g": 0.1266503632068634, + "block0_q_v_norm": 0.12393135577440262, + "block0_q_cos_v_neg_g": 0.18041564524173737, + "block0_k_v_norm": 0.12346208095550537, + "block0_k_cos_v_neg_g": 0.1857060194015503, + "block0_v_v_norm": 0.12416084110736847, + "block0_v_cos_v_neg_g": 0.22688749432563782, + "block0_o_v_norm": 0.12648150324821472, + "block0_o_cos_v_neg_g": 0.14464496076107025, + "block0_mlp_win_v_norm": 0.24673399329185486, + "block0_mlp_win_cos_v_neg_g": 0.07453306019306183, + "block0_mlp_wout_v_norm": 0.22973522543907166, + "block0_mlp_wout_cos_v_neg_g": 0.17115840315818787, + "block3_q_v_norm": 0.12583081424236298, + "block3_q_cos_v_neg_g": 0.08900575339794159, + "block3_k_v_norm": 0.12099288403987885, + "block3_k_cos_v_neg_g": 0.09048520028591156, + "block3_v_v_norm": 0.12003184854984283, + "block3_v_cos_v_neg_g": 0.034455735236406326, + "block3_o_v_norm": 0.11806678771972656, + "block3_o_cos_v_neg_g": 0.17539319396018982, + "block3_mlp_win_v_norm": 0.25270769000053406, + "block3_mlp_win_cos_v_neg_g": 
0.04816162586212158, + "block3_mlp_wout_v_norm": 0.22475332021713257, + "block3_mlp_wout_cos_v_neg_g": 0.2226954847574234, + "block7_q_v_norm": 0.13146336376667023, + "block7_q_cos_v_neg_g": 0.0698239728808403, + "block7_k_v_norm": 0.1276145577430725, + "block7_k_cos_v_neg_g": 0.20047204196453094, + "block7_v_v_norm": 0.11680641025304794, + "block7_v_cos_v_neg_g": 0.03880089148879051, + "block7_o_v_norm": 0.12059193104505539, + "block7_o_cos_v_neg_g": 0.2257286012172699, + "block7_mlp_win_v_norm": 0.2491663694381714, + "block7_mlp_win_cos_v_neg_g": 0.06463444232940674, + "block7_mlp_wout_v_norm": 0.21995426714420319, + "block7_mlp_wout_cos_v_neg_g": 0.22831420600414276, + "block11_q_v_norm": 0.12829817831516266, + "block11_q_cos_v_neg_g": 0.1115029826760292, + "block11_k_v_norm": 0.12566563487052917, + "block11_k_cos_v_neg_g": 0.19728779792785645, + "block11_v_v_norm": 0.1278230845928192, + "block11_v_cos_v_neg_g": 0.0728851929306984, + "block11_o_v_norm": 0.12688569724559784, + "block11_o_cos_v_neg_g": 0.2644784450531006, + "block11_mlp_win_v_norm": 0.25693055987358093, + "block11_mlp_win_cos_v_neg_g": 0.12197327613830566, + "block11_mlp_wout_v_norm": 0.23557829856872559, + "block11_mlp_wout_cos_v_neg_g": 0.19391421973705292, + "embed_lm_head_sharpness": 0.0003459904110059142, + "layer_1_sharpness": 0.016917524859309196, + "layer_2_sharpness": 0.0026361988857388496, + "layer_3_sharpness": 0.0024619915056973696, + "layer_4_sharpness": 0.0013928046682849526, + "layer_5_sharpness": 0.0007572043687105179, + "layer_6_sharpness": 0.0013451986014842987, + "layer_7_sharpness": 0.0021381129045039415, + "layer_8_sharpness": 0.003097237553447485, + "layer_9_sharpness": 0.002681449754163623, + "layer_10_sharpness": 0.0016039859037846327, + "layer_11_sharpness": 0.0016190940514206886, + "layer_12_sharpness": 0.004895511083304882, + "block0_q_sharpness": 0.0009086739737540483, + "block0_k_sharpness": 0.0012467397609725595, + "block0_v_sharpness": 0.004907630849629641, + "block0_o_sharpness": 0.012212510220706463, + "block0_mlp_win_sharpness": 0.003086720360442996, + "block0_mlp_wout_sharpness": 0.007307606283575296, + "block3_q_sharpness": 0.0008041295222938061, + "block3_k_sharpness": 0.00047455247840844095, + "block3_v_sharpness": 0.00216226396150887, + "block3_o_sharpness": 0.0010533953318372369, + "block3_mlp_win_sharpness": 0.0001726905320538208, + "block3_mlp_wout_sharpness": 0.00036798545625060797, + "block7_q_sharpness": 0.00014514976646751165, + "block7_k_sharpness": 0.00020933817722834647, + "block7_v_sharpness": 0.00427588913589716, + "block7_o_sharpness": 0.00044829898979514837, + "block7_mlp_win_sharpness": 0.0009057329152710736, + "block7_mlp_wout_sharpness": 0.0012025582836940885, + "block11_q_sharpness": 0.0002057048404822126, + "block11_k_sharpness": 0.0003397726104594767, + "block11_v_sharpness": 0.0010899294866248965, + "block11_o_sharpness": 0.0005534354713745415, + "block11_mlp_win_sharpness": 0.0012486519990488887, + "block11_mlp_wout_sharpness": 0.005542176775634289, + "sum_layer_numerators": 0.007392831444004771, + "block_diag_sharpness": 0.0034512777884616367, + "cross_layer_sharpness": 0.010171923499821234 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9000.json new file mode 100644 index 
0000000000000000000000000000000000000000..fefa979799756efe6a830f6ee66d7a3388246033 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.1642316579818726, + "total_l1_linf_norm": 10401.90234375, + "total_spectral_norm": 1.164231538772583, + "embed_lm_head_update_fnorm": 0.6704404950141907, + "embed_lm_head_max_l1_linf_norm": 0.18617567420005798, + "embed_lm_head_max_spectral_norm": 0.10930078476667404, + "layer_1_update_fnorm": 0.27285704016685486, + "layer_1_max_l1_linf_norm": 0.34259486198425293, + "layer_1_max_spectral_norm": 0.04793371260166168, + "layer_2_update_fnorm": 0.27363571524620056, + "layer_2_max_l1_linf_norm": 0.3178842067718506, + "layer_2_max_spectral_norm": 0.04201250895857811, + "layer_3_update_fnorm": 0.2717428207397461, + "layer_3_max_l1_linf_norm": 0.35602861642837524, + "layer_3_max_spectral_norm": 0.050558093935251236, + "layer_4_update_fnorm": 0.2703498601913452, + "layer_4_max_l1_linf_norm": 0.32661378383636475, + "layer_4_max_spectral_norm": 0.050860144197940826, + "layer_5_update_fnorm": 0.2651573419570923, + "layer_5_max_l1_linf_norm": 0.32643455266952515, + "layer_5_max_spectral_norm": 0.04454704001545906, + "layer_6_update_fnorm": 0.2717759609222412, + "layer_6_max_l1_linf_norm": 0.3402651250362396, + "layer_6_max_spectral_norm": 0.05108979344367981, + "layer_7_update_fnorm": 0.2718731760978699, + "layer_7_max_l1_linf_norm": 0.32317492365837097, + "layer_7_max_spectral_norm": 0.04717392101883888, + "layer_8_update_fnorm": 0.27052292227745056, + "layer_8_max_l1_linf_norm": 0.3195670247077942, + "layer_8_max_spectral_norm": 0.04537104442715645, + "layer_9_update_fnorm": 0.27632376551628113, + "layer_9_max_l1_linf_norm": 0.33005398511886597, + "layer_9_max_spectral_norm": 0.03836227208375931, + "layer_10_update_fnorm": 0.28247174620628357, + "layer_10_max_l1_linf_norm": 0.3153225779533386, + "layer_10_max_spectral_norm": 0.028229251503944397, + "layer_11_update_fnorm": 0.2859322428703308, + "layer_11_max_l1_linf_norm": 0.30533507466316223, + "layer_11_max_spectral_norm": 0.028706934303045273, + "layer_12_update_fnorm": 0.2837488055229187, + "layer_12_max_l1_linf_norm": 0.32622230052948, + "layer_12_max_spectral_norm": 0.036764949560165405, + "block0_q_update_fnorm": 0.0806160643696785, + "block0_q_max_l1_linf_norm": 0.09237897396087646, + "block0_q_max_spectral_norm": 0.023599689826369286, + "block0_k_update_fnorm": 0.08140023052692413, + "block0_k_max_l1_linf_norm": 0.10918694734573364, + "block0_k_max_spectral_norm": 0.029026830568909645, + "block0_v_update_fnorm": 0.07994800060987473, + "block0_v_max_l1_linf_norm": 0.09398207813501358, + "block0_v_max_spectral_norm": 0.02344493940472603, + "block0_o_update_fnorm": 0.08128538727760315, + "block0_o_max_l1_linf_norm": 0.08177369832992554, + "block0_o_max_spectral_norm": 0.017979281023144722, + "block0_mlp_win_update_fnorm": 0.15982115268707275, + "block0_mlp_win_max_l1_linf_norm": 0.10457701981067657, + "block0_mlp_win_max_spectral_norm": 0.02160060405731201, + "block0_mlp_wout_update_fnorm": 0.1508016139268875, + "block0_mlp_wout_max_l1_linf_norm": 0.34259486198425293, + "block0_mlp_wout_max_spectral_norm": 0.04793371260166168, + "block3_q_update_fnorm": 0.08031968027353287, + "block3_q_max_l1_linf_norm": 0.10276216268539429, + "block3_q_max_spectral_norm": 0.025220971554517746, + "block3_k_update_fnorm": 0.07774454355239868, + "block3_k_max_l1_linf_norm": 0.10422638058662415, 
+ "block3_k_max_spectral_norm": 0.01784682646393776, + "block3_v_update_fnorm": 0.07414083927869797, + "block3_v_max_l1_linf_norm": 0.09656934440135956, + "block3_v_max_spectral_norm": 0.020989425480365753, + "block3_o_update_fnorm": 0.07557696849107742, + "block3_o_max_l1_linf_norm": 0.08551521599292755, + "block3_o_max_spectral_norm": 0.024264071136713028, + "block3_mlp_win_update_fnorm": 0.16454647481441498, + "block3_mlp_win_max_l1_linf_norm": 0.10072442889213562, + "block3_mlp_win_max_spectral_norm": 0.034762777388095856, + "block3_mlp_wout_update_fnorm": 0.14919203519821167, + "block3_mlp_wout_max_l1_linf_norm": 0.32661378383636475, + "block3_mlp_wout_max_spectral_norm": 0.050860144197940826, + "block7_q_update_fnorm": 0.08257056027650833, + "block7_q_max_l1_linf_norm": 0.07735171169042587, + "block7_q_max_spectral_norm": 0.012562624178826809, + "block7_k_update_fnorm": 0.08053914457559586, + "block7_k_max_l1_linf_norm": 0.08934904634952545, + "block7_k_max_spectral_norm": 0.013913409784436226, + "block7_v_update_fnorm": 0.07710625976324081, + "block7_v_max_l1_linf_norm": 0.07771174609661102, + "block7_v_max_spectral_norm": 0.012950120493769646, + "block7_o_update_fnorm": 0.0794532373547554, + "block7_o_max_l1_linf_norm": 0.07690159976482391, + "block7_o_max_spectral_norm": 0.013808466494083405, + "block7_mlp_win_update_fnorm": 0.16281317174434662, + "block7_mlp_win_max_l1_linf_norm": 0.0876312106847763, + "block7_mlp_win_max_spectral_norm": 0.0218582171946764, + "block7_mlp_wout_update_fnorm": 0.14515474438667297, + "block7_mlp_wout_max_l1_linf_norm": 0.3195670247077942, + "block7_mlp_wout_max_spectral_norm": 0.04537104442715645, + "block11_q_update_fnorm": 0.08485033363103867, + "block11_q_max_l1_linf_norm": 0.08219314366579056, + "block11_q_max_spectral_norm": 0.012775965966284275, + "block11_k_update_fnorm": 0.08297600597143173, + "block11_k_max_l1_linf_norm": 0.09254070371389389, + "block11_k_max_spectral_norm": 0.011898866854608059, + "block11_v_update_fnorm": 0.08101791143417358, + "block11_v_max_l1_linf_norm": 0.09126052260398865, + "block11_v_max_spectral_norm": 0.015974074602127075, + "block11_o_update_fnorm": 0.0821952074766159, + "block11_o_max_l1_linf_norm": 0.08640776574611664, + "block11_o_max_spectral_norm": 0.021093731746077538, + "block11_mlp_win_update_fnorm": 0.1690491884946823, + "block11_mlp_win_max_l1_linf_norm": 0.09966473281383514, + "block11_mlp_win_max_spectral_norm": 0.025282910093665123, + "block11_mlp_wout_update_fnorm": 0.1564716249704361, + "block11_mlp_wout_max_l1_linf_norm": 0.32622230052948, + "block11_mlp_wout_max_spectral_norm": 0.036764949560165405, + "total_sharpness": 0.006724217906594276, + "block_total_sharpness": 0.009425773285329342, + "v_norm_block": 0.9518112540245056, + "v_T_H_v_block": 0.00853922963142395, + "v_norm": 1.1642316579818726, + "ip_v_neg_g_hvp": 0.026074722409248352, + "cos_v_neg_g_hvp": 0.06063997000455856, + "g_hvp_norm": 0.36933574080467224, + "ip_v_neg_g_t": 0.026364998891949654, + "cos_v_neg_g_t": 0.07665924727916718, + "g_t_norm": 0.29540905356407166, + "g_norm": 0.36933574080467224, + "hv_norm": 0.32926180958747864, + "cos_v_hv": 0.023776058107614517, + "hg_norm": 6.174778461456299, + "cos_g_hg": 0.6053447127342224, + "v_parallel_norm": 0.0037053690757602453, + "v_perp_norm": 1.1642258167266846, + "embed_lm_head_v_norm": 0.6704404950141907, + "embed_lm_head_cos_v_neg_g": 0.09135421365499496, + "layer_1_v_norm": 0.27285704016685486, + "layer_1_cos_v_neg_g": 0.10854798555374146, + "layer_2_v_norm": 0.27363571524620056, 
+ "layer_2_cos_v_neg_g": 0.057279445230960846, + "layer_3_v_norm": 0.2717428207397461, + "layer_3_cos_v_neg_g": 0.04850349947810173, + "layer_4_v_norm": 0.2703498601913452, + "layer_4_cos_v_neg_g": 0.055633820593357086, + "layer_5_v_norm": 0.2651573419570923, + "layer_5_cos_v_neg_g": 0.036049969494342804, + "layer_6_v_norm": 0.2717759609222412, + "layer_6_cos_v_neg_g": 0.049050718545913696, + "layer_7_v_norm": 0.2718731760978699, + "layer_7_cos_v_neg_g": 0.05433277785778046, + "layer_8_v_norm": 0.27052292227745056, + "layer_8_cos_v_neg_g": 0.0556422583758831, + "layer_9_v_norm": 0.27632376551628113, + "layer_9_cos_v_neg_g": 0.05619673430919647, + "layer_10_v_norm": 0.28247174620628357, + "layer_10_cos_v_neg_g": 0.06615841388702393, + "layer_11_v_norm": 0.2859322428703308, + "layer_11_cos_v_neg_g": 0.07961918413639069, + "layer_12_v_norm": 0.2837488055229187, + "layer_12_cos_v_neg_g": 0.12421619892120361, + "block0_q_v_norm": 0.0806160643696785, + "block0_q_cos_v_neg_g": 0.13591808080673218, + "block0_k_v_norm": 0.08140023052692413, + "block0_k_cos_v_neg_g": 0.1415461301803589, + "block0_v_v_norm": 0.07994800060987473, + "block0_v_cos_v_neg_g": 0.18099607527256012, + "block0_o_v_norm": 0.08128538727760315, + "block0_o_cos_v_neg_g": 0.13653463125228882, + "block0_mlp_win_v_norm": 0.15982115268707275, + "block0_mlp_win_cos_v_neg_g": 0.07481478899717331, + "block0_mlp_wout_v_norm": 0.1508016139268875, + "block0_mlp_wout_cos_v_neg_g": 0.14885683357715607, + "block3_q_v_norm": 0.08031968027353287, + "block3_q_cos_v_neg_g": 0.07834534347057343, + "block3_k_v_norm": 0.07774454355239868, + "block3_k_cos_v_neg_g": 0.10112112015485764, + "block3_v_v_norm": 0.07414083927869797, + "block3_v_cos_v_neg_g": 0.036718592047691345, + "block3_o_v_norm": 0.07557696849107742, + "block3_o_cos_v_neg_g": 0.15526579320430756, + "block3_mlp_win_v_norm": 0.16454647481441498, + "block3_mlp_win_cos_v_neg_g": 0.052701130509376526, + "block3_mlp_wout_v_norm": 0.14919203519821167, + "block3_mlp_wout_cos_v_neg_g": 0.22138451039791107, + "block7_q_v_norm": 0.08257056027650833, + "block7_q_cos_v_neg_g": 0.06557915359735489, + "block7_k_v_norm": 0.08053914457559586, + "block7_k_cos_v_neg_g": 0.20184655487537384, + "block7_v_v_norm": 0.07710625976324081, + "block7_v_cos_v_neg_g": 0.04213693365454674, + "block7_o_v_norm": 0.0794532373547554, + "block7_o_cos_v_neg_g": 0.21056394279003143, + "block7_mlp_win_v_norm": 0.16281317174434662, + "block7_mlp_win_cos_v_neg_g": 0.06839893013238907, + "block7_mlp_wout_v_norm": 0.14515474438667297, + "block7_mlp_wout_cos_v_neg_g": 0.2185479700565338, + "block11_q_v_norm": 0.08485033363103867, + "block11_q_cos_v_neg_g": 0.11286874860525131, + "block11_k_v_norm": 0.08297600597143173, + "block11_k_cos_v_neg_g": 0.18853937089443207, + "block11_v_v_norm": 0.08101791143417358, + "block11_v_cos_v_neg_g": 0.062339670956134796, + "block11_o_v_norm": 0.0821952074766159, + "block11_o_cos_v_neg_g": 0.21714599430561066, + "block11_mlp_win_v_norm": 0.1690491884946823, + "block11_mlp_win_cos_v_neg_g": 0.13118579983711243, + "block11_mlp_wout_v_norm": 0.1564716249704361, + "block11_mlp_wout_cos_v_neg_g": 0.1713746190071106, + "embed_lm_head_sharpness": 0.0003450744552537799, + "layer_1_sharpness": 0.004338011611253023, + "layer_2_sharpness": 0.0009597576572559774, + "layer_3_sharpness": 0.0013838129816576838, + "layer_4_sharpness": 0.0012185540981590748, + "layer_5_sharpness": 0.0007692409562878311, + "layer_6_sharpness": 0.0015424661105498672, + "layer_7_sharpness": 0.0020465226843953133, + 
"layer_8_sharpness": 0.0034651048481464386, + "layer_9_sharpness": 0.002779413480311632, + "layer_10_sharpness": 0.001243252889253199, + "layer_11_sharpness": 0.0010559724178165197, + "layer_12_sharpness": 0.002365861786529422, + "block0_q_sharpness": 0.0006840552669018507, + "block0_k_sharpness": 0.0008541599963791668, + "block0_v_sharpness": 0.0028606520500034094, + "block0_o_sharpness": 0.0022811631206423044, + "block0_mlp_win_sharpness": 0.0009109360980801284, + "block0_mlp_wout_sharpness": 0.0022406745702028275, + "block3_q_sharpness": 0.0005074512446299195, + "block3_k_sharpness": 0.0003734838683158159, + "block3_v_sharpness": 0.002631189301609993, + "block3_o_sharpness": 0.0007798694423399866, + "block3_mlp_win_sharpness": 0.00018479749269317836, + "block3_mlp_wout_sharpness": 0.0004391645488794893, + "block7_q_sharpness": 0.00016300895367749035, + "block7_k_sharpness": 0.0002055326767731458, + "block7_v_sharpness": 0.004892011638730764, + "block7_o_sharpness": 0.0003733128833118826, + "block7_mlp_win_sharpness": 0.00104415079113096, + "block7_mlp_wout_sharpness": 0.0013015738222748041, + "block11_q_sharpness": 0.00021726767590735108, + "block11_k_sharpness": 0.0002823469985742122, + "block11_v_sharpness": 0.0008713516290299594, + "block11_o_sharpness": 0.00026885204715654254, + "block11_mlp_win_sharpness": 0.0006244347896426916, + "block11_mlp_wout_sharpness": 0.0024654867593199015, + "sum_layer_numerators": 0.0017471883267153674, + "block_diag_sharpness": 0.0019285817307811777, + "cross_layer_sharpness": 0.007497191554548165 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..fb87214b6c1d1e2c63d14f715831e053a0489279 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/sharpness_step_9500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.5971421003341675, + "total_l1_linf_norm": 5346.578125, + "total_spectral_norm": 0.5971421003341675, + "embed_lm_head_update_fnorm": 0.3396012485027313, + "embed_lm_head_max_l1_linf_norm": 0.08981205523014069, + "embed_lm_head_max_spectral_norm": 0.058515191078186035, + "layer_1_update_fnorm": 0.14101123809814453, + "layer_1_max_l1_linf_norm": 0.1808914840221405, + "layer_1_max_spectral_norm": 0.025123534724116325, + "layer_2_update_fnorm": 0.1407158374786377, + "layer_2_max_l1_linf_norm": 0.15501931309700012, + "layer_2_max_spectral_norm": 0.020953457802534103, + "layer_3_update_fnorm": 0.14087079465389252, + "layer_3_max_l1_linf_norm": 0.17474085092544556, + "layer_3_max_spectral_norm": 0.02405286394059658, + "layer_4_update_fnorm": 0.1397785097360611, + "layer_4_max_l1_linf_norm": 0.1788906753063202, + "layer_4_max_spectral_norm": 0.02437894232571125, + "layer_5_update_fnorm": 0.13750247657299042, + "layer_5_max_l1_linf_norm": 0.15872564911842346, + "layer_5_max_spectral_norm": 0.023165414109826088, + "layer_6_update_fnorm": 0.1396990865468979, + "layer_6_max_l1_linf_norm": 0.18531635403633118, + "layer_6_max_spectral_norm": 0.025993146002292633, + "layer_7_update_fnorm": 0.140215203166008, + "layer_7_max_l1_linf_norm": 0.1717570722103119, + "layer_7_max_spectral_norm": 0.024745237082242966, + "layer_8_update_fnorm": 0.14012624323368073, + "layer_8_max_l1_linf_norm": 
0.17849937081336975, + "layer_8_max_spectral_norm": 0.02511504478752613, + "layer_9_update_fnorm": 0.14259713888168335, + "layer_9_max_l1_linf_norm": 0.1579912006855011, + "layer_9_max_spectral_norm": 0.020874204114079475, + "layer_10_update_fnorm": 0.145646870136261, + "layer_10_max_l1_linf_norm": 0.1538073867559433, + "layer_10_max_spectral_norm": 0.015017432160675526, + "layer_11_update_fnorm": 0.14687174558639526, + "layer_11_max_l1_linf_norm": 0.16430172324180603, + "layer_11_max_spectral_norm": 0.015067786909639835, + "layer_12_update_fnorm": 0.14610005915164948, + "layer_12_max_l1_linf_norm": 0.16722920536994934, + "layer_12_max_spectral_norm": 0.020626341924071312, + "block0_q_update_fnorm": 0.04127304255962372, + "block0_q_max_l1_linf_norm": 0.04374966770410538, + "block0_q_max_spectral_norm": 0.011970272287726402, + "block0_k_update_fnorm": 0.041127968579530716, + "block0_k_max_l1_linf_norm": 0.051535844802856445, + "block0_k_max_spectral_norm": 0.01248870324343443, + "block0_v_update_fnorm": 0.040867842733860016, + "block0_v_max_l1_linf_norm": 0.04541522264480591, + "block0_v_max_spectral_norm": 0.010118599981069565, + "block0_o_update_fnorm": 0.041929565370082855, + "block0_o_max_l1_linf_norm": 0.03993309289216995, + "block0_o_max_spectral_norm": 0.007715681102126837, + "block0_mlp_win_update_fnorm": 0.08241355419158936, + "block0_mlp_win_max_l1_linf_norm": 0.04264548048377037, + "block0_mlp_win_max_spectral_norm": 0.013023464009165764, + "block0_mlp_wout_update_fnorm": 0.07910217344760895, + "block0_mlp_wout_max_l1_linf_norm": 0.1808914840221405, + "block0_mlp_wout_max_spectral_norm": 0.025123534724116325, + "block3_q_update_fnorm": 0.041251666843891144, + "block3_q_max_l1_linf_norm": 0.04687114059925079, + "block3_q_max_spectral_norm": 0.011903936043381691, + "block3_k_update_fnorm": 0.0401160754263401, + "block3_k_max_l1_linf_norm": 0.05727382376790047, + "block3_k_max_spectral_norm": 0.010071402415633202, + "block3_v_update_fnorm": 0.03904443606734276, + "block3_v_max_l1_linf_norm": 0.05089178681373596, + "block3_v_max_spectral_norm": 0.010184362530708313, + "block3_o_update_fnorm": 0.03992171958088875, + "block3_o_max_l1_linf_norm": 0.0447457954287529, + "block3_o_max_spectral_norm": 0.012994112446904182, + "block3_mlp_win_update_fnorm": 0.08418163657188416, + "block3_mlp_win_max_l1_linf_norm": 0.04523748159408569, + "block3_mlp_win_max_spectral_norm": 0.01744142919778824, + "block3_mlp_wout_update_fnorm": 0.07751083374023438, + "block3_mlp_wout_max_l1_linf_norm": 0.1788906753063202, + "block3_mlp_wout_max_spectral_norm": 0.02437894232571125, + "block7_q_update_fnorm": 0.04189508780837059, + "block7_q_max_l1_linf_norm": 0.04010423645377159, + "block7_q_max_spectral_norm": 0.006347224581986666, + "block7_k_update_fnorm": 0.04113638401031494, + "block7_k_max_l1_linf_norm": 0.04198860377073288, + "block7_k_max_spectral_norm": 0.007248471956700087, + "block7_v_update_fnorm": 0.04024706780910492, + "block7_v_max_l1_linf_norm": 0.03930272161960602, + "block7_v_max_spectral_norm": 0.006793639622628689, + "block7_o_update_fnorm": 0.04117074981331825, + "block7_o_max_l1_linf_norm": 0.03940371423959732, + "block7_o_max_spectral_norm": 0.006670936942100525, + "block7_mlp_win_update_fnorm": 0.08411052078008652, + "block7_mlp_win_max_l1_linf_norm": 0.043039269745349884, + "block7_mlp_win_max_spectral_norm": 0.011744133196771145, + "block7_mlp_wout_update_fnorm": 0.0760686844587326, + "block7_mlp_wout_max_l1_linf_norm": 0.17849937081336975, + "block7_mlp_wout_max_spectral_norm": 
0.02511504478752613, + "block11_q_update_fnorm": 0.04301013797521591, + "block11_q_max_l1_linf_norm": 0.041040606796741486, + "block11_q_max_spectral_norm": 0.006703045219182968, + "block11_k_update_fnorm": 0.04244574159383774, + "block11_k_max_l1_linf_norm": 0.04254152998328209, + "block11_k_max_spectral_norm": 0.006318200379610062, + "block11_v_update_fnorm": 0.041836172342300415, + "block11_v_max_l1_linf_norm": 0.04427371174097061, + "block11_v_max_spectral_norm": 0.007948045618832111, + "block11_o_update_fnorm": 0.042344603687524796, + "block11_o_max_l1_linf_norm": 0.043119609355926514, + "block11_o_max_spectral_norm": 0.010570655576884747, + "block11_mlp_win_update_fnorm": 0.08602436631917953, + "block11_mlp_win_max_l1_linf_norm": 0.05116493999958038, + "block11_mlp_win_max_spectral_norm": 0.012315121479332447, + "block11_mlp_wout_update_fnorm": 0.08207203447818756, + "block11_mlp_wout_max_l1_linf_norm": 0.16722920536994934, + "block11_mlp_wout_max_spectral_norm": 0.020626341924071312, + "total_sharpness": 0.006736064329743385, + "block_total_sharpness": 0.00908642541617155, + "v_norm_block": 0.4911716878414154, + "v_T_H_v_block": 0.002192096784710884, + "v_norm": 0.5971421003341675, + "ip_v_neg_g_hvp": 0.012629697099328041, + "cos_v_neg_g_hvp": 0.06230544298887253, + "g_hvp_norm": 0.33946049213409424, + "ip_v_neg_g_t": 0.012729490175843239, + "cos_v_neg_g_t": 0.09000738710165024, + "g_t_norm": 0.23684005439281464, + "g_norm": 0.33946049213409424, + "hv_norm": 0.1657731682062149, + "cos_v_hv": 0.02426440827548504, + "hg_norm": 9.036114692687988, + "cos_g_hg": 0.5708729028701782, + "v_parallel_norm": 0.0022307676263153553, + "v_perp_norm": 0.5971379280090332, + "embed_lm_head_v_norm": 0.3396012485027313, + "embed_lm_head_cos_v_neg_g": 0.09460648149251938, + "layer_1_v_norm": 0.14101123809814453, + "layer_1_cos_v_neg_g": 0.10908357053995132, + "layer_2_v_norm": 0.1407158374786377, + "layer_2_cos_v_neg_g": 0.047597844153642654, + "layer_3_v_norm": 0.14087079465389252, + "layer_3_cos_v_neg_g": 0.04795321822166443, + "layer_4_v_norm": 0.1397785097360611, + "layer_4_cos_v_neg_g": 0.05395695939660072, + "layer_5_v_norm": 0.13750247657299042, + "layer_5_cos_v_neg_g": 0.03412216529250145, + "layer_6_v_norm": 0.1396990865468979, + "layer_6_cos_v_neg_g": 0.04675035923719406, + "layer_7_v_norm": 0.140215203166008, + "layer_7_cos_v_neg_g": 0.05565205588936806, + "layer_8_v_norm": 0.14012624323368073, + "layer_8_cos_v_neg_g": 0.0549585223197937, + "layer_9_v_norm": 0.14259713888168335, + "layer_9_cos_v_neg_g": 0.05720861256122589, + "layer_10_v_norm": 0.145646870136261, + "layer_10_cos_v_neg_g": 0.06733375042676926, + "layer_11_v_norm": 0.14687174558639526, + "layer_11_cos_v_neg_g": 0.07905053347349167, + "layer_12_v_norm": 0.14610005915164948, + "layer_12_cos_v_neg_g": 0.12331611663103104, + "block0_q_v_norm": 0.04127304255962372, + "block0_q_cos_v_neg_g": 0.12793681025505066, + "block0_k_v_norm": 0.041127968579530716, + "block0_k_cos_v_neg_g": 0.12533530592918396, + "block0_v_v_norm": 0.040867842733860016, + "block0_v_cos_v_neg_g": 0.18271787464618683, + "block0_o_v_norm": 0.041929565370082855, + "block0_o_cos_v_neg_g": 0.13500389456748962, + "block0_mlp_win_v_norm": 0.08241355419158936, + "block0_mlp_win_cos_v_neg_g": 0.07795944809913635, + "block0_mlp_wout_v_norm": 0.07910217344760895, + "block0_mlp_wout_cos_v_neg_g": 0.1439865529537201, + "block3_q_v_norm": 0.041251666843891144, + "block3_q_cos_v_neg_g": 0.07635442912578583, + "block3_k_v_norm": 0.0401160754263401, + "block3_k_cos_v_neg_g": 
0.09222111850976944, + "block3_v_v_norm": 0.03904443606734276, + "block3_v_cos_v_neg_g": 0.027097361162304878, + "block3_o_v_norm": 0.03992171958088875, + "block3_o_cos_v_neg_g": 0.1401534229516983, + "block3_mlp_win_v_norm": 0.08418163657188416, + "block3_mlp_win_cos_v_neg_g": 0.05325018987059593, + "block3_mlp_wout_v_norm": 0.07751083374023438, + "block3_mlp_wout_cos_v_neg_g": 0.19323448836803436, + "block7_q_v_norm": 0.04189508780837059, + "block7_q_cos_v_neg_g": 0.06726741045713425, + "block7_k_v_norm": 0.04113638401031494, + "block7_k_cos_v_neg_g": 0.18997322022914886, + "block7_v_v_norm": 0.04024706780910492, + "block7_v_cos_v_neg_g": 0.03829217329621315, + "block7_o_v_norm": 0.04117074981331825, + "block7_o_cos_v_neg_g": 0.20109224319458008, + "block7_mlp_win_v_norm": 0.08411052078008652, + "block7_mlp_win_cos_v_neg_g": 0.07013776153326035, + "block7_mlp_wout_v_norm": 0.0760686844587326, + "block7_mlp_wout_cos_v_neg_g": 0.19179491698741913, + "block11_q_v_norm": 0.04301013797521591, + "block11_q_cos_v_neg_g": 0.11216028034687042, + "block11_k_v_norm": 0.04244574159383774, + "block11_k_cos_v_neg_g": 0.17546305060386658, + "block11_v_v_norm": 0.041836172342300415, + "block11_v_cos_v_neg_g": 0.06511996686458588, + "block11_o_v_norm": 0.042344603687524796, + "block11_o_cos_v_neg_g": 0.21414630115032196, + "block11_mlp_win_v_norm": 0.08602436631917953, + "block11_mlp_win_cos_v_neg_g": 0.12720932066440582, + "block11_mlp_wout_v_norm": 0.08207203447818756, + "block11_mlp_wout_cos_v_neg_g": 0.16429154574871063, + "embed_lm_head_sharpness": 0.0003842798469122499, + "layer_1_sharpness": 0.0035507152788341045, + "layer_2_sharpness": 0.00034637731732800603, + "layer_3_sharpness": 0.0011840583756566048, + "layer_4_sharpness": 0.0011666639475151896, + "layer_5_sharpness": 0.000823201728053391, + "layer_6_sharpness": 0.0014580824645236135, + "layer_7_sharpness": 0.0017694301204755902, + "layer_8_sharpness": 0.003199005965143442, + "layer_9_sharpness": 0.0027121335733681917, + "layer_10_sharpness": 0.0013620451791211963, + "layer_11_sharpness": 0.001457297708839178, + "layer_12_sharpness": 0.003219838719815016, + "block0_q_sharpness": 0.0006041037268005311, + "block0_k_sharpness": 0.0008166838088072836, + "block0_v_sharpness": 0.0018663856899365783, + "block0_o_sharpness": 0.002018754370510578, + "block0_mlp_win_sharpness": 0.0007775865960866213, + "block0_mlp_wout_sharpness": 0.0019038376631215215, + "block3_q_sharpness": 0.0005048344028182328, + "block3_k_sharpness": 0.0005350405699573457, + "block3_v_sharpness": 0.0016226788284257054, + "block3_o_sharpness": 0.0011034418130293489, + "block3_mlp_win_sharpness": 0.000173240463482216, + "block3_mlp_wout_sharpness": 0.000362962280632928, + "block7_q_sharpness": 0.00015080918092280626, + "block7_k_sharpness": 0.00016264196892734617, + "block7_v_sharpness": 0.005340640898793936, + "block7_o_sharpness": 0.0003730539174284786, + "block7_mlp_win_sharpness": 0.0008875599596649408, + "block7_mlp_wout_sharpness": 0.0014986201422289014, + "block11_q_sharpness": 0.00028328245389275253, + "block11_k_sharpness": 0.0002619176229927689, + "block11_v_sharpness": 0.0010934267193078995, + "block11_o_sharpness": 0.00028010777896270156, + "block11_mlp_win_sharpness": 0.0007335108239203691, + "block11_mlp_wout_sharpness": 0.003827550681307912, + "sum_layer_numerators": 0.00044957963678639523, + "block_diag_sharpness": 0.0018635454176425604, + "cross_layer_sharpness": 0.00722287999852899 +} \ No newline at end of file diff --git 
a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/training_log.txt b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..8d200538d0d1d5dbf8dfea5edbb505179b3f2a66 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_44_5b3c6861-d93c-48e1-b6be-fb835f5d0065/training_log.txt @@ -0,0 +1,11788 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +import nano_GPT_qkvonorm_pure +from nano_GPT_qkvonorm_pure import GPT, GPTConfig + +# Import debug utilities +# from debug_utils import setup_debugpy + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, 
which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes, + shuffle_files=False, random_seed=None): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + self.shuffle_files = shuffle_files + self.random_seed = random_seed + self._rng = random.Random(random_seed) if shuffle_files and random_seed is not None else None + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + if self.shuffle_files: + self._shuffle_files() + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + next_shard = (self.current_shard + 1) % len(self.files) + if next_shard == 0 and self.shuffle_files: + self._shuffle_files() + self.current_shard = next_shard + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + + def _shuffle_files(self): + if self._rng is not None: + self._rng.shuffle(self.files) + else: + random.shuffle(self.files) + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", 
"bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + + all_param_groups["embed_lm_head"] = list(model.lm_head.parameters()) + + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # Add fine-grained params for selected layers (0, 3, 7, 11) + selected_layers = [0, 3, 7, 11] + for layer_idx in selected_layers: + block = blocks[layer_idx] + prefix = f"block{layer_idx}" + # Attention: Q, K, V, O + all_param_groups[f"{prefix}_q"] = [block.attn.q_w.weight] + all_param_groups[f"{prefix}_k"] = [block.attn.k_w.weight] + all_param_groups[f"{prefix}_v"] = [block.attn.v_w.weight] + all_param_groups[f"{prefix}_o"] = [block.attn.c_proj.weight] + # MLP: c_fc (win) and c_proj (wout) + all_param_groups[f"{prefix}_mlp_win"] = [block.mlp.c_fc.weight] + all_param_groups[f"{prefix}_mlp_wout"] = [block.mlp.c_proj.weight] + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + original_flash = nano_GPT_qkvonorm_pure.FLASH + nano_GPT_qkvonorm_pure.FLASH = 0 + print0(f"[Enhanced Sharpness @ Step {step}] Disabled FLASH attention for HVP (was {original_flash})") + + # Get block parameter indices for cross-layer analysis (need this before loop) + block_param_indices = set() + for group_name, param_group in all_param_groups.items(): + if group_name.startswith("layer_"): + for p in param_group: + if id(p) in param_to_idx: + block_param_indices.add(param_to_idx[id(p)]) + + # Initialize accumulators for all quantities we need + grads_hvp = None + hvp_v_total = None + hvp_v_block = None + hvp_g_accum = None + layer_hvp_accum = {} + + + group_names_to_process = [gn for gn, pg in all_param_groups.items() + if pg and any(id(p) in param_to_idx for p in pg)] + + if last_training_batches is not None and len(last_training_batches) > 0: + + batch_iterator = [(x, y) for x, y in last_training_batches] + n_batches = len(batch_iterator) + print0(f"[Enhanced Sharpness @ Step {step}] Using {n_batches} microbatches for HVP (out of {grad_accum_steps} training microbatches)") + restore_loader = False + else: + # Fallback: use new batches from train_loader (should rarely happen) + print0(f"[Enhanced Sharpness @ Step {step}] WARNING: last_training_batches is None/empty, using {grad_accum_steps} new batches (inconsistent)") + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + n_batches = grad_accum_steps # Use same number as training for consistency + batch_iterator = [] + shard_was_changed = False + for _ in range(n_batches): + x_hvp, y_hvp = 
train_loader.next_batch() + batch_iterator.append((x_hvp, y_hvp)) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + restore_loader = True + + + print0(f"[Enhanced Sharpness @ Step {step}] Computing HVPs for {n_batches} microbatches") + for mb_idx, (x_hvp, y_hvp) in enumerate(batch_iterator): + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + + + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + grads_mb = torch.autograd.grad(loss_mb, model.parameters(), create_graph=True, allow_unused=True) + + # Compute H·v (total sharpness) + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_mb, update_direction_v) if g is not None) + + if not isinstance(v_dot_g_total, torch.Tensor): + v_dot_g_total = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_total_mb = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + # Compute H·v_block (block-only sharpness) + if block_param_indices: + v_dot_g_block = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in block_param_indices if grads_mb[i] is not None) + if not isinstance(v_dot_g_block, torch.Tensor): + v_dot_g_block = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_block_mb = torch.autograd.grad(v_dot_g_block, model.parameters(), retain_graph=True, allow_unused=True) + else: + + hvp_v_block_mb = [None] * len(list(model.parameters())) + + + g_dot_g = sum(torch.sum(g * g) for g in grads_mb if g is not None) + if not isinstance(g_dot_g, torch.Tensor): + g_dot_g = torch.tensor(0.0, device=device, requires_grad=True) + + + hvp_g_mb_raw = torch.autograd.grad(g_dot_g, model.parameters(), + retain_graph=True, allow_unused=True) + hvp_g_mb = [h / 2.0 if h is not None else None for h in hvp_g_mb_raw] + + # Compute per-layer H_kk·v_k (for layer-wise sharpness) + for group_idx, group_name in enumerate(group_names_to_process): + param_group = all_param_groups[group_name] + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + is_last_layer = (group_idx == len(group_names_to_process) - 1) + is_last_microbatch = (mb_idx == n_batches - 1) + need_retain = not (is_last_layer and is_last_microbatch) + + try: + v_dot_g_layer = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in indices if grads_mb[i] is not None) + + if not isinstance(v_dot_g_layer, torch.Tensor): + v_dot_g_layer = torch.tensor(0.0, device=device, requires_grad=True) + + hvp_layer_mb = torch.autograd.grad(v_dot_g_layer, model.parameters(), + retain_graph=need_retain, + allow_unused=True) + + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_layer_mb] + else: + layer_hvp_accum[group_name] = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + ] + + # Accumulate layer HVP + # if group_name not in layer_hvp_accum: + # layer_hvp_accum[group_name] = [h.detach() / n_batches if h is not None else None for h in hvp_layer_mb] + # else: + # layer_hvp_accum[group_name] = [ + # (h_acc + h.detach() / n_batches) if (h is not None and h_acc is not None) + # else (h.detach() / n_batches if h is not None else h_acc) + # for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + # ] + # del hvp_layer_mb, v_dot_g_layer + # torch.cuda.empty_cache() + except 
Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error computing layer HVP for '{group_name}' in microbatch {mb_idx}: {e}") + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = None + + # 6. Accumulate all quantities + if grads_hvp is None: + grads_hvp = [(g.detach() / n_batches).cpu() if g is not None else None for g in grads_mb] + hvp_v_total = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_total_mb] + hvp_v_block = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_block_mb] + hvp_g_accum = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_g_mb] + else: + grads_hvp = [ + (g_acc + (g.detach() / n_batches).cpu()) if (g is not None and g_acc is not None) + else ((g.detach() / n_batches).cpu() if g is not None else g_acc) + for g_acc, g in zip(grads_hvp, grads_mb) + ] + hvp_v_total = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_total, hvp_v_total_mb) + ] + hvp_v_block = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_block, hvp_v_block_mb) + ] + hvp_g_accum = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_g_accum, hvp_g_mb) + ] + + + + if mb_idx % max(1, n_batches // 4) == 0: + print0(f"[Enhanced Sharpness @ Step {step}] Processed microbatch {mb_idx + 1}/{n_batches}") + + + if restore_loader: + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + print0(f"[Enhanced Sharpness @ Step {step}] Finished computing all HVPs for {n_batches} microbatches") + grads_hvp = [g.to(device) if g is not None else None for g in grads_hvp] + hvp_v_total = [h.to(device) if h is not None else None for h in hvp_v_total] + hvp_v_block = [h.to(device) if h is not None else None for h in hvp_v_block] + hvp_g_accum = [h.to(device) if h is not None else None for h in hvp_g_accum] + for group_name in layer_hvp_accum: + if layer_hvp_accum[group_name] is not None: + layer_hvp_accum[group_name] = [h.to(device) if h is not None else None for h in layer_hvp_accum[group_name]] + # --- Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + # hvp_v_total is already computed in the loop above + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_v_total, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_total, torch.Tensor): + vhp_dot_v_total = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_total, torch.Tensor): + v_norm_sq_total = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + + print0(f"[Enhanced Sharpness @ Step {step}] Calculating BLOCK-ONLY total sharpness...") + # hvp_v_block is 
already computed in the loop above + if block_param_indices: # Only compute if there are block parameters + # Compute v_block^T H v_block (only sum over block indices) + vhp_dot_v_block = sum(torch.sum(hvp_v_block[i] * update_direction_v[i]) + for i in block_param_indices if hvp_v_block[i] is not None) + + v_norm_sq_block = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in block_param_indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_block, torch.Tensor): + vhp_dot_v_block = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_block, torch.Tensor): + v_norm_sq_block = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_block, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_block, op=dist.ReduceOp.AVG) + + if v_norm_sq_block.item() > 1e-12: + analysis_results["block_total_sharpness"] = (vhp_dot_v_block / v_norm_sq_block).item() + else: + analysis_results["block_total_sharpness"] = 0.0 + + analysis_results["v_norm_block"] = torch.sqrt(v_norm_sq_block).item() + analysis_results["v_T_H_v_block"] = vhp_dot_v_block.item() + else: + # No block parameters + analysis_results["block_total_sharpness"] = 0.0 + analysis_results["v_norm_block"] = 0.0 + analysis_results["v_T_H_v_block"] = 0.0 + + torch.cuda.empty_cache() + + # ---- Alignment metrics between update v and (negative) gradient g ---- + eps = 1e-12 + v_norm = torch.sqrt(v_norm_sq_total + eps) + analysis_results["v_norm"] = v_norm.item() + + # --- Version 1: g_hvp --- + ip_v_neg_g_hvp = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + g_hvp_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + + if not isinstance(ip_v_neg_g_hvp, torch.Tensor): + ip_v_neg_g_hvp = torch.tensor(0.0, device=device) + if not isinstance(g_hvp_norm_sq, torch.Tensor): + g_hvp_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_v_neg_g_hvp, op=dist.ReduceOp.AVG) + dist.all_reduce(g_hvp_norm_sq, op=dist.ReduceOp.AVG) + g_hvp_norm = torch.sqrt(g_hvp_norm_sq + eps) + analysis_results["ip_v_neg_g_hvp"] = ip_v_neg_g_hvp.item() + analysis_results["cos_v_neg_g_hvp"] = (ip_v_neg_g_hvp / (v_norm * g_hvp_norm + eps)).item() + analysis_results["g_hvp_norm"] = g_hvp_norm.item() + + # --- Version 2: g_t (original gradient that produced v) --- + # last_training_gradient is the actual gradient from training that led to the update v + if last_training_gradient is not None: + ip_v_neg_g_t = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, last_training_gradient) if g is not None) + g_t_norm_sq = sum(torch.sum(g * g) for g in last_training_gradient if g is not None) + dist.all_reduce(ip_v_neg_g_t, op=dist.ReduceOp.AVG) + dist.all_reduce(g_t_norm_sq, op=dist.ReduceOp.AVG) + g_t_norm = torch.sqrt(g_t_norm_sq + eps) + analysis_results["ip_v_neg_g_t"] = ip_v_neg_g_t.item() + analysis_results["cos_v_neg_g_t"] = (ip_v_neg_g_t / (v_norm * g_t_norm + eps)).item() + analysis_results["g_t_norm"] = g_t_norm.item() + else: + print0(f"[Enhanced Sharpness @ Step {step}] Warning: last_training_gradient is None, skipping g_t metrics") + + # Keep backward compatibility aliases (g_norm uses g_hvp for now) + g_norm_sq = g_hvp_norm_sq + g_norm = g_hvp_norm + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_v_total if hvp is not None) + if not isinstance(hv_norm_sq, torch.Tensor): + hv_norm_sq = torch.tensor(0.0, device=device) + 
dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg ---- + # hvp_g_accum is already computed in the loop above + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_accum) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_accum if hg is not None) + if not isinstance(ip_g_hg, torch.Tensor): + ip_g_hg = torch.tensor(0.0, device=device) + if not isinstance(hg_norm_sq, torch.Tensor): + hg_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + if not isinstance(v_parallel_norm_sq, torch.Tensor): + v_parallel_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(torch.clamp(v_norm_sq_total - v_parallel_norm_sq, min=0.0) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + # Ensure they are tensors + if not isinstance(v_norm_sq_layer, torch.Tensor): + v_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(g_norm_sq_layer, torch.Tensor): + g_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(ip_v_neg_g_layer, torch.Tensor): + ip_v_neg_g_layer = torch.tensor(0.0, device=device) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + if group_name not in layer_hvp_accum or layer_hvp_accum[group_name] is None: + print0(f"[Enhanced Sharpness @ Step {step}] No HVP data for '{group_name}', skipping") + analysis_results[f"{group_name}_sharpness"] = 0.0 + continue + + hvp_group_result = layer_hvp_accum[group_name] + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_group, torch.Tensor): + vhp_dot_v_group = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_group, torch.Tensor): + v_norm_sq_group = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- Calculate block-diagonal approximation and cross-layer interaction --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating block-diagonal and cross-layer sharpness...") + + sum_layer_numerators = 0.0 + for layer in range(1, NUM_LAYERS + 1): + layer_name = f"layer_{layer}" + if f"{layer_name}_sharpness" in analysis_results and f"{layer_name}_v_norm" in analysis_results: + s_k = analysis_results[f"{layer_name}_sharpness"] + v_k_norm = analysis_results[f"{layer_name}_v_norm"] + sum_layer_numerators += s_k * (v_k_norm ** 2) + + analysis_results["sum_layer_numerators"] = sum_layer_numerators + + # Block-diagonal sharpness (using block ||v||²) + v_norm_block = analysis_results.get("v_norm_block", 0) + v_norm_sq_block_val = v_norm_block ** 2 if v_norm_block else 1e-12 + + if v_norm_sq_block_val > 1e-12: + analysis_results["block_diag_sharpness"] = sum_layer_numerators / v_norm_sq_block_val + else: + analysis_results["block_diag_sharpness"] = 0.0 + + # Cross-layer interaction = block_total - block_diag + block_total = analysis_results.get("block_total_sharpness", 0) + block_diag = analysis_results.get("block_diag_sharpness", 0) + analysis_results["cross_layer_sharpness"] = block_total - block_diag + + print0(f"[Enhanced Sharpness @ Step {step}] block_total={block_total:.6f}, block_diag={block_diag:.6f}, cross_layer={block_total - block_diag:.6f}") + + # --- 8. 
Cleanup --- + nano_GPT_qkvonorm_pure.FLASH = original_flash + print0(f"[Enhanced Sharpness @ Step {step}] Restored FLASH attention to {original_flash}") + + print0(f"[Enhanced Sharpness @ Step {step}] Restoring parameters back to θ_{{t+1}}...") + with torch.no_grad(): + for p, v in zip(model.parameters(), update_direction_v): + p.data.add_(v) + + if prev_training_mode: + model.train() + else: + model.eval() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del hvp_v_total, hvp_v_block, hvp_g_accum, layer_hvp_accum + del vhp_dot_v_total, v_norm_sq_total + del vhp_dot_v_block, v_norm_sq_block + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. + """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + + # Version 1: g_hvp (new batch, computed at θ_t during HVP calculation) + if 'cos_v_neg_g_hvp' in results: + misc_parts.append(f"cos_v_-g_hvp:{results['cos_v_neg_g_hvp']:.4e}") + if 'g_hvp_norm' in results: + misc_parts.append(f"g_hvp_norm:{results['g_hvp_norm']:.4e}") + + # 
Version 2: g_t (original gradient that produced v) + if 'cos_v_neg_g_t' in results: + misc_parts.append(f"cos_v_-g_t:{results['cos_v_neg_g_t']:.4e}") + if 'g_t_norm' in results: + misc_parts.append(f"g_t_norm:{results['g_t_norm']:.4e}") + + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d8|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") 
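+ # Illustrative note on the schedule flags above (assumption: the schedule mirrors the usual llm.c-style get_lr; the actual function is defined elsewhere in this script and may differ):
+ #   step < warmup_iters : lr = adam_lr * (step + 1) / warmup_iters   (linear warmup)
+ #   step >= warmup_iters: lr decays (cosine) from adam_lr down to adam_lr * lr_decay_frac
+ # so lr_decay_frac=1.0 keeps the post-warmup lr constant, while lr_decay_frac=0.0 decays it to zero.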
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + parser.add_argument("--shuffle_files", action="store_true") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d8", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # Setup debugpy for remote debugging (only activates if DEBUGPY env var is set) + # setup_debugpy(rank=ddp_rank, force=True) + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + nano_GPT_qkvonorm_pure.FLASH = args.flash # Set module-level FLASH for training + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d8": GPTConfig(block_size=1024, vocab_size=50257, n_layer=8, n_head=8, n_embd=512), + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader( + args.input_bin, B, T, ddp_rank, ddp_world_size, + shuffle_files=args.shuffle_files, random_seed=args.seed + ) + val_loader = None + if 
args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests + if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + base_module = model.module if ddp else model + # If compiled, unwrap to get the original module + if hasattr(base_module, "_orig_mod"): + base_module = base_module._orig_mod + + raw_params = list(raw_model_uncompiled.parameters()) + train_params = list(base_module.parameters()) + + assert len(raw_params) == len(train_params), \ + f"Parameter count mismatch: raw_model_uncompiled has {len(raw_params)}, training model has {len(train_params)}" + for i, (rp, tp) in enumerate(zip(raw_params, train_params)): + assert rp.data_ptr() == tp.data_ptr(), \ + f"Parameter {i} has different data_ptr: raw_model_uncompiled and training model do not share parameters!" + print0(f"[Verified] raw_model_uncompiled and training model share the same {len(raw_params)} Parameter objects") + + last_training_update = None + last_training_gradient = None # Store the original gradient that produced the update + last_training_batches = None # Store ALL microbatches (x, y) for consistent HVP calculation + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it, base_lr): + min_lr = base_lr * args.lr_decay_frac + cooldown_iters = int(args.num_iterations * 0.2) + # 1) Warmup: linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it + 1) / args.warmup_iters + # 3) Decay: linear decay from base_lr to min_lr in the last 
cooldown_iters steps + cooldown_start = args.num_iterations - cooldown_iters + if it >= cooldown_start: + decay_ratio = (it - cooldown_start) / cooldown_iters + return base_lr - decay_ratio * (base_lr - min_lr) + # 2) Stable: constant learning rate at base_lr + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + for optimizer in optimizers: + if 
isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + last_training_update=last_training_update, # Pass the real update captured from training + last_training_gradient=last_training_gradient, # Pass the original gradient g_t + last_training_batches=last_training_batches # Pass ALL microbatches for consistent HVP + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + + # Pre-check if we need to collect microbatches for sharpness analysis + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + + microbatches_this_step = [] if will_analyze_sharpness_next else None + + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + + # Store ALL microbatches for memory-efficient HVP calculation + if will_analyze_sharpness_next: + microbatches_this_step.append((x.detach().clone(), y.detach().clone())) + + if ddp: + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + + #no clipping + # norm = torch.nn.utils.clip_grad_norm_(raw_model_uncompiled.parameters(), float('inf')) + + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
+ print(raw_model_uncompiled.transformer.h[0].attn.q_w.weight[:5,:5]) + params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + # Save the original gradient g_t that will produce the update v + last_training_gradient = [ + p.grad.detach().clone() if p.grad is not None else torch.zeros_like(p) + for p in raw_model_uncompiled.parameters() + ] + # Capture ALL microbatches for consistent HVP calculation + # This ensures H is computed on the exact same objective as g_t and v + last_training_batches = microbatches_this_step # Already cloned above + else: + params_before_optimizer_step = None + last_training_batches = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p.detach() - p_before + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logfile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group() +step:0 validation loss:11.020913 +step:0 train loss:11.019318 +step:1 train loss:10.939599 +step:2 train loss:10.780504 +step:3 train loss:10.590445 +step:4 train loss:10.412065 +step:5 train loss:10.229995 +step:6 train loss:10.092650 +step:7 train loss:10.023896 +step:8 train loss:9.901325 +step:9 train loss:9.827549 +step:10 train loss:9.801296 +step:11 train loss:9.735043 +step:12 train loss:9.693127 +step:13 train loss:9.655843 +step:14 train loss:9.627496 +step:15 train loss:9.594902 +step:16 train loss:9.592788 +step:17 train loss:9.554340 +step:18 train loss:9.541218 +step:19 train loss:9.484360 +step:20 train loss:9.487333 +step:21 train loss:9.455070 +step:22 train loss:9.409922 +step:23 train
loss:9.345986 +step:24 train loss:9.332457 +step:25 train loss:9.237272 +step:26 train loss:9.256731 +step:27 train loss:9.202371 +step:28 train loss:9.161745 +step:29 train loss:9.090217 +step:30 train loss:9.049913 +step:31 train loss:9.010789 +step:32 train loss:8.978122 +step:33 train loss:8.911070 +step:34 train loss:8.892333 +step:35 train loss:8.819811 +step:36 train loss:8.797024 +step:37 train loss:8.797047 +step:38 train loss:8.646420 +step:39 train loss:8.645883 +step:40 train loss:8.621290 +step:41 train loss:8.573369 +step:42 train loss:8.562841 +step:43 train loss:8.491901 +step:44 train loss:8.419748 +step:45 train loss:8.401327 +step:46 train loss:8.379103 +step:47 train loss:8.293724 +step:48 train loss:8.239887 +step:49 train loss:8.244723 +step:50 train loss:8.138321 +step:51 train loss:8.096309 +step:52 train loss:8.037096 +step:53 train loss:8.031117 +step:54 train loss:7.991916 +step:55 train loss:7.924266 +step:56 train loss:7.882890 +step:57 train loss:7.872618 +step:58 train loss:7.818125 +step:59 train loss:7.774903 +step:60 train loss:7.734961 +step:61 train loss:7.701390 +step:62 train loss:7.643545 +step:63 train loss:7.556694 +step:64 train loss:7.559827 +step:65 train loss:7.583436 +step:66 train loss:7.550489 +step:67 train loss:7.496937 +step:68 train loss:7.446686 +step:69 train loss:7.483690 +step:70 train loss:7.399381 +step:71 train loss:7.360590 +step:72 train loss:7.320200 +step:73 train loss:7.252389 +step:74 train loss:7.298513 +step:75 train loss:7.324472 +step:76 train loss:7.256289 +step:77 train loss:7.226240 +step:78 train loss:7.189982 +step:79 train loss:7.211224 +step:80 train loss:7.172900 +step:81 train loss:7.099983 +step:82 train loss:7.183476 +step:83 train loss:7.069539 +step:84 train loss:7.102027 +step:85 train loss:7.112515 +step:86 train loss:7.080112 +step:87 train loss:7.051416 +step:88 train loss:7.014016 +step:89 train loss:7.073475 +step:90 train loss:6.964901 +step:91 train loss:6.959681 +step:92 train loss:6.998582 +step:93 train loss:6.909949 +step:94 train loss:7.028996 +step:95 train loss:6.852673 +step:96 train loss:6.880105 +step:97 train loss:6.925738 +step:98 train loss:6.889500 +step:99 train loss:6.843555 +step:100 train loss:6.817048 +step:101 train loss:6.851866 +step:102 train loss:6.774682 +step:103 train loss:6.796296 +step:104 train loss:6.806001 +step:105 train loss:6.750739 +step:106 train loss:6.796951 +step:107 train loss:6.761560 +step:108 train loss:6.681399 +step:109 train loss:6.693151 +step:110 train loss:6.723751 +step:111 train loss:6.764154 +step:112 train loss:6.800484 +step:113 train loss:6.690039 +step:114 train loss:6.731391 +step:115 train loss:6.716379 +step:116 train loss:6.625796 +step:117 train loss:6.701760 +step:118 train loss:6.661654 +step:119 train loss:6.607496 +step:120 train loss:6.651906 +step:121 train loss:6.568211 +step:122 train loss:6.451126 +step:123 train loss:6.641138 +step:124 train loss:6.671671 +step:125 train loss:6.621181 +step:126 train loss:6.578452 +step:127 train loss:6.535362 +step:128 train loss:6.814815 +step:129 train loss:6.566074 +step:130 train loss:6.590397 +step:131 train loss:6.639321 +step:132 train loss:6.577695 +step:133 train loss:6.491560 +step:134 train loss:6.460647 +step:135 train loss:6.527779 +step:136 train loss:6.488875 +step:137 train loss:6.569042 +step:138 train loss:6.464066 +step:139 train loss:6.577836 +step:140 train loss:6.501263 +step:141 train loss:6.493639 +step:142 train loss:6.540880 +step:143 train loss:6.392611 +step:144 train 
loss:6.498423 +step:145 train loss:6.388549 +step:146 train loss:6.426667 +step:147 train loss:6.447689 +step:148 train loss:6.421114 +step:149 train loss:6.452721 +step:150 train loss:6.374156 +step:151 train loss:6.456779 +step:152 train loss:6.384124 +step:153 train loss:6.411860 +step:154 train loss:6.386798 +step:155 train loss:6.412163 +step:156 train loss:6.360063 +step:157 train loss:6.350158 +step:158 train loss:6.333381 +step:159 train loss:6.393294 +step:160 train loss:6.235693 +step:161 train loss:6.298604 +step:162 train loss:6.319305 +step:163 train loss:6.359655 +step:164 train loss:6.359322 +step:165 train loss:6.310955 +step:166 train loss:6.363399 +step:167 train loss:6.341150 +step:168 train loss:6.337911 +step:169 train loss:6.269859 +step:170 train loss:6.363526 +step:171 train loss:6.265407 +step:172 train loss:6.298483 +step:173 train loss:6.256892 +step:174 train loss:6.415213 +step:175 train loss:6.353312 +step:176 train loss:6.260769 +step:177 train loss:6.329672 +step:178 train loss:6.275146 +step:179 train loss:6.310946 +step:180 train loss:6.241348 +step:181 train loss:6.182262 +step:182 train loss:6.294034 +step:183 train loss:6.260390 +step:184 train loss:6.282848 +step:185 train loss:6.210838 +step:186 train loss:6.262455 +step:187 train loss:6.220402 +step:188 train loss:6.151246 +step:189 train loss:6.281261 +step:190 train loss:6.177096 +step:191 train loss:6.267284 +step:192 train loss:6.188325 +step:193 train loss:6.179232 +step:194 train loss:6.174791 +step:195 train loss:6.300673 +step:196 train loss:6.181525 +step:197 train loss:6.170199 +step:198 train loss:6.216080 +step:199 train loss:6.198137 +step:200 train loss:6.198047 +step:201 train loss:6.221830 +step:202 train loss:6.172457 +step:203 train loss:6.133059 +step:204 train loss:6.186894 +step:205 train loss:6.234404 +step:206 train loss:6.169206 +step:207 train loss:6.124318 +step:208 train loss:6.180505 +step:209 train loss:6.102150 +step:210 train loss:6.119983 +step:211 train loss:6.153711 +step:212 train loss:6.131208 +step:213 train loss:6.132197 +step:214 train loss:6.202446 +step:215 train loss:6.109512 +step:216 train loss:6.142650 +step:217 train loss:6.102406 +step:218 train loss:6.119936 +step:219 train loss:6.089016 +step:220 train loss:6.130110 +step:221 train loss:6.099437 +step:222 train loss:6.121210 +step:223 train loss:6.050259 +step:224 train loss:6.087766 +step:225 train loss:6.100945 +step:226 train loss:6.114725 +step:227 train loss:6.104265 +step:228 train loss:6.084316 +step:229 train loss:6.097190 +step:230 train loss:6.065573 +step:231 train loss:6.102691 +step:232 train loss:6.078681 +step:233 train loss:6.107881 +step:234 train loss:6.052579 +step:235 train loss:6.040366 +step:236 train loss:5.982101 +step:237 train loss:6.021920 +step:238 train loss:5.986826 +step:239 train loss:5.999174 +step:240 train loss:6.010345 +step:241 train loss:6.064404 +step:242 train loss:6.079876 +step:243 train loss:6.064175 +step:244 train loss:6.063880 +step:245 train loss:6.024018 +step:246 train loss:6.020537 +step:247 train loss:6.094893 +step:248 train loss:5.962801 +step:249 train loss:6.024361 +step:250 validation loss:6.040835 +step:250 train loss:5.965185 +step:251 train loss:6.041079 +step:252 train loss:6.023493 +step:253 train loss:6.009124 +step:254 train loss:6.026601 +step:255 train loss:5.977474 +step:256 train loss:5.881065 +step:257 train loss:5.971797 +step:258 train loss:6.048206 +step:259 train loss:5.952808 +step:260 train loss:6.034847 +step:261 train 
loss:5.949229 +step:262 train loss:5.978595 +step:263 train loss:5.925122 +step:264 train loss:6.003308 +step:265 train loss:5.894518 +step:266 train loss:5.871116 +step:267 train loss:5.953633 +step:268 train loss:5.964264 +step:269 train loss:5.871815 +step:270 train loss:5.921533 +step:271 train loss:5.946996 +step:272 train loss:5.961310 +step:273 train loss:5.879282 +step:274 train loss:5.955090 +step:275 train loss:5.923884 +step:276 train loss:5.901565 +step:277 train loss:5.892679 +step:278 train loss:5.899892 +step:279 train loss:5.877782 +step:280 train loss:5.912817 +step:281 train loss:5.911452 +step:282 train loss:5.876734 +step:283 train loss:5.886503 +step:284 train loss:5.898723 +step:285 train loss:5.908611 +step:286 train loss:5.759116 +step:287 train loss:5.730733 +step:288 train loss:5.940638 +step:289 train loss:5.865333 +step:290 train loss:5.951217 +step:291 train loss:5.947510 +step:292 train loss:5.908130 +step:293 train loss:5.939294 +step:294 train loss:5.998074 +step:295 train loss:5.910047 +step:296 train loss:5.889092 +step:297 train loss:5.856164 +step:298 train loss:5.918373 +step:299 train loss:5.909555 +step:300 train loss:5.854611 +step:301 train loss:5.862381 +step:302 train loss:5.870306 +step:303 train loss:5.862842 +step:304 train loss:5.835110 +step:305 train loss:5.880900 +step:306 train loss:5.878647 +step:307 train loss:5.850373 +step:308 train loss:5.906260 +step:309 train loss:5.828201 +step:310 train loss:5.843477 +step:311 train loss:5.677335 +step:312 train loss:5.882405 +step:313 train loss:5.837381 +step:314 train loss:5.818961 +step:315 train loss:5.878358 +step:316 train loss:5.784396 +step:317 train loss:5.858214 +step:318 train loss:5.921634 +step:319 train loss:5.841808 +step:320 train loss:5.845387 +step:321 train loss:5.807644 +step:322 train loss:5.752683 +step:323 train loss:5.854353 +step:324 train loss:5.781883 +step:325 train loss:5.812315 +step:326 train loss:5.802747 +step:327 train loss:5.783391 +step:328 train loss:5.821871 +step:329 train loss:5.772622 +step:330 train loss:5.730992 +step:331 train loss:5.764808 +step:332 train loss:5.855624 +step:333 train loss:5.782030 +step:334 train loss:5.831771 +step:335 train loss:5.733642 +step:336 train loss:5.714529 +step:337 train loss:5.689748 +step:338 train loss:5.745840 +step:339 train loss:5.776329 +step:340 train loss:5.784597 +step:341 train loss:5.705564 +step:342 train loss:5.760881 +step:343 train loss:5.748792 +step:344 train loss:5.633541 +step:345 train loss:5.788221 +step:346 train loss:5.689075 +step:347 train loss:5.699289 +step:348 train loss:5.697671 +step:349 train loss:5.607145 +step:350 train loss:5.707263 +step:351 train loss:5.692446 +step:352 train loss:5.731717 +step:353 train loss:5.679743 +step:354 train loss:5.773420 +step:355 train loss:5.739239 +step:356 train loss:5.744692 +step:357 train loss:5.676040 +step:358 train loss:5.692961 +step:359 train loss:5.737564 +step:360 train loss:5.724693 +step:361 train loss:5.719179 +step:362 train loss:5.656896 +step:363 train loss:5.759917 +step:364 train loss:5.674397 +step:365 train loss:5.660882 +step:366 train loss:5.763299 +step:367 train loss:5.709394 +step:368 train loss:5.705493 +step:369 train loss:5.723874 +step:370 train loss:5.671997 +step:371 train loss:5.698920 +step:372 train loss:5.699227 +step:373 train loss:5.645190 +step:374 train loss:5.668143 +step:375 train loss:5.679670 +step:376 train loss:5.683764 +step:377 train loss:5.690872 +step:378 train loss:5.748765 +step:379 train loss:5.756635 
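The training loop above appends flat records of the form "step:<n> train loss:<x>" and "step:<n> validation loss:<x> ..." to training_log.txt. A minimal offline parsing sketch, assuming only the record format shown here (the function name and default path are illustrative, not part of the script):

import re

def parse_training_log(path="training_log.txt"):
    # Records look like "step:123 train loss:4.567890" or
    # "step:500 validation loss:5.488581 total_sharp:...". Everything else in the
    # file (the echoed script source, the sharpness key:value tail) is skipped.
    train, val = {}, {}
    pattern = re.compile(r"step:(\d+) (train|validation) loss:([0-9.]+)")
    with open(path) as f:
        for line in f:
            m = pattern.search(line)
            if m is None:
                continue
            step, kind, loss = int(m.group(1)), m.group(2), float(m.group(3))
            (train if kind == "train" else val)[step] = loss
    return train, val

if __name__ == "__main__":
    train_losses, val_losses = parse_training_log()
    print(f"parsed {len(train_losses)} train and {len(val_losses)} val records")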
+step:380 train loss:5.657117 +step:381 train loss:5.696990 +step:382 train loss:5.610474 +step:383 train loss:5.661269 +step:384 train loss:5.639688 +step:385 train loss:5.596274 +step:386 train loss:5.641492 +step:387 train loss:5.571531 +step:388 train loss:5.617134 +step:389 train loss:5.628772 +step:390 train loss:5.641274 +step:391 train loss:5.705205 +step:392 train loss:5.613378 +step:393 train loss:5.651017 +step:394 train loss:5.618328 +step:395 train loss:5.576567 +step:396 train loss:5.641652 +step:397 train loss:5.649399 +step:398 train loss:5.673612 +step:399 train loss:5.650095 +step:400 train loss:5.645277 +step:401 train loss:5.658878 +step:402 train loss:5.621050 +step:403 train loss:5.653700 +step:404 train loss:5.625860 +step:405 train loss:5.632454 +step:406 train loss:5.562904 +step:407 train loss:5.573284 +step:408 train loss:5.618708 +step:409 train loss:5.545700 +step:410 train loss:5.595541 +step:411 train loss:5.572913 +step:412 train loss:5.537778 +step:413 train loss:5.549325 +step:414 train loss:5.538311 +step:415 train loss:5.559069 +step:416 train loss:5.587592 +step:417 train loss:5.603300 +step:418 train loss:5.604542 +step:419 train loss:5.638249 +step:420 train loss:5.683340 +step:421 train loss:5.704965 +step:422 train loss:5.676604 +step:423 train loss:5.592366 +step:424 train loss:5.622327 +step:425 train loss:5.688369 +step:426 train loss:5.647718 +step:427 train loss:5.564935 +step:428 train loss:5.578318 +step:429 train loss:5.585166 +step:430 train loss:5.567132 +step:431 train loss:5.563900 +step:432 train loss:5.532077 +step:433 train loss:5.532949 +step:434 train loss:5.539957 +step:435 train loss:5.526912 +step:436 train loss:5.512664 +step:437 train loss:5.573656 +step:438 train loss:5.527311 +step:439 train loss:5.538904 +step:440 train loss:5.518072 +step:441 train loss:5.555076 +step:442 train loss:5.514212 +step:443 train loss:5.562013 +step:444 train loss:5.520456 +step:445 train loss:5.591200 +step:446 train loss:5.516685 +step:447 train loss:5.499582 +step:448 train loss:5.453669 +step:449 train loss:5.538671 +step:450 train loss:5.588294 +step:451 train loss:5.517155 +step:452 train loss:5.471742 +step:453 train loss:5.518073 +step:454 train loss:5.440565 +step:455 train loss:5.473704 +step:456 train loss:5.473995 +step:457 train loss:5.521732 +step:458 train loss:5.434963 +step:459 train loss:5.434136 +step:460 train loss:5.499787 +step:461 train loss:5.529778 +step:462 train loss:5.563507 +step:463 train loss:5.478127 +step:464 train loss:5.542959 +step:465 train loss:5.458164 +step:466 train loss:5.532946 +step:467 train loss:5.419158 +step:468 train loss:5.487073 +step:469 train loss:5.459937 +step:470 train loss:5.481127 +step:471 train loss:5.488968 +step:472 train loss:5.442864 +step:473 train loss:5.424582 +step:474 train loss:5.050878 +step:475 train loss:5.579009 +step:476 train loss:5.833452 +step:477 train loss:5.933368 +step:478 train loss:5.731331 +step:479 train loss:5.634910 +step:480 train loss:5.674174 +step:481 train loss:5.623572 +step:482 train loss:5.603337 +step:483 train loss:5.618077 +step:484 train loss:5.624151 +step:485 train loss:5.634243 +step:486 train loss:5.587082 +step:487 train loss:5.545895 +step:488 train loss:5.588971 +step:489 train loss:5.589098 +step:490 train loss:5.552142 +step:491 train loss:5.527897 +step:492 train loss:5.587928 +step:493 train loss:5.482854 +step:494 train loss:5.563007 +step:495 train loss:5.505128 +step:496 train loss:5.412337 +step:497 train loss:5.563795 +step:498 
train loss:5.452711 +step:499 train loss:5.459018 +step:500 validation loss:5.488581 total_sharp:3.0402e-02 L1_sharp:1.0624e-01 L2_sharp:1.4830e-02 L3_sharp:8.7695e-03 L4_sharp:1.0915e-02 L5_sharp:1.0688e-02 L6_sharp:8.4482e-03 L7_sharp:7.5587e-03 L8_sharp:4.6090e-03 L9_sharp:4.5901e-03 L10_sharp:4.1673e-03 L11_sharp:5.1099e-03 L12_sharp:3.4979e-02 total_fnorm:1.1539e+00 total_l1_linf:9.4606e+03 total_spectral:1.1539e+00 L1_fnorm:1.7564e-01 L2_fnorm:1.7764e-01 L3_fnorm:1.8699e-01 L4_fnorm:1.9140e-01 L5_fnorm:1.9066e-01 L6_fnorm:1.8919e-01 L7_fnorm:1.9777e-01 L8_fnorm:2.0146e-01 L9_fnorm:2.0761e-01 L10_fnorm:2.0880e-01 L11_fnorm:2.0118e-01 L12_fnorm:1.8116e-01 L1_l1linf:2.0064e-01 L2_l1linf:2.2137e-01 L3_l1linf:2.2851e-01 L4_l1linf:2.4451e-01 L5_l1linf:2.4559e-01 L6_l1linf:2.4030e-01 L7_l1linf:2.5864e-01 L8_l1linf:2.7251e-01 L9_l1linf:2.6456e-01 L10_l1linf:2.7646e-01 L11_l1linf:2.7702e-01 L12_l1linf:2.6815e-01 L1_spectral:4.1557e-02 L2_spectral:4.1550e-02 L3_spectral:4.6217e-02 L4_spectral:4.5052e-02 L5_spectral:4.0964e-02 L6_spectral:4.0544e-02 L7_spectral:4.0116e-02 L8_spectral:4.0870e-02 L9_spectral:4.0963e-02 L10_spectral:4.2025e-02 L11_spectral:3.9147e-02 L12_spectral:5.5564e-02 v_norm:1.1539e+00 cos_v_-g_hvp:9.8956e-02 g_hvp_norm:4.5044e-01 cos_v_-g_t:1.1457e-01 g_t_norm:3.9076e-01 hv_norm:4.2729e-01 cos_v_hv:8.2099e-02 hg_norm:5.7118e+00 cos_g_hg:6.4825e-01 v_par:1.8312e-02 v_perp:1.1537e+00 L1_cos_v_neg_g:1.4226e-01 L1_v_norm:1.7564e-01 L2_cos_v_neg_g:1.4778e-01 L2_v_norm:1.7764e-01 L3_cos_v_neg_g:1.6420e-01 L3_v_norm:1.8699e-01 L4_cos_v_neg_g:1.9830e-01 L4_v_norm:1.9140e-01 L5_cos_v_neg_g:2.1092e-01 L5_v_norm:1.9066e-01 L6_cos_v_neg_g:1.9368e-01 L6_v_norm:1.8919e-01 L7_cos_v_neg_g:2.0540e-01 L7_v_norm:1.9777e-01 L8_cos_v_neg_g:1.9925e-01 L8_v_norm:2.0146e-01 L9_cos_v_neg_g:2.0227e-01 L9_v_norm:2.0761e-01 L10_cos_v_neg_g:2.0318e-01 L10_v_norm:2.0880e-01 L11_cos_v_neg_g:2.0335e-01 L11_v_norm:2.0118e-01 L12_cos_v_neg_g:2.2995e-01 L12_v_norm:1.8116e-01 +step:500 train loss:5.528506 +step:501 train loss:5.386589 +step:502 train loss:5.462484 +step:503 train loss:5.470129 +step:504 train loss:5.384551 +step:505 train loss:5.402042 +step:506 train loss:5.475535 +step:507 train loss:5.317663 +step:508 train loss:5.379948 +step:509 train loss:5.351474 +step:510 train loss:5.325727 +step:511 train loss:5.363517 +step:512 train loss:5.442054 +step:513 train loss:5.370622 +step:514 train loss:5.327864 +step:515 train loss:5.359918 +step:516 train loss:5.372025 +step:517 train loss:5.337099 +step:518 train loss:5.300568 +step:519 train loss:5.398746 +step:520 train loss:5.351091 +step:521 train loss:5.462805 +step:522 train loss:5.406851 +step:523 train loss:5.403727 +step:524 train loss:5.429697 +step:525 train loss:5.524704 +step:526 train loss:5.383273 +step:527 train loss:5.380990 +step:528 train loss:5.424793 +step:529 train loss:5.325537 +step:530 train loss:5.421532 +step:531 train loss:5.339947 +step:532 train loss:5.349472 +step:533 train loss:5.314806 +step:534 train loss:5.339084 +step:535 train loss:5.334894 +step:536 train loss:5.369135 +step:537 train loss:5.281203 +step:538 train loss:5.314151 +step:539 train loss:5.260213 +step:540 train loss:5.332077 +step:541 train loss:5.337617 +step:542 train loss:5.337509 +step:543 train loss:5.312217 +step:544 train loss:5.381707 +step:545 train loss:5.308294 +step:546 train loss:5.344139 +step:547 train loss:5.339679 +step:548 train loss:5.321207 +step:549 train loss:5.265561 +step:550 train loss:5.258058 +step:551 train loss:5.271546 
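The step-500 validation record above reports per-layer update norms (L*_fnorm, L*_l1linf, L*_spectral). The exact reductions are computed inside calculate_comprehensive_sharpness, which is not part of this excerpt; the sketch below only restates the standard Frobenius and spectral norms of one captured update matrix, with a hypothetical shape:

import torch

def update_matrix_norms(delta_w: torch.Tensor) -> dict:
    # delta_w: the captured update for one 2D parameter, i.e. W_after - W_before
    return {
        "fnorm": torch.linalg.matrix_norm(delta_w, ord="fro").item(),
        "spectral": torch.linalg.matrix_norm(delta_w, ord=2).item(),  # largest singular value
    }

if __name__ == "__main__":
    dw = torch.randn(768, 768) * 1e-3  # hypothetical update for one projection matrix
    print(update_matrix_norms(dw))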
+step:552 train loss:5.292977 +step:553 train loss:5.283952 +step:554 train loss:5.374979 +step:555 train loss:5.271796 +step:556 train loss:5.258151 +step:557 train loss:5.312759 +step:558 train loss:5.354350 +step:559 train loss:5.311193 +step:560 train loss:5.237387 +step:561 train loss:5.315302 +step:562 train loss:5.248723 +step:563 train loss:5.336177 +step:564 train loss:5.326389 +step:565 train loss:5.291827 +step:566 train loss:5.384725 +step:567 train loss:5.265591 +step:568 train loss:5.304709 +step:569 train loss:5.277782 +step:570 train loss:5.263056 +step:571 train loss:5.441668 +step:572 train loss:5.282630 +step:573 train loss:5.337569 +step:574 train loss:5.275324 +step:575 train loss:5.301198 +step:576 train loss:5.216668 +step:577 train loss:5.203086 +step:578 train loss:5.262468 +step:579 train loss:5.253185 +step:580 train loss:5.256138 +step:581 train loss:5.234924 +step:582 train loss:5.247763 +step:583 train loss:5.300905 +step:584 train loss:5.268372 +step:585 train loss:5.288108 +step:586 train loss:5.226513 +step:587 train loss:5.247007 +step:588 train loss:5.248233 +step:589 train loss:5.246964 +step:590 train loss:5.336023 +step:591 train loss:5.231532 +step:592 train loss:5.288656 +step:593 train loss:5.230997 +step:594 train loss:5.252672 +step:595 train loss:5.239832 +step:596 train loss:5.182401 +step:597 train loss:5.251442 +step:598 train loss:5.267386 +step:599 train loss:5.208778 +step:600 train loss:5.125962 +step:601 train loss:5.189446 +step:602 train loss:5.261992 +step:603 train loss:5.208284 +step:604 train loss:5.183115 +step:605 train loss:5.236066 +step:606 train loss:5.123605 +step:607 train loss:5.160896 +step:608 train loss:5.148006 +step:609 train loss:5.144017 +step:610 train loss:5.166869 +step:611 train loss:5.089270 +step:612 train loss:5.172163 +step:613 train loss:5.205201 +step:614 train loss:5.071283 +step:615 train loss:5.198057 +step:616 train loss:5.050453 +step:617 train loss:5.127085 +step:618 train loss:5.112651 +step:619 train loss:5.146495 +step:620 train loss:5.168139 +step:621 train loss:5.081375 +step:622 train loss:5.121465 +step:623 train loss:5.099361 +step:624 train loss:5.128403 +step:625 train loss:5.200561 +step:626 train loss:5.059418 +step:627 train loss:5.100639 +step:628 train loss:5.066375 +step:629 train loss:5.102079 +step:630 train loss:5.086572 +step:631 train loss:5.128358 +step:632 train loss:5.031430 +step:633 train loss:5.061553 +step:634 train loss:5.032247 +step:635 train loss:5.106185 +step:636 train loss:5.063068 +step:637 train loss:5.094942 +step:638 train loss:5.062431 +step:639 train loss:5.027350 +step:640 train loss:5.079115 +step:641 train loss:5.069412 +step:642 train loss:5.037189 +step:643 train loss:5.099792 +step:644 train loss:5.028744 +step:645 train loss:5.017726 +step:646 train loss:5.155752 +step:647 train loss:5.081661 +step:648 train loss:5.057055 +step:649 train loss:5.068790 +step:650 train loss:5.013422 +step:651 train loss:5.035673 +step:652 train loss:5.057576 +step:653 train loss:5.026702 +step:654 train loss:5.083260 +step:655 train loss:4.964142 +step:656 train loss:4.967298 +step:657 train loss:5.058196 +step:658 train loss:5.031467 +step:659 train loss:5.062099 +step:660 train loss:5.052333 +step:661 train loss:5.104053 +step:662 train loss:5.139954 +step:663 train loss:5.106734 +step:664 train loss:5.137451 +step:665 train loss:5.041940 +step:666 train loss:4.966762 +step:667 train loss:5.024842 +step:668 train loss:4.956533 +step:669 train loss:5.022815 +step:670 
train loss:5.061318 +step:671 train loss:5.093855 +step:672 train loss:5.056685 +step:673 train loss:4.969741 +step:674 train loss:5.041678 +step:675 train loss:4.950559 +step:676 train loss:4.978869 +step:677 train loss:4.988733 +step:678 train loss:5.019330 +step:679 train loss:4.987444 +step:680 train loss:4.895352 +step:681 train loss:5.009168 +step:682 train loss:4.970491 +step:683 train loss:4.996190 +step:684 train loss:4.978483 +step:685 train loss:5.056769 +step:686 train loss:4.977660 +step:687 train loss:4.911308 +step:688 train loss:4.949542 +step:689 train loss:4.970840 +step:690 train loss:4.977577 +step:691 train loss:5.006177 +step:692 train loss:4.858223 +step:693 train loss:4.996661 +step:694 train loss:5.025591 +step:695 train loss:4.977527 +step:696 train loss:5.033188 +step:697 train loss:4.968915 +step:698 train loss:4.931156 +step:699 train loss:4.957969 +step:700 train loss:5.011969 +step:701 train loss:4.907654 +step:702 train loss:4.882902 +step:703 train loss:4.979754 +step:704 train loss:4.935145 +step:705 train loss:4.980094 +step:706 train loss:4.929899 +step:707 train loss:4.882860 +step:708 train loss:4.908467 +step:709 train loss:4.916970 +step:710 train loss:4.925956 +step:711 train loss:4.913356 +step:712 train loss:4.835361 +step:713 train loss:4.902205 +step:714 train loss:4.828935 +step:715 train loss:4.874164 +step:716 train loss:4.833230 +step:717 train loss:4.824401 +step:718 train loss:4.903771 +step:719 train loss:4.891944 +step:720 train loss:4.862555 +step:721 train loss:4.888087 +step:722 train loss:4.826431 +step:723 train loss:4.860030 +step:724 train loss:4.811051 +step:725 train loss:4.791009 +step:726 train loss:4.786873 +step:727 train loss:4.769183 +step:728 train loss:4.909630 +step:729 train loss:4.783595 +step:730 train loss:4.837387 +step:731 train loss:4.874423 +step:732 train loss:4.763288 +step:733 train loss:4.832231 +step:734 train loss:4.811715 +step:735 train loss:4.857864 +step:736 train loss:4.866994 +step:737 train loss:4.832982 +step:738 train loss:4.884107 +step:739 train loss:4.807919 +step:740 train loss:4.868931 +step:741 train loss:4.851696 +step:742 train loss:4.818233 +step:743 train loss:4.847202 +step:744 train loss:4.830502 +step:745 train loss:4.775279 +step:746 train loss:4.817115 +step:747 train loss:4.834203 +step:748 train loss:4.802691 +step:749 train loss:4.854520 +step:750 validation loss:4.814925 +step:750 train loss:4.807576 +step:751 train loss:4.786065 +step:752 train loss:4.778528 +step:753 train loss:4.778858 +step:754 train loss:4.716662 +step:755 train loss:4.713449 +step:756 train loss:4.716943 +step:757 train loss:4.700973 +step:758 train loss:4.799198 +step:759 train loss:4.763675 +step:760 train loss:4.822866 +step:761 train loss:4.805427 +step:762 train loss:4.747003 +step:763 train loss:4.769935 +step:764 train loss:4.758160 +step:765 train loss:4.737782 +step:766 train loss:4.814429 +step:767 train loss:4.641981 +step:768 train loss:4.768898 +step:769 train loss:4.786087 +step:770 train loss:4.752113 +step:771 train loss:4.799259 +step:772 train loss:4.787597 +step:773 train loss:4.735690 +step:774 train loss:4.772675 +step:775 train loss:4.720406 +step:776 train loss:4.698502 +step:777 train loss:4.762254 +step:778 train loss:4.783214 +step:779 train loss:4.721350 +step:780 train loss:4.693375 +step:781 train loss:4.777292 +step:782 train loss:4.770845 +step:783 train loss:4.722997 +step:784 train loss:4.737651 +step:785 train loss:4.741452 +step:786 train loss:4.769035 +step:787 train 
loss:4.805284 +step:788 train loss:4.753927 +step:789 train loss:4.781500 +step:790 train loss:4.765166 +step:791 train loss:4.695096 +step:792 train loss:4.712901 +step:793 train loss:4.681277 +step:794 train loss:4.717390 +step:795 train loss:4.681611 +step:796 train loss:4.777733 +step:797 train loss:4.629027 +step:798 train loss:4.753000 +step:799 train loss:4.661618 +step:800 train loss:4.695366 +step:801 train loss:4.631560 +step:802 train loss:4.714238 +step:803 train loss:4.723273 +step:804 train loss:4.678123 +step:805 train loss:4.723866 +step:806 train loss:4.687496 +step:807 train loss:4.702612 +step:808 train loss:4.606174 +step:809 train loss:4.626544 +step:810 train loss:4.570272 +step:811 train loss:4.631126 +step:812 train loss:4.566410 +step:813 train loss:4.640273 +step:814 train loss:4.610927 +step:815 train loss:4.724478 +step:816 train loss:4.802762 +step:817 train loss:4.686622 +step:818 train loss:4.694261 +step:819 train loss:4.631322 +step:820 train loss:4.644169 +step:821 train loss:4.642268 +step:822 train loss:4.642560 +step:823 train loss:4.618083 +step:824 train loss:4.643347 +step:825 train loss:4.570920 +step:826 train loss:4.670175 +step:827 train loss:4.617846 +step:828 train loss:4.609516 +step:829 train loss:4.575589 +step:830 train loss:4.591722 +step:831 train loss:4.612113 +step:832 train loss:4.610754 +step:833 train loss:4.598326 +step:834 train loss:4.586133 +step:835 train loss:4.582781 +step:836 train loss:4.549368 +step:837 train loss:4.616264 +step:838 train loss:4.548406 +step:839 train loss:4.586390 +step:840 train loss:4.595304 +step:841 train loss:4.582766 +step:842 train loss:4.519866 +step:843 train loss:4.565685 +step:844 train loss:4.563017 +step:845 train loss:4.636684 +step:846 train loss:4.602038 +step:847 train loss:4.676801 +step:848 train loss:4.642994 +step:849 train loss:4.680926 +step:850 train loss:4.668802 +step:851 train loss:4.673163 +step:852 train loss:4.650534 +step:853 train loss:4.718966 +step:854 train loss:4.647154 +step:855 train loss:4.613640 +step:856 train loss:4.578838 +step:857 train loss:4.646474 +step:858 train loss:4.592347 +step:859 train loss:4.563099 +step:860 train loss:4.543924 +step:861 train loss:4.549289 +step:862 train loss:4.574255 +step:863 train loss:4.549772 +step:864 train loss:4.547437 +step:865 train loss:4.492722 +step:866 train loss:4.612156 +step:867 train loss:4.587800 +step:868 train loss:4.576751 +step:869 train loss:4.482266 +step:870 train loss:4.521950 +step:871 train loss:4.441290 +step:872 train loss:4.489787 +step:873 train loss:4.441312 +step:874 train loss:4.483677 +step:875 train loss:4.487816 +step:876 train loss:4.459279 +step:877 train loss:4.446073 +step:878 train loss:4.639600 +step:879 train loss:4.453165 +step:880 train loss:4.461226 +step:881 train loss:4.512417 +step:882 train loss:4.725761 +step:883 train loss:4.624416 +step:884 train loss:4.633616 +step:885 train loss:4.674443 +step:886 train loss:4.774067 +step:887 train loss:4.705343 +step:888 train loss:4.759821 +step:889 train loss:4.698961 +step:890 train loss:4.741540 +step:891 train loss:4.599341 +step:892 train loss:4.757517 +step:893 train loss:4.702333 +step:894 train loss:4.671432 +step:895 train loss:4.630133 +step:896 train loss:4.670856 +step:897 train loss:4.561035 +step:898 train loss:4.597776 +step:899 train loss:4.555987 +step:900 train loss:4.627087 +step:901 train loss:4.537950 +step:902 train loss:4.596020 +step:903 train loss:4.548439 +step:904 train loss:4.571511 +step:905 train loss:4.491480 
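For reference, the learning rate applied at every optimizer.step() above follows the warmup-stable-decay rule in get_wsd_lr: linear warmup, a constant plateau, then a linear cooldown over the last 20% of iterations. A self-contained restatement with purely hypothetical settings (the run's actual values come from the CLI args):

def wsd_lr(it, base_lr, warmup_iters, num_iterations, lr_decay_frac=0.0, cooldown_frac=0.2):
    min_lr = base_lr * lr_decay_frac
    cooldown_iters = int(num_iterations * cooldown_frac)
    if it < warmup_iters:                 # linear warmup
        return base_lr * (it + 1) / warmup_iters
    cooldown_start = num_iterations - cooldown_iters
    if it >= cooldown_start:              # linear cooldown down to min_lr
        return base_lr - (it - cooldown_start) / cooldown_iters * (base_lr - min_lr)
    return base_lr                        # stable plateau in between

if __name__ == "__main__":
    for it in (0, 499, 1000, 8000, 19999):  # hypothetical schedule, not this run's
        print(it, wsd_lr(it, base_lr=3e-4, warmup_iters=1000, num_iterations=20000))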
+step:906 train loss:4.549142 +step:907 train loss:4.560285 +step:908 train loss:4.559450 +step:909 train loss:4.481233 +step:910 train loss:4.526509 +step:911 train loss:4.467073 +step:912 train loss:4.444551 +step:913 train loss:4.470204 +step:914 train loss:4.470930 +step:915 train loss:4.426937 +step:916 train loss:4.491663 +step:917 train loss:4.590813 +step:918 train loss:4.528611 +step:919 train loss:4.475506 +step:920 train loss:4.496662 +step:921 train loss:4.447612 +step:922 train loss:4.440622 +step:923 train loss:4.440217 +step:924 train loss:4.463232 +step:925 train loss:4.457276 +step:926 train loss:4.449218 +step:927 train loss:4.422597 +step:928 train loss:4.414205 +step:929 train loss:4.431316 +step:930 train loss:4.457004 +step:931 train loss:4.465289 +step:932 train loss:4.441624 +step:933 train loss:4.550721 +step:934 train loss:4.479018 +step:935 train loss:4.471046 +step:936 train loss:4.386627 +step:937 train loss:4.388542 +step:938 train loss:4.398221 +step:939 train loss:4.436312 +step:940 train loss:4.397977 +step:941 train loss:4.477766 +step:942 train loss:4.362075 +step:943 train loss:4.446127 +step:944 train loss:4.393015 +step:945 train loss:4.308494 +step:946 train loss:4.426654 +step:947 train loss:4.452047 +step:948 train loss:4.419866 +step:949 train loss:4.410641 +step:950 train loss:4.377933 +step:951 train loss:4.450698 +step:952 train loss:4.385080 +step:953 train loss:4.456712 +step:954 train loss:4.430246 +step:955 train loss:4.407538 +step:956 train loss:4.413882 +step:957 train loss:4.456536 +step:958 train loss:4.425716 +step:959 train loss:4.369017 +step:960 train loss:4.458279 +step:961 train loss:4.387191 +step:962 train loss:4.439281 +step:963 train loss:4.461101 +step:964 train loss:4.434881 +step:965 train loss:4.450237 +step:966 train loss:4.399819 +step:967 train loss:4.437163 +step:968 train loss:4.467626 +step:969 train loss:4.395355 +step:970 train loss:4.403750 +step:971 train loss:4.448110 +step:972 train loss:4.403827 +step:973 train loss:4.385234 +step:974 train loss:4.361800 +step:975 train loss:4.458147 +step:976 train loss:4.341369 +step:977 train loss:4.358502 +step:978 train loss:4.385554 +step:979 train loss:4.325780 +step:980 train loss:4.357037 +step:981 train loss:4.313175 +step:982 train loss:4.414249 +step:983 train loss:4.443687 +step:984 train loss:4.405139 +step:985 train loss:4.404019 +step:986 train loss:4.391335 +step:987 train loss:4.412284 +step:988 train loss:4.362009 +step:989 train loss:4.302816 +step:990 train loss:4.369212 +step:991 train loss:4.383389 +step:992 train loss:4.415124 +step:993 train loss:4.438060 +step:994 train loss:4.408807 +step:995 train loss:4.484147 +step:996 train loss:4.474745 +step:997 train loss:4.366968 +step:998 train loss:4.451931 +step:999 train loss:4.351275 +step:1000 validation loss:4.325916 total_sharp:6.5505e-02 L1_sharp:1.1921e-01 L2_sharp:1.0428e-02 L3_sharp:9.4325e-03 L4_sharp:1.5098e-02 L5_sharp:1.5287e-02 L6_sharp:1.1111e-02 L7_sharp:1.6261e-02 L8_sharp:1.8456e-02 L9_sharp:1.1559e-02 L10_sharp:6.7624e-03 L11_sharp:6.2726e-03 L12_sharp:1.1320e-02 total_fnorm:1.9488e+00 total_l1_linf:1.6924e+04 total_spectral:1.9488e+00 L1_fnorm:3.7623e-01 L2_fnorm:3.6996e-01 L3_fnorm:3.8639e-01 L4_fnorm:3.6802e-01 L5_fnorm:3.4375e-01 L6_fnorm:3.9143e-01 L7_fnorm:4.1094e-01 L8_fnorm:4.3542e-01 L9_fnorm:4.6094e-01 L10_fnorm:4.7266e-01 L11_fnorm:4.6860e-01 L12_fnorm:4.3605e-01 L1_l1linf:5.1433e-01 L2_l1linf:6.7465e-01 L3_l1linf:6.4750e-01 L4_l1linf:5.8363e-01 L5_l1linf:5.3126e-01 
L6_l1linf:5.6258e-01 L7_l1linf:5.9837e-01 L8_l1linf:4.8487e-01 L9_l1linf:4.9733e-01 L10_l1linf:5.0856e-01 L11_l1linf:5.2208e-01 L12_l1linf:5.2629e-01 L1_spectral:8.0160e-02 L2_spectral:9.9021e-02 L3_spectral:9.3631e-02 L4_spectral:9.7278e-02 L5_spectral:8.7860e-02 L6_spectral:9.5794e-02 L7_spectral:8.9720e-02 L8_spectral:7.6183e-02 L9_spectral:7.7028e-02 L10_spectral:7.4519e-02 L11_spectral:7.8127e-02 L12_spectral:8.3162e-02 v_norm:1.9488e+00 cos_v_-g_hvp:1.6496e-01 g_hvp_norm:4.4902e-01 cos_v_-g_t:1.8613e-01 g_t_norm:3.9931e-01 hv_norm:1.0053e+00 cos_v_hv:1.2699e-01 hg_norm:1.6603e+01 cos_g_hg:2.9332e-01 v_par:1.7342e-02 v_perp:1.9488e+00 L1_cos_v_neg_g:2.6239e-01 L1_v_norm:3.7623e-01 L2_cos_v_neg_g:2.2617e-01 L2_v_norm:3.6996e-01 L3_cos_v_neg_g:2.1906e-01 L3_v_norm:3.8639e-01 L4_cos_v_neg_g:2.0453e-01 L4_v_norm:3.6802e-01 L5_cos_v_neg_g:1.7657e-01 L5_v_norm:3.4375e-01 L6_cos_v_neg_g:1.8235e-01 L6_v_norm:3.9143e-01 L7_cos_v_neg_g:2.0910e-01 L7_v_norm:4.1094e-01 L8_cos_v_neg_g:2.3508e-01 L8_v_norm:4.3542e-01 L9_cos_v_neg_g:2.1320e-01 L9_v_norm:4.6094e-01 L10_cos_v_neg_g:2.6793e-01 L10_v_norm:4.7266e-01 L11_cos_v_neg_g:2.6625e-01 L11_v_norm:4.6860e-01 L12_cos_v_neg_g:3.0113e-01 L12_v_norm:4.3605e-01 +step:1000 train loss:4.405469 +step:1001 train loss:4.383103 +step:1002 train loss:4.314265 +step:1003 train loss:4.343328 +step:1004 train loss:4.312378 +step:1005 train loss:4.397363 +step:1006 train loss:4.376721 +step:1007 train loss:4.412499 +step:1008 train loss:4.283530 +step:1009 train loss:4.392954 +step:1010 train loss:4.365953 +step:1011 train loss:4.341087 +step:1012 train loss:4.341733 +step:1013 train loss:4.353143 +step:1014 train loss:4.317883 +step:1015 train loss:4.293833 +step:1016 train loss:4.374201 +step:1017 train loss:4.461388 +step:1018 train loss:4.423925 +step:1019 train loss:4.281172 +step:1020 train loss:4.378884 +step:1021 train loss:4.306708 +step:1022 train loss:4.318224 +step:1023 train loss:4.315979 +step:1024 train loss:4.286853 +step:1025 train loss:4.328286 +step:1026 train loss:4.327433 +step:1027 train loss:4.301611 +step:1028 train loss:4.326471 +step:1029 train loss:4.248126 +step:1030 train loss:4.361461 +step:1031 train loss:4.337797 +step:1032 train loss:4.347113 +step:1033 train loss:4.331289 +step:1034 train loss:4.340066 +step:1035 train loss:4.434536 +step:1036 train loss:4.447285 +step:1037 train loss:4.296835 +step:1038 train loss:4.308835 +step:1039 train loss:4.316215 +step:1040 train loss:4.350933 +step:1041 train loss:4.382197 +step:1042 train loss:4.339147 +step:1043 train loss:4.289680 +step:1044 train loss:4.268284 +step:1045 train loss:4.279604 +step:1046 train loss:4.296244 +step:1047 train loss:4.309990 +step:1048 train loss:4.382256 +step:1049 train loss:4.379579 +step:1050 train loss:4.371776 +step:1051 train loss:4.481766 +step:1052 train loss:4.341477 +step:1053 train loss:4.440137 +step:1054 train loss:4.349951 +step:1055 train loss:4.296814 +step:1056 train loss:4.427687 +step:1057 train loss:4.322553 +step:1058 train loss:4.358649 +step:1059 train loss:4.346599 +step:1060 train loss:4.320789 +step:1061 train loss:4.311712 +step:1062 train loss:4.313800 +step:1063 train loss:4.327840 +step:1064 train loss:4.299212 +step:1065 train loss:4.314482 +step:1066 train loss:4.281627 +step:1067 train loss:4.328933 +step:1068 train loss:4.280085 +step:1069 train loss:4.287660 +step:1070 train loss:4.237907 +step:1071 train loss:4.320483 +step:1072 train loss:4.301057 +step:1073 train loss:4.317245 +step:1074 train loss:4.246308 +step:1075 
train loss:4.308877 +step:1076 train loss:4.220231 +step:1077 train loss:4.319265 +step:1078 train loss:4.274993 +step:1079 train loss:4.284653 +step:1080 train loss:4.319379 +step:1081 train loss:4.280795 +step:1082 train loss:4.299014 +step:1083 train loss:4.282470 +step:1084 train loss:4.367455 +step:1085 train loss:4.286215 +step:1086 train loss:4.325297 +step:1087 train loss:4.345382 +step:1088 train loss:4.442345 +step:1089 train loss:4.403518 +step:1090 train loss:4.449518 +step:1091 train loss:4.451184 +step:1092 train loss:4.461496 +step:1093 train loss:4.485832 +step:1094 train loss:4.545340 +step:1095 train loss:4.375553 +step:1096 train loss:4.453830 +step:1097 train loss:4.424321 +step:1098 train loss:4.359163 +step:1099 train loss:4.367651 +step:1100 train loss:4.373643 +step:1101 train loss:4.329615 +step:1102 train loss:4.386429 +step:1103 train loss:4.382868 +step:1104 train loss:4.390868 +step:1105 train loss:4.343214 +step:1106 train loss:4.349250 +step:1107 train loss:4.269841 +step:1108 train loss:4.278736 +step:1109 train loss:4.306563 +step:1110 train loss:4.292209 +step:1111 train loss:4.248337 +step:1112 train loss:4.282809 +step:1113 train loss:4.253345 +step:1114 train loss:4.258731 +step:1115 train loss:4.271761 +step:1116 train loss:4.304075 +step:1117 train loss:4.249243 +step:1118 train loss:4.233119 +step:1119 train loss:4.258346 +step:1120 train loss:4.224875 +step:1121 train loss:4.273962 +step:1122 train loss:4.287554 +step:1123 train loss:4.261892 +step:1124 train loss:4.271412 +step:1125 train loss:4.246513 +step:1126 train loss:4.240418 +step:1127 train loss:4.200779 +step:1128 train loss:4.233149 +step:1129 train loss:4.177452 +step:1130 train loss:4.274906 +step:1131 train loss:4.228391 +step:1132 train loss:4.325480 +step:1133 train loss:4.295173 +step:1134 train loss:4.230490 +step:1135 train loss:4.235493 +step:1136 train loss:4.288263 +step:1137 train loss:4.234061 +step:1138 train loss:4.325531 +step:1139 train loss:4.249488 +step:1140 train loss:4.254261 +step:1141 train loss:4.231920 +step:1142 train loss:4.226856 +step:1143 train loss:4.222108 +step:1144 train loss:4.301818 +step:1145 train loss:4.274813 +step:1146 train loss:4.320547 +step:1147 train loss:4.253668 +step:1148 train loss:4.260469 +step:1149 train loss:4.241404 +step:1150 train loss:4.266581 +step:1151 train loss:4.190428 +step:1152 train loss:4.237378 +step:1153 train loss:4.196624 +step:1154 train loss:4.159237 +step:1155 train loss:4.207816 +step:1156 train loss:4.216074 +step:1157 train loss:4.121928 +step:1158 train loss:4.216708 +step:1159 train loss:4.163526 +step:1160 train loss:4.131023 +step:1161 train loss:4.245121 +step:1162 train loss:4.238472 +step:1163 train loss:4.207480 +step:1164 train loss:4.292846 +step:1165 train loss:4.178957 +step:1166 train loss:4.127482 +step:1167 train loss:4.196520 +step:1168 train loss:4.216246 +step:1169 train loss:4.248065 +step:1170 train loss:4.218878 +step:1171 train loss:4.253898 +step:1172 train loss:4.216237 +step:1173 train loss:4.221876 +step:1174 train loss:4.251053 +step:1175 train loss:4.304081 +step:1176 train loss:4.204275 +step:1177 train loss:4.218411 +step:1178 train loss:4.227630 +step:1179 train loss:4.220896 +step:1180 train loss:4.193301 +step:1181 train loss:4.205647 +step:1182 train loss:4.163408 +step:1183 train loss:4.155059 +step:1184 train loss:4.176564 +step:1185 train loss:4.178760 +step:1186 train loss:4.232129 +step:1187 train loss:4.146645 +step:1188 train loss:4.282417 +step:1189 train loss:4.143899 
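The step-1000 record above also logs alignment metrics such as cos_v_-g_t: the cosine between the flattened optimizer update v (captured as last_training_update in the loop) and the negative of the captured gradient g_t. The sketch below only restates that definition on lists of per-parameter tensors; it is not the script's calculate_comprehensive_sharpness implementation:

import torch
import torch.nn.functional as F

def cos_update_vs_neg_grad(update_list, grad_list) -> float:
    # update_list / grad_list mirror the shapes of last_training_update and
    # last_training_gradient: one tensor per parameter, flattened and concatenated.
    v = torch.cat([u.reshape(-1) for u in update_list])
    g = torch.cat([t.reshape(-1) for t in grad_list])
    return F.cosine_similarity(v, -g, dim=0).item()

if __name__ == "__main__":
    # purely hypothetical stand-ins for the captured lists
    updates = [torch.randn(16, 16) * 1e-3, torch.randn(32) * 1e-3]
    grads = [torch.randn(16, 16), torch.randn(32)]
    print(cos_update_vs_neg_grad(updates, grads))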
+step:1190 train loss:4.175250 +step:1191 train loss:4.125180 +step:1192 train loss:4.136300 +step:1193 train loss:4.232718 +step:1194 train loss:4.211620 +step:1195 train loss:4.261362 +step:1196 train loss:4.156754 +step:1197 train loss:4.159550 +step:1198 train loss:4.125776 +step:1199 train loss:4.141910 +step:1200 train loss:4.171557 +step:1201 train loss:4.090240 +step:1202 train loss:4.247208 +step:1203 train loss:4.089843 +step:1204 train loss:4.149251 +step:1205 train loss:4.277440 +step:1206 train loss:4.179482 +step:1207 train loss:4.150040 +step:1208 train loss:4.167486 +step:1209 train loss:4.205597 +step:1210 train loss:4.142388 +step:1211 train loss:4.185704 +step:1212 train loss:4.144714 +step:1213 train loss:4.110622 +step:1214 train loss:4.173630 +step:1215 train loss:4.212654 +step:1216 train loss:4.164377 +step:1217 train loss:4.169216 +step:1218 train loss:4.189895 +step:1219 train loss:4.128487 +step:1220 train loss:4.260730 +step:1221 train loss:4.196537 +step:1222 train loss:4.141709 +step:1223 train loss:4.078412 +step:1224 train loss:4.139072 +step:1225 train loss:4.183412 +step:1226 train loss:4.113105 +step:1227 train loss:4.143538 +step:1228 train loss:4.179609 +step:1229 train loss:4.113895 +step:1230 train loss:4.085181 +step:1231 train loss:4.150763 +step:1232 train loss:4.114661 +step:1233 train loss:4.118574 +step:1234 train loss:4.135784 +step:1235 train loss:4.171851 +step:1236 train loss:4.105467 +step:1237 train loss:4.112210 +step:1238 train loss:4.112031 +step:1239 train loss:4.162908 +step:1240 train loss:4.108522 +step:1241 train loss:4.143559 +step:1242 train loss:4.110918 +step:1243 train loss:4.105267 +step:1244 train loss:4.140327 +step:1245 train loss:4.155245 +step:1246 train loss:4.122447 +step:1247 train loss:4.077958 +step:1248 train loss:4.109295 +step:1249 train loss:4.174549 +step:1250 validation loss:4.125170 +step:1250 train loss:4.107793 +step:1251 train loss:4.162678 +step:1252 train loss:4.163891 +step:1253 train loss:4.136793 +step:1254 train loss:4.160919 +step:1255 train loss:4.111414 +step:1256 train loss:4.133151 +step:1257 train loss:4.161909 +step:1258 train loss:4.075543 +step:1259 train loss:4.149635 +step:1260 train loss:4.124763 +step:1261 train loss:4.257712 +step:1262 train loss:4.138901 +step:1263 train loss:4.268990 +step:1264 train loss:4.199665 +step:1265 train loss:4.235467 +step:1266 train loss:4.192769 +step:1267 train loss:4.164885 +step:1268 train loss:4.187557 +step:1269 train loss:4.219934 +step:1270 train loss:4.045711 +step:1271 train loss:4.151611 +step:1272 train loss:4.140923 +step:1273 train loss:4.141335 +step:1274 train loss:4.162687 +step:1275 train loss:4.224277 +step:1276 train loss:4.071934 +step:1277 train loss:4.162768 +step:1278 train loss:4.112747 +step:1279 train loss:4.087231 +step:1280 train loss:4.175987 +step:1281 train loss:4.184150 +step:1282 train loss:4.132568 +step:1283 train loss:4.342491 +step:1284 train loss:4.232499 +step:1285 train loss:4.184573 +step:1286 train loss:4.115044 +step:1287 train loss:4.150224 +step:1288 train loss:4.152606 +step:1289 train loss:4.158796 +step:1290 train loss:4.150542 +step:1291 train loss:4.206443 +step:1292 train loss:4.090941 +step:1293 train loss:4.151237 +step:1294 train loss:4.128298 +step:1295 train loss:4.102115 +step:1296 train loss:4.096647 +step:1297 train loss:4.082520 +step:1298 train loss:4.129968 +step:1299 train loss:4.156381 +step:1300 train loss:4.148144 +step:1301 train loss:4.113463 +step:1302 train loss:4.113152 +step:1303 
train loss:4.125109 +step:1304 train loss:4.093674 +step:1305 train loss:4.120461 +step:1306 train loss:4.193718 +step:1307 train loss:4.051691 +step:1308 train loss:4.124950 +step:1309 train loss:4.132740 +step:1310 train loss:4.180083 +step:1311 train loss:4.055681 +step:1312 train loss:4.128115 +step:1313 train loss:4.106307 +step:1314 train loss:4.032065 +step:1315 train loss:4.083356 +step:1316 train loss:4.115550 +step:1317 train loss:4.094981 +step:1318 train loss:4.173991 +step:1319 train loss:4.186346 +step:1320 train loss:4.139329 +step:1321 train loss:4.107998 +step:1322 train loss:4.177742 +step:1323 train loss:4.063771 +step:1324 train loss:4.132188 +step:1325 train loss:4.128519 +step:1326 train loss:4.112266 +step:1327 train loss:4.066744 +step:1328 train loss:4.100260 +step:1329 train loss:4.130064 +step:1330 train loss:4.079837 +step:1331 train loss:4.048419 +step:1332 train loss:4.128818 +step:1333 train loss:4.093408 +step:1334 train loss:3.994654 +step:1335 train loss:4.062619 +step:1336 train loss:4.091634 +step:1337 train loss:4.167462 +step:1338 train loss:4.103735 +step:1339 train loss:4.071825 +step:1340 train loss:4.112706 +step:1341 train loss:4.094983 +step:1342 train loss:4.108160 +step:1343 train loss:4.081173 +step:1344 train loss:4.121605 +step:1345 train loss:4.122012 +step:1346 train loss:4.042097 +step:1347 train loss:4.130930 +step:1348 train loss:4.257932 +step:1349 train loss:4.043626 +step:1350 train loss:4.019846 +step:1351 train loss:4.167756 +step:1352 train loss:4.130280 +step:1353 train loss:4.123973 +step:1354 train loss:4.052307 +step:1355 train loss:4.114956 +step:1356 train loss:4.044491 +step:1357 train loss:4.124969 +step:1358 train loss:4.122887 +step:1359 train loss:4.063698 +step:1360 train loss:4.084028 +step:1361 train loss:4.123182 +step:1362 train loss:4.157379 +step:1363 train loss:4.111651 +step:1364 train loss:4.092448 +step:1365 train loss:4.028694 +step:1366 train loss:4.385719 +step:1367 train loss:4.182469 +step:1368 train loss:4.150119 +step:1369 train loss:4.174720 +step:1370 train loss:4.159904 +step:1371 train loss:4.156315 +step:1372 train loss:4.109499 +step:1373 train loss:4.082774 +step:1374 train loss:4.146537 +step:1375 train loss:4.130158 +step:1376 train loss:4.029180 +step:1377 train loss:4.161077 +step:1378 train loss:4.061495 +step:1379 train loss:4.093263 +step:1380 train loss:4.138330 +step:1381 train loss:4.061220 +step:1382 train loss:4.109153 +step:1383 train loss:4.048715 +step:1384 train loss:4.147813 +step:1385 train loss:4.146404 +step:1386 train loss:4.166269 +step:1387 train loss:4.040719 +step:1388 train loss:4.059181 +step:1389 train loss:4.135799 +step:1390 train loss:4.095940 +step:1391 train loss:4.063236 +step:1392 train loss:4.116807 +step:1393 train loss:4.164078 +step:1394 train loss:4.028217 +step:1395 train loss:4.120011 +step:1396 train loss:4.041299 +step:1397 train loss:4.078019 +step:1398 train loss:4.079349 +step:1399 train loss:4.055772 +step:1400 train loss:4.121866 +step:1401 train loss:4.018995 +step:1402 train loss:4.076571 +step:1403 train loss:4.053454 +step:1404 train loss:4.040342 +step:1405 train loss:4.099353 +step:1406 train loss:4.041539 +step:1407 train loss:4.021818 +step:1408 train loss:4.049635 +step:1409 train loss:4.029102 +step:1410 train loss:4.101557 +step:1411 train loss:4.075132 +step:1412 train loss:4.101326 +step:1413 train loss:4.078221 +step:1414 train loss:4.075980 +step:1415 train loss:4.039411 +step:1416 train loss:4.046824 +step:1417 train loss:4.199940 
+step:1418 train loss:4.051047 +step:1419 train loss:4.028490 +step:1420 train loss:4.056699 +step:1421 train loss:4.071503 +step:1422 train loss:4.065286 +step:1423 train loss:4.032498 +step:1424 train loss:4.071196 +step:1425 train loss:4.062903 +step:1426 train loss:4.051088 +step:1427 train loss:4.131831 +step:1428 train loss:4.093263 +step:1429 train loss:4.048480 +step:1430 train loss:4.109305 +step:1431 train loss:4.176229 +step:1432 train loss:4.043187 +step:1433 train loss:4.137704 +step:1434 train loss:4.036344 +step:1435 train loss:4.106583 +step:1436 train loss:4.058204 +step:1437 train loss:4.034600 +step:1438 train loss:4.189953 +step:1439 train loss:4.015385 +step:1440 train loss:4.031831 +step:1441 train loss:4.064512 +step:1442 train loss:3.953870 +step:1443 train loss:4.105418 +step:1444 train loss:4.190777 +step:1445 train loss:4.193644 +step:1446 train loss:4.151257 +step:1447 train loss:4.181537 +step:1448 train loss:4.201953 +step:1449 train loss:4.148695 +step:1450 train loss:4.206832 +step:1451 train loss:4.122735 +step:1452 train loss:4.164500 +step:1453 train loss:4.151578 +step:1454 train loss:4.064724 +step:1455 train loss:4.124853 +step:1456 train loss:4.033124 +step:1457 train loss:4.036281 +step:1458 train loss:4.084930 +step:1459 train loss:4.022797 +step:1460 train loss:4.064351 +step:1461 train loss:4.074939 +step:1462 train loss:4.021343 +step:1463 train loss:4.110166 +step:1464 train loss:4.058531 +step:1465 train loss:4.067438 +step:1466 train loss:4.065661 +step:1467 train loss:4.026659 +step:1468 train loss:4.072526 +step:1469 train loss:4.074968 +step:1470 train loss:4.015821 +step:1471 train loss:4.136553 +step:1472 train loss:4.020217 +step:1473 train loss:4.050090 +step:1474 train loss:4.041836 +step:1475 train loss:4.027083 +step:1476 train loss:4.041570 +step:1477 train loss:4.021933 +step:1478 train loss:4.071215 +step:1479 train loss:3.992185 +step:1480 train loss:4.041518 +step:1481 train loss:4.071550 +step:1482 train loss:4.010582 +step:1483 train loss:4.019197 +step:1484 train loss:4.069636 +step:1485 train loss:4.071249 +step:1486 train loss:4.062792 +step:1487 train loss:4.108794 +step:1488 train loss:4.061078 +step:1489 train loss:4.011064 +step:1490 train loss:4.045055 +step:1491 train loss:3.938149 +step:1492 train loss:4.051600 +step:1493 train loss:4.064589 +step:1494 train loss:3.953388 +step:1495 train loss:3.978008 +step:1496 train loss:4.061042 +step:1497 train loss:4.032832 +step:1498 train loss:4.015355 +step:1499 train loss:4.042627 +step:1500 validation loss:3.977432 total_sharp:2.3443e-02 L1_sharp:3.9335e-02 L2_sharp:3.0331e-03 L3_sharp:3.7161e-03 L4_sharp:6.5850e-03 L5_sharp:8.0321e-03 L6_sharp:4.3463e-03 L7_sharp:5.6661e-03 L8_sharp:6.5723e-03 L9_sharp:5.0571e-03 L10_sharp:3.3175e-03 L11_sharp:3.3446e-03 L12_sharp:7.5717e-03 total_fnorm:1.9233e+00 total_l1_linf:1.6578e+04 total_spectral:1.9233e+00 L1_fnorm:3.4112e-01 L2_fnorm:3.6471e-01 L3_fnorm:3.7480e-01 L4_fnorm:3.5283e-01 L5_fnorm:3.0875e-01 L6_fnorm:3.7347e-01 L7_fnorm:4.0472e-01 L8_fnorm:4.4056e-01 L9_fnorm:4.6501e-01 L10_fnorm:4.7396e-01 L11_fnorm:4.7533e-01 L12_fnorm:4.6221e-01 L1_l1linf:5.2338e-01 L2_l1linf:5.6962e-01 L3_l1linf:5.6825e-01 L4_l1linf:6.5987e-01 L5_l1linf:4.4063e-01 L6_l1linf:5.7296e-01 L7_l1linf:5.2838e-01 L8_l1linf:4.7472e-01 L9_l1linf:4.8961e-01 L10_l1linf:4.9102e-01 L11_l1linf:5.1161e-01 L12_l1linf:5.2648e-01 L1_spectral:6.5772e-02 L2_spectral:7.8340e-02 L3_spectral:7.8088e-02 L4_spectral:8.3485e-02 L5_spectral:6.3334e-02 L6_spectral:7.4307e-02 
L7_spectral:6.8631e-02 L8_spectral:6.3268e-02 L9_spectral:6.6131e-02 L10_spectral:6.5186e-02 L11_spectral:6.7772e-02 L12_spectral:8.7346e-02 v_norm:1.9233e+00 cos_v_-g_hvp:1.5492e-01 g_hvp_norm:3.2098e-01 cos_v_-g_t:1.8538e-01 g_t_norm:2.6933e-01 hv_norm:4.7919e-01 cos_v_hv:9.4090e-02 hg_norm:6.6391e+00 cos_g_hg:2.9665e-01 v_par:2.2914e-02 v_perp:1.9231e+00 L1_cos_v_neg_g:2.6154e-01 L1_v_norm:3.4112e-01 L2_cos_v_neg_g:1.6334e-01 L2_v_norm:3.6471e-01 L3_cos_v_neg_g:1.5656e-01 L3_v_norm:3.7480e-01 L4_cos_v_neg_g:1.6785e-01 L4_v_norm:3.5283e-01 L5_cos_v_neg_g:1.2665e-01 L5_v_norm:3.0875e-01 L6_cos_v_neg_g:1.4612e-01 L6_v_norm:3.7347e-01 L7_cos_v_neg_g:1.8076e-01 L7_v_norm:4.0472e-01 L8_cos_v_neg_g:1.9724e-01 L8_v_norm:4.4056e-01 L9_cos_v_neg_g:1.9253e-01 L9_v_norm:4.6501e-01 L10_cos_v_neg_g:2.2868e-01 L10_v_norm:4.7396e-01 L11_cos_v_neg_g:2.3513e-01 L11_v_norm:4.7533e-01 L12_cos_v_neg_g:2.8735e-01 L12_v_norm:4.6221e-01 +step:1500 train loss:4.006004 +step:1501 train loss:4.036758 +step:1502 train loss:3.987093 +step:1503 train loss:4.124811 +step:1504 train loss:4.057302 +step:1505 train loss:4.054475 +step:1506 train loss:4.030760 +step:1507 train loss:4.110936 +step:1508 train loss:4.042574 +step:1509 train loss:4.070294 +step:1510 train loss:4.041777 +step:1511 train loss:4.011537 +step:1512 train loss:3.983303 +step:1513 train loss:4.013465 +step:1514 train loss:4.070337 +step:1515 train loss:4.056033 +step:1516 train loss:3.979057 +step:1517 train loss:3.982043 +step:1518 train loss:3.997520 +step:1519 train loss:4.033577 +step:1520 train loss:4.026779 +step:1521 train loss:3.983669 +step:1522 train loss:4.032100 +step:1523 train loss:3.995683 +step:1524 train loss:3.975759 +step:1525 train loss:4.076441 +step:1526 train loss:4.045836 +step:1527 train loss:4.106213 +step:1528 train loss:4.106568 +step:1529 train loss:4.060649 +step:1530 train loss:4.047601 +step:1531 train loss:4.161231 +step:1532 train loss:4.182824 +step:1533 train loss:4.244190 +step:1534 train loss:4.177976 +step:1535 train loss:4.182621 +step:1536 train loss:4.113600 +step:1537 train loss:4.220630 +step:1538 train loss:4.071525 +step:1539 train loss:4.173637 +step:1540 train loss:4.132188 +step:1541 train loss:4.147896 +step:1542 train loss:4.055629 +step:1543 train loss:4.193712 +step:1544 train loss:4.201702 +step:1545 train loss:4.032428 +step:1546 train loss:4.099865 +step:1547 train loss:4.079118 +step:1548 train loss:4.034834 +step:1549 train loss:4.035259 +step:1550 train loss:3.998631 +step:1551 train loss:4.048478 +step:1552 train loss:4.088521 +step:1553 train loss:4.172823 +step:1554 train loss:4.039567 +step:1555 train loss:3.975766 +step:1556 train loss:4.011262 +step:1557 train loss:4.063696 +step:1558 train loss:4.053935 +step:1559 train loss:4.165483 +step:1560 train loss:4.093562 +step:1561 train loss:4.040383 +step:1562 train loss:4.081625 +step:1563 train loss:4.035516 +step:1564 train loss:3.999581 +step:1565 train loss:4.078452 +step:1566 train loss:4.000726 +step:1567 train loss:4.005221 +step:1568 train loss:4.287070 +step:1569 train loss:4.065474 +step:1570 train loss:4.236117 +step:1571 train loss:4.043157 +step:1572 train loss:4.028034 +step:1573 train loss:4.075545 +step:1574 train loss:3.991407 +step:1575 train loss:4.076448 +step:1576 train loss:3.994479 +step:1577 train loss:4.056335 +step:1578 train loss:4.127758 +step:1579 train loss:4.076045 +step:1580 train loss:4.061715 +step:1581 train loss:4.043264 +step:1582 train loss:4.003675 +step:1583 train loss:4.110726 +step:1584 train 
loss:4.095713 +step:1585 train loss:3.985598 +step:1586 train loss:4.057157 +step:1587 train loss:4.069798 +step:1588 train loss:3.992923 +step:1589 train loss:4.016708 +step:1590 train loss:4.066312 +step:1591 train loss:4.017684 +step:1592 train loss:4.001977 +step:1593 train loss:4.087400 +step:1594 train loss:4.000637 +step:1595 train loss:3.987398 +step:1596 train loss:3.984849 +step:1597 train loss:3.991085 +step:1598 train loss:4.006760 +step:1599 train loss:3.950663 +step:1600 train loss:4.016788 +step:1601 train loss:4.021024 +step:1602 train loss:4.029495 +step:1603 train loss:3.958024 +step:1604 train loss:3.990497 +step:1605 train loss:4.019953 +step:1606 train loss:3.938840 +step:1607 train loss:3.940255 +step:1608 train loss:3.996486 +step:1609 train loss:4.020761 +step:1610 train loss:4.009410 +step:1611 train loss:3.988868 +step:1612 train loss:3.960176 +step:1613 train loss:4.040052 +step:1614 train loss:4.007203 +step:1615 train loss:4.005671 +step:1616 train loss:3.970388 +step:1617 train loss:4.022801 +step:1618 train loss:4.044428 +step:1619 train loss:4.004464 +step:1620 train loss:3.995707 +step:1621 train loss:4.001220 +step:1622 train loss:3.984928 +step:1623 train loss:4.017017 +step:1624 train loss:4.024188 +step:1625 train loss:4.075988 +step:1626 train loss:4.034352 +step:1627 train loss:4.074355 +step:1628 train loss:4.044392 +step:1629 train loss:3.967520 +step:1630 train loss:3.970987 +step:1631 train loss:4.098352 +step:1632 train loss:3.979418 +step:1633 train loss:4.060083 +step:1634 train loss:4.054980 +step:1635 train loss:3.956268 +step:1636 train loss:4.001574 +step:1637 train loss:3.958913 +step:1638 train loss:4.016793 +step:1639 train loss:4.094298 +step:1640 train loss:3.983152 +step:1641 train loss:4.060009 +step:1642 train loss:3.981957 +step:1643 train loss:4.049812 +step:1644 train loss:3.971065 +step:1645 train loss:4.041951 +step:1646 train loss:4.104157 +step:1647 train loss:3.996078 +step:1648 train loss:4.027244 +step:1649 train loss:4.002259 +step:1650 train loss:3.973032 +step:1651 train loss:3.964790 +step:1652 train loss:3.997493 +step:1653 train loss:3.990285 +step:1654 train loss:4.074933 +step:1655 train loss:4.052404 +step:1656 train loss:3.925625 +step:1657 train loss:4.015386 +step:1658 train loss:4.011752 +step:1659 train loss:3.973532 +step:1660 train loss:3.950249 +step:1661 train loss:3.997036 +step:1662 train loss:3.992013 +step:1663 train loss:3.984031 +step:1664 train loss:3.977922 +step:1665 train loss:4.007599 +step:1666 train loss:4.047226 +step:1667 train loss:4.005720 +step:1668 train loss:4.099792 +step:1669 train loss:3.980783 +step:1670 train loss:3.966655 +step:1671 train loss:3.942691 +step:1672 train loss:3.949094 +step:1673 train loss:3.891453 +step:1674 train loss:3.923469 +step:1675 train loss:3.952452 +step:1676 train loss:3.960301 +step:1677 train loss:4.037203 +step:1678 train loss:3.976888 +step:1679 train loss:3.990140 +step:1680 train loss:4.007972 +step:1681 train loss:4.006398 +step:1682 train loss:4.010591 +step:1683 train loss:3.939182 +step:1684 train loss:4.041826 +step:1685 train loss:4.026499 +step:1686 train loss:3.950836 +step:1687 train loss:3.927528 +step:1688 train loss:4.006274 +step:1689 train loss:4.001221 +step:1690 train loss:4.069072 +step:1691 train loss:3.989383 +step:1692 train loss:4.051680 +step:1693 train loss:3.954750 +step:1694 train loss:3.935442 +step:1695 train loss:3.982823 +step:1696 train loss:4.022387 +step:1697 train loss:4.105833 +step:1698 train loss:3.993319 
+step:1699 train loss:3.910499 +step:1700 train loss:3.992617 +step:1701 train loss:3.945824 +step:1702 train loss:3.919173 +step:1703 train loss:3.974900 +step:1704 train loss:3.974233 +step:1705 train loss:3.932274 +step:1706 train loss:3.947296 +step:1707 train loss:4.012592 +step:1708 train loss:3.994307 +step:1709 train loss:3.971862 +step:1710 train loss:3.966905 +step:1711 train loss:3.985888 +step:1712 train loss:3.992928 +step:1713 train loss:3.936175 +step:1714 train loss:3.967766 +step:1715 train loss:3.939587 +step:1716 train loss:3.988470 +step:1717 train loss:3.918730 +step:1718 train loss:4.021564 +step:1719 train loss:3.973782 +step:1720 train loss:3.948980 +step:1721 train loss:3.965275 +step:1722 train loss:3.972824 +step:1723 train loss:3.964635 +step:1724 train loss:4.028574 +step:1725 train loss:3.974496 +step:1726 train loss:3.966834 +step:1727 train loss:4.029122 +step:1728 train loss:3.897422 +step:1729 train loss:3.915944 +step:1730 train loss:3.924644 +step:1731 train loss:3.971929 +step:1732 train loss:3.949969 +step:1733 train loss:4.024845 +step:1734 train loss:3.924655 +step:1735 train loss:4.018728 +step:1736 train loss:3.911709 +step:1737 train loss:3.982270 +step:1738 train loss:3.895421 +step:1739 train loss:4.037077 +step:1740 train loss:3.910578 +step:1741 train loss:3.909836 +step:1742 train loss:3.962820 +step:1743 train loss:4.058383 +step:1744 train loss:3.942125 +step:1745 train loss:3.916935 +step:1746 train loss:4.126302 +step:1747 train loss:3.930806 +step:1748 train loss:3.950826 +step:1749 train loss:3.921762 +step:1750 validation loss:3.910049 +step:1750 train loss:3.964743 +step:1751 train loss:3.894260 +step:1752 train loss:3.977652 +step:1753 train loss:3.965278 +step:1754 train loss:3.964069 +step:1755 train loss:3.996201 +step:1756 train loss:3.929539 +step:1757 train loss:3.937701 +step:1758 train loss:3.957068 +step:1759 train loss:3.936774 +step:1760 train loss:3.947675 +step:1761 train loss:3.919498 +step:1762 train loss:3.975404 +step:1763 train loss:3.896757 +step:1764 train loss:3.985458 +step:1765 train loss:3.917776 +step:1766 train loss:3.965893 +step:1767 train loss:3.916158 +step:1768 train loss:3.932494 +step:1769 train loss:3.900461 +step:1770 train loss:3.947693 +step:1771 train loss:3.936172 +step:1772 train loss:3.983973 +step:1773 train loss:3.934803 +step:1774 train loss:3.993285 +step:1775 train loss:3.945088 +step:1776 train loss:3.936039 +step:1777 train loss:3.914039 +step:1778 train loss:3.889014 +step:1779 train loss:3.968861 +step:1780 train loss:3.907354 +step:1781 train loss:3.960371 +step:1782 train loss:3.979206 +step:1783 train loss:3.899958 +step:1784 train loss:3.920262 +step:1785 train loss:3.990290 +step:1786 train loss:3.954719 +step:1787 train loss:3.925856 +step:1788 train loss:3.908315 +step:1789 train loss:3.907474 +step:1790 train loss:3.905639 +step:1791 train loss:3.964710 +step:1792 train loss:3.941693 +step:1793 train loss:3.933440 +step:1794 train loss:4.047131 +step:1795 train loss:3.866661 +step:1796 train loss:3.963588 +step:1797 train loss:3.936448 +step:1798 train loss:3.981414 +step:1799 train loss:3.892676 +step:1800 train loss:3.925099 +step:1801 train loss:3.922693 +step:1802 train loss:3.973046 +step:1803 train loss:3.917339 +step:1804 train loss:3.922654 +step:1805 train loss:3.979070 +step:1806 train loss:3.908486 +step:1807 train loss:3.916259 +step:1808 train loss:3.981143 +step:1809 train loss:3.967336 +step:1810 train loss:3.875906 +step:1811 train loss:3.953584 +step:1812 
train loss:3.899929 +step:1813 train loss:3.899725 +step:1814 train loss:3.980843 +step:1815 train loss:3.919332 +step:1816 train loss:3.909989 +step:1817 train loss:3.883615 +step:1818 train loss:3.919011 +step:1819 train loss:3.951124 +step:1820 train loss:3.888230 +step:1821 train loss:3.946138 +step:1822 train loss:3.890281 +step:1823 train loss:3.861406 +step:1824 train loss:4.011422 +step:1825 train loss:3.909467 +step:1826 train loss:3.918681 +step:1827 train loss:3.936901 +step:1828 train loss:3.881080 +step:1829 train loss:3.969742 +step:1830 train loss:3.935885 +step:1831 train loss:3.893204 +step:1832 train loss:3.905631 +step:1833 train loss:3.818565 +step:1834 train loss:3.873487 +step:1835 train loss:3.855063 +step:1836 train loss:3.911608 +step:1837 train loss:3.959225 +step:1838 train loss:3.937493 +step:1839 train loss:3.940623 +step:1840 train loss:3.920252 +step:1841 train loss:4.015970 +step:1842 train loss:3.948605 +step:1843 train loss:4.015529 +step:1844 train loss:4.049440 +step:1845 train loss:4.410272 +step:1846 train loss:4.037001 +step:1847 train loss:4.047594 +step:1848 train loss:4.143451 +step:1849 train loss:4.055569 +step:1850 train loss:4.211548 +step:1851 train loss:4.019994 +step:1852 train loss:4.040771 +step:1853 train loss:4.066944 +step:1854 train loss:4.073878 +step:1855 train loss:3.983349 +step:1856 train loss:4.041097 +step:1857 train loss:3.972065 +step:1858 train loss:3.973722 +step:1859 train loss:3.993143 +step:1860 train loss:4.126665 +step:1861 train loss:4.007692 +step:1862 train loss:3.990917 +step:1863 train loss:4.061035 +step:1864 train loss:3.950508 +step:1865 train loss:3.947561 +step:1866 train loss:3.912819 +step:1867 train loss:4.070521 +step:1868 train loss:3.900165 +step:1869 train loss:3.960626 +step:1870 train loss:3.987355 +step:1871 train loss:3.894347 +step:1872 train loss:3.887855 +step:1873 train loss:3.938969 +step:1874 train loss:3.890819 +step:1875 train loss:3.933018 +step:1876 train loss:3.900770 +step:1877 train loss:3.898993 +step:1878 train loss:3.941384 +step:1879 train loss:3.895528 +step:1880 train loss:3.901350 +step:1881 train loss:3.911478 +step:1882 train loss:3.847935 +step:1883 train loss:3.888276 +step:1884 train loss:3.956155 +step:1885 train loss:3.962508 +step:1886 train loss:3.915476 +step:1887 train loss:3.937896 +step:1888 train loss:3.855160 +step:1889 train loss:3.884322 +step:1890 train loss:3.854632 +step:1891 train loss:3.935310 +step:1892 train loss:3.931485 +step:1893 train loss:3.873761 +step:1894 train loss:3.898063 +step:1895 train loss:3.913648 +step:1896 train loss:3.924005 +step:1897 train loss:3.894508 +step:1898 train loss:3.945302 +step:1899 train loss:3.955441 +step:1900 train loss:3.895367 +step:1901 train loss:3.895602 +step:1902 train loss:3.948404 +step:1903 train loss:3.860864 +step:1904 train loss:3.866129 +step:1905 train loss:3.894299 +step:1906 train loss:3.884706 +step:1907 train loss:3.888146 +step:1908 train loss:3.880095 +step:1909 train loss:3.929586 +step:1910 train loss:3.865225 +step:1911 train loss:3.907028 +step:1912 train loss:3.966911 +step:1913 train loss:3.926730 +step:1914 train loss:3.920460 +step:1915 train loss:3.974462 +step:1916 train loss:3.925207 +step:1917 train loss:3.913907 +step:1918 train loss:3.904377 +step:1919 train loss:3.843787 +step:1920 train loss:3.853350 +step:1921 train loss:3.891495 +step:1922 train loss:3.961498 +step:1923 train loss:3.851886 +step:1924 train loss:3.915818 +step:1925 train loss:3.887531 +step:1926 train loss:3.912124 
+step:1927 train loss:3.932468 +step:1928 train loss:3.901845 +step:1929 train loss:3.970265 +step:1930 train loss:3.869735 +step:1931 train loss:3.978892 +step:1932 train loss:3.946013 +step:1933 train loss:3.922117 +step:1934 train loss:3.832379 +step:1935 train loss:3.902829 +step:1936 train loss:3.933602 +step:1937 train loss:3.941402 +step:1938 train loss:3.868223 +step:1939 train loss:3.910560 +step:1940 train loss:3.918428 +step:1941 train loss:3.901706 +step:1942 train loss:3.875154 +step:1943 train loss:3.877531 +step:1944 train loss:3.927212 +step:1945 train loss:3.825296 +step:1946 train loss:3.914700 +step:1947 train loss:3.924181 +step:1948 train loss:3.894909 +step:1949 train loss:3.859990 +step:1950 train loss:3.860937 +step:1951 train loss:3.870344 +step:1952 train loss:3.856172 +step:1953 train loss:3.910159 +step:1954 train loss:3.881311 +step:1955 train loss:3.889431 +step:1956 train loss:3.899162 +step:1957 train loss:3.939505 +step:1958 train loss:3.936550 +step:1959 train loss:3.909713 +step:1960 train loss:3.906580 +step:1961 train loss:3.881483 +step:1962 train loss:3.948634 +step:1963 train loss:3.865208 +step:1964 train loss:4.019991 +step:1965 train loss:3.888126 +step:1966 train loss:3.891028 +step:1967 train loss:3.875998 +step:1968 train loss:3.923069 +step:1969 train loss:3.859465 +step:1970 train loss:3.960749 +step:1971 train loss:3.875801 +step:1972 train loss:3.930457 +step:1973 train loss:3.910400 +step:1974 train loss:3.844645 +step:1975 train loss:3.927049 +step:1976 train loss:3.850009 +step:1977 train loss:4.024310 +step:1978 train loss:3.923965 +step:1979 train loss:3.915747 +step:1980 train loss:3.899903 +step:1981 train loss:3.889068 +step:1982 train loss:3.851039 +step:1983 train loss:3.911855 +step:1984 train loss:3.911282 +step:1985 train loss:3.931589 +step:1986 train loss:3.955312 +step:1987 train loss:3.917433 +step:1988 train loss:3.842695 +step:1989 train loss:3.929558 +step:1990 train loss:3.891229 +step:1991 train loss:3.892570 +step:1992 train loss:3.894921 +step:1993 train loss:3.850858 +step:1994 train loss:3.863675 +step:1995 train loss:3.830091 +step:1996 train loss:3.843194 +step:1997 train loss:3.863476 +step:1998 train loss:3.862688 +step:1999 train loss:3.863094 +step:2000 validation loss:3.829129 total_sharp:1.4692e-02 L1_sharp:3.8374e-02 L2_sharp:6.0491e-03 L3_sharp:9.0052e-03 L4_sharp:2.6515e-03 L5_sharp:3.2267e-03 L6_sharp:1.8508e-03 L7_sharp:2.6417e-03 L8_sharp:2.8613e-03 L9_sharp:2.2196e-03 L10_sharp:1.6740e-03 L11_sharp:1.6666e-03 L12_sharp:2.7520e-03 total_fnorm:2.0014e+00 total_l1_linf:1.7442e+04 total_spectral:2.0014e+00 L1_fnorm:3.6611e-01 L2_fnorm:4.1461e-01 L3_fnorm:4.1755e-01 L4_fnorm:4.0861e-01 L5_fnorm:3.6867e-01 L6_fnorm:4.2717e-01 L7_fnorm:4.4391e-01 L8_fnorm:4.5855e-01 L9_fnorm:4.7086e-01 L10_fnorm:4.8588e-01 L11_fnorm:4.8697e-01 L12_fnorm:4.7196e-01 L1_l1linf:4.7075e-01 L2_l1linf:5.0654e-01 L3_l1linf:5.4979e-01 L4_l1linf:5.2003e-01 L5_l1linf:3.8993e-01 L6_l1linf:4.6822e-01 L7_l1linf:4.6694e-01 L8_l1linf:4.8435e-01 L9_l1linf:5.1039e-01 L10_l1linf:5.0870e-01 L11_l1linf:5.2811e-01 L12_l1linf:4.9460e-01 L1_spectral:6.1410e-02 L2_spectral:9.1882e-02 L3_spectral:7.4968e-02 L4_spectral:7.0064e-02 L5_spectral:6.1103e-02 L6_spectral:6.2530e-02 L7_spectral:5.9535e-02 L8_spectral:5.5436e-02 L9_spectral:5.3002e-02 L10_spectral:5.6082e-02 L11_spectral:5.8527e-02 L12_spectral:7.8083e-02 v_norm:2.0014e+00 cos_v_-g_hvp:1.1681e-01 g_hvp_norm:2.9994e-01 cos_v_-g_t:1.4339e-01 g_t_norm:2.4519e-01 hv_norm:5.5934e-01 
cos_v_hv:5.2571e-02 hg_norm:3.4769e+00 cos_g_hg:5.0107e-01 v_par:1.6341e-02 v_perp:2.0014e+00 L1_cos_v_neg_g:2.1101e-01 L1_v_norm:3.6611e-01 L2_cos_v_neg_g:1.0468e-01 L2_v_norm:4.1461e-01 L3_cos_v_neg_g:1.0151e-01 L3_v_norm:4.1755e-01 L4_cos_v_neg_g:9.7723e-02 L4_v_norm:4.0861e-01 L5_cos_v_neg_g:8.3501e-02 L5_v_norm:3.6867e-01 L6_cos_v_neg_g:1.0130e-01 L6_v_norm:4.2717e-01 L7_cos_v_neg_g:1.2282e-01 L7_v_norm:4.4391e-01 L8_cos_v_neg_g:1.3014e-01 L8_v_norm:4.5855e-01 L9_cos_v_neg_g:1.3196e-01 L9_v_norm:4.7086e-01 L10_cos_v_neg_g:1.5842e-01 L10_v_norm:4.8588e-01 L11_cos_v_neg_g:1.6752e-01 L11_v_norm:4.8697e-01 L12_cos_v_neg_g:2.0192e-01 L12_v_norm:4.7196e-01 +step:2000 train loss:3.877015 +step:2001 train loss:3.891573 +step:2002 train loss:3.870258 +step:2003 train loss:3.851475 +step:2004 train loss:3.900670 +step:2005 train loss:3.903991 +step:2006 train loss:3.876157 +step:2007 train loss:3.881958 +step:2008 train loss:3.892934 +step:2009 train loss:3.861331 +step:2010 train loss:3.839243 +step:2011 train loss:3.879532 +step:2012 train loss:3.994056 +step:2013 train loss:3.869775 +step:2014 train loss:3.888226 +step:2015 train loss:3.895421 +step:2016 train loss:3.873073 +step:2017 train loss:3.928850 +step:2018 train loss:3.861970 +step:2019 train loss:3.907933 +step:2020 train loss:3.908272 +step:2021 train loss:3.888361 +step:2022 train loss:3.923883 +step:2023 train loss:3.837994 +step:2024 train loss:3.905384 +step:2025 train loss:3.899525 +step:2026 train loss:3.977986 +step:2027 train loss:3.861567 +step:2028 train loss:3.880934 +step:2029 train loss:3.862676 +step:2030 train loss:3.901814 +step:2031 train loss:3.922128 +step:2032 train loss:3.897265 +step:2033 train loss:3.922554 +step:2034 train loss:3.875493 +step:2035 train loss:3.835436 +step:2036 train loss:3.913148 +step:2037 train loss:3.847527 +step:2038 train loss:3.868797 +step:2039 train loss:3.865784 +step:2040 train loss:3.860236 +step:2041 train loss:3.861915 +step:2042 train loss:3.888031 +step:2043 train loss:3.826555 +step:2044 train loss:3.858415 +step:2045 train loss:3.861173 +step:2046 train loss:3.855293 +step:2047 train loss:3.976818 +step:2048 train loss:3.819256 +step:2049 train loss:3.915897 +step:2050 train loss:3.901596 +step:2051 train loss:3.861943 +step:2052 train loss:3.814957 +step:2053 train loss:3.835552 +step:2054 train loss:3.868145 +step:2055 train loss:3.853415 +step:2056 train loss:3.848077 +step:2057 train loss:3.862623 +step:2058 train loss:3.765462 +step:2059 train loss:3.842538 +step:2060 train loss:3.804637 +step:2061 train loss:3.904074 +step:2062 train loss:3.881691 +step:2063 train loss:3.880489 +step:2064 train loss:3.879951 +step:2065 train loss:3.852899 +step:2066 train loss:3.865530 +step:2067 train loss:3.824988 +step:2068 train loss:3.867900 +step:2069 train loss:3.896271 +step:2070 train loss:3.875089 +step:2071 train loss:3.853743 +step:2072 train loss:3.847307 +step:2073 train loss:3.935408 +step:2074 train loss:3.883949 +step:2075 train loss:3.923212 +step:2076 train loss:3.830078 +step:2077 train loss:3.843834 +step:2078 train loss:3.913228 +step:2079 train loss:3.848829 +step:2080 train loss:3.869178 +step:2081 train loss:3.839398 +step:2082 train loss:3.877031 +step:2083 train loss:3.885205 +step:2084 train loss:3.841909 +step:2085 train loss:3.856862 +step:2086 train loss:3.814311 +step:2087 train loss:3.833270 +step:2088 train loss:3.832328 +step:2089 train loss:3.909349 +step:2090 train loss:3.838965 +step:2091 train loss:3.836867 +step:2092 train loss:3.870769 
+step:2093 train loss:3.859888 +step:2094 train loss:3.977254 +step:2095 train loss:3.965125 +step:2096 train loss:3.885238 +step:2097 train loss:3.858001 +step:2098 train loss:3.877931 +step:2099 train loss:3.837184 +step:2100 train loss:3.840855 +step:2101 train loss:3.849030 +step:2102 train loss:3.846651 +step:2103 train loss:3.874626 +step:2104 train loss:3.802721 +step:2105 train loss:4.003968 +step:2106 train loss:3.928239 +step:2107 train loss:3.907549 +step:2108 train loss:3.931170 +step:2109 train loss:3.846330 +step:2110 train loss:3.892730 +step:2111 train loss:3.892267 +step:2112 train loss:3.870741 +step:2113 train loss:3.855699 +step:2114 train loss:3.927838 +step:2115 train loss:3.866179 +step:2116 train loss:3.884316 +step:2117 train loss:3.889599 +step:2118 train loss:3.895924 +step:2119 train loss:3.805534 +step:2120 train loss:3.881628 +step:2121 train loss:3.846317 +step:2122 train loss:3.826256 +step:2123 train loss:3.933337 +step:2124 train loss:3.859963 +step:2125 train loss:3.835611 +step:2126 train loss:3.954984 +step:2127 train loss:3.801108 +step:2128 train loss:3.949149 +step:2129 train loss:3.851650 +step:2130 train loss:3.905760 +step:2131 train loss:3.810898 +step:2132 train loss:3.800042 +step:2133 train loss:3.808581 +step:2134 train loss:3.810771 +step:2135 train loss:3.848139 +step:2136 train loss:3.813905 +step:2137 train loss:3.775885 +step:2138 train loss:3.848336 +step:2139 train loss:3.818503 +step:2140 train loss:3.731345 +step:2141 train loss:3.887613 +step:2142 train loss:3.869497 +step:2143 train loss:3.940436 +step:2144 train loss:3.888874 +step:2145 train loss:3.855042 +step:2146 train loss:4.099575 +step:2147 train loss:3.842628 +step:2148 train loss:3.884620 +step:2149 train loss:3.823490 +step:2150 train loss:3.818756 +step:2151 train loss:3.921308 +step:2152 train loss:3.833872 +step:2153 train loss:3.842387 +step:2154 train loss:3.825943 +step:2155 train loss:3.899652 +step:2156 train loss:3.869066 +step:2157 train loss:3.841456 +step:2158 train loss:3.840194 +step:2159 train loss:3.797278 +step:2160 train loss:3.907719 +step:2161 train loss:3.866561 +step:2162 train loss:3.820209 +step:2163 train loss:3.883672 +step:2164 train loss:3.818831 +step:2165 train loss:3.872789 +step:2166 train loss:3.810879 +step:2167 train loss:3.860685 +step:2168 train loss:3.882917 +step:2169 train loss:3.853610 +step:2170 train loss:3.863424 +step:2171 train loss:3.855698 +step:2172 train loss:3.843542 +step:2173 train loss:3.831646 +step:2174 train loss:4.025870 +step:2175 train loss:3.874099 +step:2176 train loss:3.844159 +step:2177 train loss:3.851745 +step:2178 train loss:3.873259 +step:2179 train loss:3.790730 +step:2180 train loss:3.811383 +step:2181 train loss:3.799089 +step:2182 train loss:3.806540 +step:2183 train loss:3.845829 +step:2184 train loss:3.853566 +step:2185 train loss:3.857261 +step:2186 train loss:3.888105 +step:2187 train loss:3.883752 +step:2188 train loss:3.865917 +step:2189 train loss:3.845030 +step:2190 train loss:3.864610 +step:2191 train loss:3.836062 +step:2192 train loss:3.837601 +step:2193 train loss:3.841534 +step:2194 train loss:3.854558 +step:2195 train loss:3.862120 +step:2196 train loss:3.857783 +step:2197 train loss:3.869885 +step:2198 train loss:3.854082 +step:2199 train loss:3.867031 +step:2200 train loss:3.847889 +step:2201 train loss:3.839744 +step:2202 train loss:3.842366 +step:2203 train loss:3.809310 +step:2204 train loss:3.760931 +step:2205 train loss:3.870378 +step:2206 train loss:3.817499 +step:2207 train 
loss:3.817429 +step:2208 train loss:3.837113 +step:2209 train loss:3.799590 +step:2210 train loss:3.823872 +step:2211 train loss:3.787148 +step:2212 train loss:3.884672 +step:2213 train loss:3.859466 +step:2214 train loss:3.810487 +step:2215 train loss:3.867355 +step:2216 train loss:3.847940 +step:2217 train loss:3.838254 +step:2218 train loss:3.803196 +step:2219 train loss:3.791564 +step:2220 train loss:3.832743 +step:2221 train loss:3.833579 +step:2222 train loss:3.824461 +step:2223 train loss:3.797812 +step:2224 train loss:3.888384 +step:2225 train loss:3.809661 +step:2226 train loss:3.823325 +step:2227 train loss:3.844177 +step:2228 train loss:3.877089 +step:2229 train loss:3.776460 +step:2230 train loss:3.889787 +step:2231 train loss:3.839138 +step:2232 train loss:3.856321 +step:2233 train loss:3.825090 +step:2234 train loss:3.875612 +step:2235 train loss:3.835164 +step:2236 train loss:3.771496 +step:2237 train loss:3.861832 +step:2238 train loss:3.798350 +step:2239 train loss:3.878583 +step:2240 train loss:3.863442 +step:2241 train loss:3.842820 +step:2242 train loss:3.908922 +step:2243 train loss:3.823069 +step:2244 train loss:3.812625 +step:2245 train loss:3.965616 +step:2246 train loss:3.833626 +step:2247 train loss:3.810699 +step:2248 train loss:3.794871 +step:2249 train loss:3.895351 +step:2250 validation loss:3.783296 +step:2250 train loss:3.813283 +step:2251 train loss:3.922607 +step:2252 train loss:3.880674 +step:2253 train loss:3.829792 +step:2254 train loss:3.827214 +step:2255 train loss:3.819005 +step:2256 train loss:3.810343 +step:2257 train loss:3.793758 +step:2258 train loss:3.777994 +step:2259 train loss:3.875831 +step:2260 train loss:3.747443 +step:2261 train loss:3.845535 +step:2262 train loss:3.848029 +step:2263 train loss:3.824851 +step:2264 train loss:3.854747 +step:2265 train loss:3.786365 +step:2266 train loss:3.998503 +step:2267 train loss:3.851776 +step:2268 train loss:3.855174 +step:2269 train loss:3.821196 +step:2270 train loss:3.696996 +step:2271 train loss:3.805748 +step:2272 train loss:3.828068 +step:2273 train loss:3.867382 +step:2274 train loss:3.884356 +step:2275 train loss:3.815491 +step:2276 train loss:3.880143 +step:2277 train loss:3.779822 +step:2278 train loss:3.858689 +step:2279 train loss:3.764463 +step:2280 train loss:3.781646 +step:2281 train loss:3.811801 +step:2282 train loss:3.793887 +step:2283 train loss:3.812935 +step:2284 train loss:3.792794 +step:2285 train loss:3.820708 +step:2286 train loss:3.863579 +step:2287 train loss:3.824767 +step:2288 train loss:3.800798 +step:2289 train loss:3.878627 +step:2290 train loss:3.872095 +step:2291 train loss:3.953610 +step:2292 train loss:3.793161 +step:2293 train loss:3.838351 +step:2294 train loss:3.861794 +step:2295 train loss:3.795771 +step:2296 train loss:3.793547 +step:2297 train loss:3.838069 +step:2298 train loss:3.843195 +step:2299 train loss:3.866000 +step:2300 train loss:3.793450 +step:2301 train loss:3.777967 +step:2302 train loss:3.899604 +step:2303 train loss:3.846873 +step:2304 train loss:3.791329 +step:2305 train loss:3.800034 +step:2306 train loss:3.820498 +step:2307 train loss:3.794264 +step:2308 train loss:3.841995 +step:2309 train loss:3.852648 +step:2310 train loss:3.821261 +step:2311 train loss:3.806465 +step:2312 train loss:3.828854 +step:2313 train loss:3.846120 +step:2314 train loss:3.877172 +step:2315 train loss:3.862875 +step:2316 train loss:3.842698 +step:2317 train loss:3.768467 +step:2318 train loss:3.809915 +step:2319 train loss:3.830629 +step:2320 train loss:3.824166 
+step:2321 train loss:3.807357 +step:2322 train loss:3.830102 +step:2323 train loss:3.800042 +step:2324 train loss:3.866339 +step:2325 train loss:3.848003 +step:2326 train loss:3.781970 +step:2327 train loss:3.812748 +step:2328 train loss:3.814568 +step:2329 train loss:3.817227 +step:2330 train loss:3.812037 +step:2331 train loss:3.853961 +step:2332 train loss:3.831630 +step:2333 train loss:3.764745 +step:2334 train loss:3.864186 +step:2335 train loss:3.895709 +step:2336 train loss:3.829453 +step:2337 train loss:3.834945 +step:2338 train loss:3.819414 +step:2339 train loss:3.817012 +step:2340 train loss:3.798516 +step:2341 train loss:3.854594 +step:2342 train loss:3.811202 +step:2343 train loss:3.828488 +step:2344 train loss:3.813985 +step:2345 train loss:3.774487 +step:2346 train loss:3.786499 +step:2347 train loss:3.878040 +step:2348 train loss:3.861079 +step:2349 train loss:3.801094 +step:2350 train loss:3.833813 +step:2351 train loss:3.839857 +step:2352 train loss:3.812910 +step:2353 train loss:3.734077 +step:2354 train loss:3.787515 +step:2355 train loss:3.835532 +step:2356 train loss:3.757178 +step:2357 train loss:3.858377 +step:2358 train loss:3.842046 +step:2359 train loss:3.812272 +step:2360 train loss:3.826044 +step:2361 train loss:3.854976 +step:2362 train loss:3.776345 +step:2363 train loss:3.791715 +step:2364 train loss:3.828947 +step:2365 train loss:3.842544 +step:2366 train loss:3.767063 +step:2367 train loss:3.848954 +step:2368 train loss:3.779728 +step:2369 train loss:3.761377 +step:2370 train loss:3.821622 +step:2371 train loss:3.814081 +step:2372 train loss:3.805301 +step:2373 train loss:3.881968 +step:2374 train loss:3.830733 +step:2375 train loss:3.802653 +step:2376 train loss:3.861433 +step:2377 train loss:3.839272 +step:2378 train loss:3.972180 +step:2379 train loss:3.891101 +step:2380 train loss:3.783917 +step:2381 train loss:3.815768 +step:2382 train loss:3.828613 +step:2383 train loss:3.826002 +step:2384 train loss:3.845681 +step:2385 train loss:3.848112 +step:2386 train loss:3.900746 +step:2387 train loss:3.823786 +step:2388 train loss:3.832207 +step:2389 train loss:3.810150 +step:2390 train loss:3.819103 +step:2391 train loss:3.831656 +step:2392 train loss:3.817352 +step:2393 train loss:3.797553 +step:2394 train loss:3.803251 +step:2395 train loss:3.892016 +step:2396 train loss:3.748598 +step:2397 train loss:3.802899 +step:2398 train loss:3.740390 +step:2399 train loss:3.862854 +step:2400 train loss:3.773514 +step:2401 train loss:3.806096 +step:2402 train loss:3.856032 +step:2403 train loss:3.866671 +step:2404 train loss:3.824487 +step:2405 train loss:3.799865 +step:2406 train loss:3.803753 +step:2407 train loss:3.813717 +step:2408 train loss:3.838421 +step:2409 train loss:3.747382 +step:2410 train loss:3.803576 +step:2411 train loss:3.787699 +step:2412 train loss:3.746857 +step:2413 train loss:3.838973 +step:2414 train loss:3.841940 +step:2415 train loss:3.812246 +step:2416 train loss:3.774160 +step:2417 train loss:3.889191 +step:2418 train loss:3.769909 +step:2419 train loss:3.780815 +step:2420 train loss:3.775945 +step:2421 train loss:3.796132 +step:2422 train loss:3.825815 +step:2423 train loss:3.808350 +step:2424 train loss:3.792434 +step:2425 train loss:3.832233 +step:2426 train loss:3.779768 +step:2427 train loss:3.854173 +step:2428 train loss:3.831353 +step:2429 train loss:3.784149 +step:2430 train loss:3.743457 +step:2431 train loss:3.762616 +step:2432 train loss:3.772719 +step:2433 train loss:3.805794 +step:2434 train loss:3.802403 +step:2435 train 
loss:3.955658 +step:2436 train loss:3.797870 +step:2437 train loss:3.780745 +step:2438 train loss:3.797523 +step:2439 train loss:3.775879 +step:2440 train loss:3.750118 +step:2441 train loss:3.764157 +step:2442 train loss:3.823318 +step:2443 train loss:3.818931 +step:2444 train loss:3.886329 +step:2445 train loss:3.823781 +step:2446 train loss:3.800777 +step:2447 train loss:3.782173 +step:2448 train loss:3.792962 +step:2449 train loss:3.823372 +step:2450 train loss:3.791935 +step:2451 train loss:3.784101 +step:2452 train loss:3.817158 +step:2453 train loss:3.791818 +step:2454 train loss:3.792799 +step:2455 train loss:3.841932 +step:2456 train loss:3.793684 +step:2457 train loss:3.812280 +step:2458 train loss:3.832026 +step:2459 train loss:3.789531 +step:2460 train loss:3.796416 +step:2461 train loss:3.805707 +step:2462 train loss:3.811907 +step:2463 train loss:3.796550 +step:2464 train loss:3.878173 +step:2465 train loss:3.931523 +step:2466 train loss:3.873488 +step:2467 train loss:3.822341 +step:2468 train loss:3.818303 +step:2469 train loss:3.812798 +step:2470 train loss:3.798955 +step:2471 train loss:3.807449 +step:2472 train loss:3.820550 +step:2473 train loss:3.785471 +step:2474 train loss:3.817572 +step:2475 train loss:3.841933 +step:2476 train loss:3.896302 +step:2477 train loss:3.823386 +step:2478 train loss:3.809785 +step:2479 train loss:3.773852 +step:2480 train loss:3.800429 +step:2481 train loss:3.785410 +step:2482 train loss:3.781981 +step:2483 train loss:3.826241 +step:2484 train loss:3.856448 +step:2485 train loss:3.882192 +step:2486 train loss:3.782032 +step:2487 train loss:3.797932 +step:2488 train loss:3.820396 +step:2489 train loss:3.746600 +step:2490 train loss:3.796111 +step:2491 train loss:3.761025 +step:2492 train loss:3.757625 +step:2493 train loss:3.799270 +step:2494 train loss:3.806868 +step:2495 train loss:3.790508 +step:2496 train loss:3.828231 +step:2497 train loss:3.829641 +step:2498 train loss:3.864090 +step:2499 train loss:3.807655 +step:2500 validation loss:3.741284 total_sharp:1.6847e-02 L1_sharp:2.5590e-02 L2_sharp:1.1773e-03 L3_sharp:3.1203e-03 L4_sharp:2.7423e-03 L5_sharp:3.4541e-03 L6_sharp:2.6112e-03 L7_sharp:3.3938e-03 L8_sharp:4.2994e-03 L9_sharp:3.9615e-03 L10_sharp:2.3284e-03 L11_sharp:2.3234e-03 L12_sharp:6.4653e-03 total_fnorm:2.1101e+00 total_l1_linf:1.8580e+04 total_spectral:2.1101e+00 L1_fnorm:4.4348e-01 L2_fnorm:4.5485e-01 L3_fnorm:4.5785e-01 L4_fnorm:4.5203e-01 L5_fnorm:4.0821e-01 L6_fnorm:4.6245e-01 L7_fnorm:4.7384e-01 L8_fnorm:4.8438e-01 L9_fnorm:4.9654e-01 L10_fnorm:5.1268e-01 L11_fnorm:5.1576e-01 L12_fnorm:5.0392e-01 L1_l1linf:5.6754e-01 L2_l1linf:5.4940e-01 L3_l1linf:6.2825e-01 L4_l1linf:5.7481e-01 L5_l1linf:4.2503e-01 L6_l1linf:5.4399e-01 L7_l1linf:5.0811e-01 L8_l1linf:5.1038e-01 L9_l1linf:5.1323e-01 L10_l1linf:5.3422e-01 L11_l1linf:5.4349e-01 L12_l1linf:5.2178e-01 L1_spectral:8.9807e-02 L2_spectral:8.4749e-02 L3_spectral:8.3743e-02 L4_spectral:8.6282e-02 L5_spectral:6.5561e-02 L6_spectral:7.7168e-02 L7_spectral:6.7658e-02 L8_spectral:5.8218e-02 L9_spectral:5.5689e-02 L10_spectral:5.5014e-02 L11_spectral:5.8041e-02 L12_spectral:9.0137e-02 v_norm:2.1101e+00 cos_v_-g_hvp:1.0440e-01 g_hvp_norm:3.8637e-01 cos_v_-g_t:1.1691e-01 g_t_norm:3.4621e-01 hv_norm:6.0113e-01 cos_v_hv:5.9137e-02 hg_norm:3.3265e+00 cos_g_hg:7.2443e-01 v_par:1.3793e-02 v_perp:2.1101e+00 L1_cos_v_neg_g:2.1271e-01 L1_v_norm:4.4348e-01 L2_cos_v_neg_g:1.0528e-01 L2_v_norm:4.5485e-01 L3_cos_v_neg_g:8.8388e-02 L3_v_norm:4.5785e-01 L4_cos_v_neg_g:9.7835e-02 
L4_v_norm:4.5203e-01 L5_cos_v_neg_g:7.8582e-02 L5_v_norm:4.0821e-01 L6_cos_v_neg_g:9.2324e-02 L6_v_norm:4.6245e-01 L7_cos_v_neg_g:1.1742e-01 L7_v_norm:4.7384e-01 L8_cos_v_neg_g:1.1072e-01 L8_v_norm:4.8438e-01 L9_cos_v_neg_g:1.0440e-01 L9_v_norm:4.9654e-01 L10_cos_v_neg_g:1.1753e-01 L10_v_norm:5.1268e-01 L11_cos_v_neg_g:1.3540e-01 L11_v_norm:5.1576e-01 L12_cos_v_neg_g:1.8559e-01 L12_v_norm:5.0392e-01 +step:2500 train loss:3.802038 +step:2501 train loss:3.717361 +step:2502 train loss:3.898287 +step:2503 train loss:3.806000 +step:2504 train loss:3.827176 +step:2505 train loss:3.792581 +step:2506 train loss:3.828477 +step:2507 train loss:3.752957 +step:2508 train loss:3.809923 +step:2509 train loss:3.754482 +step:2510 train loss:3.781629 +step:2511 train loss:3.748687 +step:2512 train loss:3.847468 +step:2513 train loss:3.776498 +step:2514 train loss:3.809836 +step:2515 train loss:3.809134 +step:2516 train loss:3.776148 +step:2517 train loss:3.723531 +step:2518 train loss:3.768418 +step:2519 train loss:3.829592 +step:2520 train loss:3.758789 +step:2521 train loss:3.802723 +step:2522 train loss:3.809782 +step:2523 train loss:3.789497 +step:2524 train loss:3.831410 +step:2525 train loss:3.747698 +step:2526 train loss:3.785705 +step:2527 train loss:3.756254 +step:2528 train loss:3.877788 +step:2529 train loss:3.764628 +step:2530 train loss:3.795370 +step:2531 train loss:3.800350 +step:2532 train loss:3.792017 +step:2533 train loss:3.861219 +step:2534 train loss:3.877246 +step:2535 train loss:3.751524 +step:2536 train loss:3.794384 +step:2537 train loss:3.737946 +step:2538 train loss:3.817122 +step:2539 train loss:3.733268 +step:2540 train loss:3.785450 +step:2541 train loss:3.770183 +step:2542 train loss:3.771360 +step:2543 train loss:3.784483 +step:2544 train loss:3.781284 +step:2545 train loss:3.742368 +step:2546 train loss:3.788287 +step:2547 train loss:3.811831 +step:2548 train loss:3.795759 +step:2549 train loss:3.710326 +step:2550 train loss:3.797735 +step:2551 train loss:3.761866 +step:2552 train loss:3.813635 +step:2553 train loss:3.746369 +step:2554 train loss:3.857602 +step:2555 train loss:3.739266 +step:2556 train loss:3.783198 +step:2557 train loss:3.766776 +step:2558 train loss:3.801003 +step:2559 train loss:3.791644 +step:2560 train loss:3.795633 +step:2561 train loss:3.759814 +step:2562 train loss:3.733124 +step:2563 train loss:3.836245 +step:2564 train loss:3.801936 +step:2565 train loss:3.841375 +step:2566 train loss:3.755168 +step:2567 train loss:3.789376 +step:2568 train loss:3.703964 +step:2569 train loss:3.804780 +step:2570 train loss:3.734929 +step:2571 train loss:3.786455 +step:2572 train loss:3.700357 +step:2573 train loss:3.734127 +step:2574 train loss:3.695022 +step:2575 train loss:3.755862 +step:2576 train loss:3.760664 +step:2577 train loss:3.804469 +step:2578 train loss:3.743916 +step:2579 train loss:3.803268 +step:2580 train loss:3.769446 +step:2581 train loss:3.797544 +step:2582 train loss:3.805532 +step:2583 train loss:3.796062 +step:2584 train loss:3.787100 +step:2585 train loss:3.717382 +step:2586 train loss:3.790027 +step:2587 train loss:3.692387 +step:2588 train loss:3.737958 +step:2589 train loss:3.764115 +step:2590 train loss:3.748556 +step:2591 train loss:3.753082 +step:2592 train loss:3.806155 +step:2593 train loss:3.797210 +step:2594 train loss:3.804436 +step:2595 train loss:3.779441 +step:2596 train loss:3.807330 +step:2597 train loss:3.757916 +step:2598 train loss:3.807971 +step:2599 train loss:3.752367 +step:2600 train loss:3.769706 +step:2601 train 
loss:3.776381 +step:2602 train loss:3.831554 +step:2603 train loss:3.781886 +step:2604 train loss:3.791255 +step:2605 train loss:3.759495 +step:2606 train loss:3.872485 +step:2607 train loss:3.775672 +step:2608 train loss:3.819296 +step:2609 train loss:3.828108 +step:2610 train loss:3.784229 +step:2611 train loss:3.737781 +step:2612 train loss:3.774024 +step:2613 train loss:3.752469 +step:2614 train loss:3.817419 +step:2615 train loss:3.879780 +step:2616 train loss:3.815230 +step:2617 train loss:3.783167 +step:2618 train loss:3.787950 +step:2619 train loss:3.799618 +step:2620 train loss:3.837439 +step:2621 train loss:3.777478 +step:2622 train loss:3.807432 +step:2623 train loss:3.749309 +step:2624 train loss:3.747331 +step:2625 train loss:3.790845 +step:2626 train loss:3.761200 +step:2627 train loss:3.762645 +step:2628 train loss:3.866620 +step:2629 train loss:3.804757 +step:2630 train loss:3.742316 +step:2631 train loss:3.810529 +step:2632 train loss:3.766844 +step:2633 train loss:3.792778 +step:2634 train loss:3.759027 +step:2635 train loss:3.762113 +step:2636 train loss:3.712286 +step:2637 train loss:3.716583 +step:2638 train loss:3.703089 +step:2639 train loss:3.786431 +step:2640 train loss:3.794998 +step:2641 train loss:3.704839 +step:2642 train loss:3.737749 +step:2643 train loss:3.809382 +step:2644 train loss:3.889246 +step:2645 train loss:3.770614 +step:2646 train loss:3.744639 +step:2647 train loss:3.727279 +step:2648 train loss:3.851729 +step:2649 train loss:3.830395 +step:2650 train loss:3.798828 +step:2651 train loss:3.851937 +step:2652 train loss:3.864682 +step:2653 train loss:3.759289 +step:2654 train loss:3.820289 +step:2655 train loss:3.808346 +step:2656 train loss:3.736240 +step:2657 train loss:3.730485 +step:2658 train loss:3.704031 +step:2659 train loss:3.734062 +step:2660 train loss:3.785272 +step:2661 train loss:3.750849 +step:2662 train loss:3.732462 +step:2663 train loss:3.861954 +step:2664 train loss:3.759595 +step:2665 train loss:3.809417 +step:2666 train loss:3.799464 +step:2667 train loss:3.865315 +step:2668 train loss:3.770502 +step:2669 train loss:3.745345 +step:2670 train loss:3.733871 +step:2671 train loss:3.785259 +step:2672 train loss:3.752550 +step:2673 train loss:3.775970 +step:2674 train loss:3.847169 +step:2675 train loss:3.828723 +step:2676 train loss:3.705130 +step:2677 train loss:3.829464 +step:2678 train loss:3.778501 +step:2679 train loss:3.825070 +step:2680 train loss:3.775888 +step:2681 train loss:3.814505 +step:2682 train loss:3.746518 +step:2683 train loss:3.702771 +step:2684 train loss:3.791025 +step:2685 train loss:3.795803 +step:2686 train loss:3.760236 +step:2687 train loss:3.791921 +step:2688 train loss:3.723319 +step:2689 train loss:3.794929 +step:2690 train loss:3.798180 +step:2691 train loss:3.700656 +step:2692 train loss:3.813499 +step:2693 train loss:3.743690 +step:2694 train loss:3.733293 +step:2695 train loss:3.806598 +step:2696 train loss:3.791131 +step:2697 train loss:3.741518 +step:2698 train loss:3.812348 +step:2699 train loss:3.765054 +step:2700 train loss:3.714067 +step:2701 train loss:3.725581 +step:2702 train loss:3.719553 +step:2703 train loss:3.813632 +step:2704 train loss:3.710701 +step:2705 train loss:3.849268 +step:2706 train loss:3.683368 +step:2707 train loss:3.727550 +step:2708 train loss:3.745980 +step:2709 train loss:3.789034 +step:2710 train loss:3.824875 +step:2711 train loss:3.745574 +step:2712 train loss:3.687456 +step:2713 train loss:3.730052 +step:2714 train loss:3.767716 +step:2715 train loss:3.711413 
+step:2716 train loss:3.758086 +step:2717 train loss:3.747043 +step:2718 train loss:3.759988 +step:2719 train loss:3.719352 +step:2720 train loss:3.774686 +step:2721 train loss:3.788028 +step:2722 train loss:3.716290 +step:2723 train loss:3.769192 +step:2724 train loss:3.722801 +step:2725 train loss:3.721386 +step:2726 train loss:3.748877 +step:2727 train loss:3.686541 +step:2728 train loss:3.751780 +step:2729 train loss:3.709193 +step:2730 train loss:3.759798 +step:2731 train loss:3.735813 +step:2732 train loss:3.757061 +step:2733 train loss:3.773632 +step:2734 train loss:3.711977 +step:2735 train loss:3.710723 +step:2736 train loss:3.769553 +step:2737 train loss:3.690294 +step:2738 train loss:3.702275 +step:2739 train loss:3.753865 +step:2740 train loss:3.736055 +step:2741 train loss:3.689413 +step:2742 train loss:3.749819 +step:2743 train loss:3.789495 +step:2744 train loss:3.745880 +step:2745 train loss:3.722298 +step:2746 train loss:3.761225 +step:2747 train loss:3.731996 +step:2748 train loss:3.739459 +step:2749 train loss:3.703524 +step:2750 validation loss:3.705013 +step:2750 train loss:3.770528 +step:2751 train loss:3.753260 +step:2752 train loss:3.741611 +step:2753 train loss:3.774189 +step:2754 train loss:3.768592 +step:2755 train loss:3.722201 +step:2756 train loss:3.745215 +step:2757 train loss:3.757532 +step:2758 train loss:3.743972 +step:2759 train loss:3.739555 +step:2760 train loss:3.764160 +step:2761 train loss:3.719175 +step:2762 train loss:3.730174 +step:2763 train loss:3.749014 +step:2764 train loss:3.788968 +step:2765 train loss:3.754849 +step:2766 train loss:3.761224 +step:2767 train loss:3.772815 +step:2768 train loss:3.730580 +step:2769 train loss:3.704577 +step:2770 train loss:3.739804 +step:2771 train loss:3.781107 +step:2772 train loss:3.855196 +step:2773 train loss:3.824699 +step:2774 train loss:3.690061 +step:2775 train loss:3.750344 +step:2776 train loss:3.742255 +step:2777 train loss:3.788555 +step:2778 train loss:3.809318 +step:2779 train loss:3.759665 +step:2780 train loss:3.763392 +step:2781 train loss:3.733210 +step:2782 train loss:3.750594 +step:2783 train loss:3.733153 +step:2784 train loss:3.829905 +step:2785 train loss:3.735584 +step:2786 train loss:3.705326 +step:2787 train loss:3.805637 +step:2788 train loss:3.737488 +step:2789 train loss:3.757836 +step:2790 train loss:3.731557 +step:2791 train loss:3.745144 +step:2792 train loss:3.725850 +step:2793 train loss:3.742833 +step:2794 train loss:3.709177 +step:2795 train loss:3.717273 +step:2796 train loss:3.763634 +step:2797 train loss:3.737785 +step:2798 train loss:3.736120 +step:2799 train loss:3.707379 +step:2800 train loss:3.764735 +step:2801 train loss:3.721865 +step:2802 train loss:3.768007 +step:2803 train loss:3.780174 +step:2804 train loss:3.710995 +step:2805 train loss:3.821216 +step:2806 train loss:3.771461 +step:2807 train loss:3.705696 +step:2808 train loss:3.724169 +step:2809 train loss:3.735993 +step:2810 train loss:3.755141 +step:2811 train loss:3.685141 +step:2812 train loss:3.757014 +step:2813 train loss:3.791804 +step:2814 train loss:3.708315 +step:2815 train loss:3.741944 +step:2816 train loss:3.685123 +step:2817 train loss:3.755923 +step:2818 train loss:3.726987 +step:2819 train loss:3.682565 +step:2820 train loss:3.737092 +step:2821 train loss:3.832840 +step:2822 train loss:3.771041 +step:2823 train loss:3.731172 +step:2824 train loss:3.774494 +step:2825 train loss:3.693853 +step:2826 train loss:3.723804 +step:2827 train loss:3.714951 +step:2828 train loss:3.687096 +step:2829 
train loss:3.742499 +step:2830 train loss:3.678150 +step:2831 train loss:3.776724 +step:2832 train loss:3.756075 +step:2833 train loss:3.740939 +step:2834 train loss:3.745581 +step:2835 train loss:3.721590 +step:2836 train loss:3.741545 +step:2837 train loss:3.686347 +step:2838 train loss:3.706687 +step:2839 train loss:3.675118 +step:2840 train loss:3.787611 +step:2841 train loss:3.731792 +step:2842 train loss:3.733701 +step:2843 train loss:3.766587 +step:2844 train loss:3.687395 +step:2845 train loss:3.746744 +step:2846 train loss:3.706330 +step:2847 train loss:3.748867 +step:2848 train loss:3.742418 +step:2849 train loss:3.768054 +step:2850 train loss:3.828636 +step:2851 train loss:3.754125 +step:2852 train loss:3.784132 +step:2853 train loss:3.749969 +step:2854 train loss:3.723782 +step:2855 train loss:3.747611 +step:2856 train loss:3.868534 +step:2857 train loss:3.768111 +step:2858 train loss:3.780898 +step:2859 train loss:3.739639 +step:2860 train loss:3.772923 +step:2861 train loss:3.875639 +step:2862 train loss:3.770977 +step:2863 train loss:3.783710 +step:2864 train loss:3.793196 +step:2865 train loss:3.762711 +step:2866 train loss:3.772921 +step:2867 train loss:3.803412 +step:2868 train loss:3.732524 +step:2869 train loss:3.788128 +step:2870 train loss:3.827268 +step:2871 train loss:3.736719 +step:2872 train loss:3.754501 +step:2873 train loss:3.747718 +step:2874 train loss:3.750693 +step:2875 train loss:3.759942 +step:2876 train loss:3.753633 +step:2877 train loss:3.781808 +step:2878 train loss:3.746227 +step:2879 train loss:3.723275 +step:2880 train loss:3.806778 +step:2881 train loss:3.757642 +step:2882 train loss:3.748487 +step:2883 train loss:3.794431 +step:2884 train loss:3.791382 +step:2885 train loss:3.748485 +step:2886 train loss:3.736739 +step:2887 train loss:3.780418 +step:2888 train loss:3.793600 +step:2889 train loss:3.767764 +step:2890 train loss:3.776035 +step:2891 train loss:3.757175 +step:2892 train loss:3.747562 +step:2893 train loss:3.737828 +step:2894 train loss:3.785353 +step:2895 train loss:3.729690 +step:2896 train loss:3.759843 +step:2897 train loss:3.769797 +step:2898 train loss:3.813237 +step:2899 train loss:3.746861 +step:2900 train loss:3.732533 +step:2901 train loss:3.798633 +step:2902 train loss:3.677985 +step:2903 train loss:3.825438 +step:2904 train loss:3.799822 +step:2905 train loss:3.775261 +step:2906 train loss:3.746497 +step:2907 train loss:3.799924 +step:2908 train loss:3.735746 +step:2909 train loss:3.773890 +step:2910 train loss:3.810475 +step:2911 train loss:3.701346 +step:2912 train loss:3.734714 +step:2913 train loss:3.772076 +step:2914 train loss:3.724151 +step:2915 train loss:3.749146 +step:2916 train loss:3.704701 +step:2917 train loss:3.722538 +step:2918 train loss:3.789392 +step:2919 train loss:3.785229 +step:2920 train loss:3.761021 +step:2921 train loss:3.745839 +step:2922 train loss:3.716422 +step:2923 train loss:3.741874 +step:2924 train loss:3.734315 +step:2925 train loss:3.780035 +step:2926 train loss:3.759026 +step:2927 train loss:3.703125 +step:2928 train loss:3.762365 +step:2929 train loss:3.720192 +step:2930 train loss:3.705864 +step:2931 train loss:3.737292 +step:2932 train loss:3.775884 +step:2933 train loss:3.845317 +step:2934 train loss:3.748805 +step:2935 train loss:3.726013 +step:2936 train loss:3.742488 +step:2937 train loss:3.760013 +step:2938 train loss:3.761519 +step:2939 train loss:3.863958 +step:2940 train loss:3.780367 +step:2941 train loss:3.813717 +step:2942 train loss:3.745703 +step:2943 train loss:3.770214 
+step:2944 train loss:3.759342 +step:2945 train loss:3.760897 +step:2946 train loss:3.742632 +step:2947 train loss:3.718194 +step:2948 train loss:3.706660 +step:2949 train loss:3.725996 +step:2950 train loss:3.807524 +step:2951 train loss:3.763283 +step:2952 train loss:3.757196 +step:2953 train loss:3.747682 +step:2954 train loss:3.756036 +step:2955 train loss:3.828521 +step:2956 train loss:3.733864 +step:2957 train loss:3.752406 +step:2958 train loss:3.783733 +step:2959 train loss:3.735316 +step:2960 train loss:3.757310 +step:2961 train loss:3.733323 +step:2962 train loss:3.742279 +step:2963 train loss:3.717080 +step:2964 train loss:3.762562 +step:2965 train loss:3.804684 +step:2966 train loss:3.680883 +step:2967 train loss:3.755433 +step:2968 train loss:3.676961 +step:2969 train loss:3.765724 +step:2970 train loss:3.728211 +step:2971 train loss:3.697329 +step:2972 train loss:3.672854 +step:2973 train loss:3.763003 +step:2974 train loss:3.681221 +step:2975 train loss:3.697279 +step:2976 train loss:3.716557 +step:2977 train loss:3.708496 +step:2978 train loss:3.729227 +step:2979 train loss:3.682886 +step:2980 train loss:3.718535 +step:2981 train loss:3.755002 +step:2982 train loss:3.674748 +step:2983 train loss:3.708838 +step:2984 train loss:3.722596 +step:2985 train loss:3.709902 +step:2986 train loss:3.747350 +step:2987 train loss:3.706111 +step:2988 train loss:3.732509 +step:2989 train loss:3.752842 +step:2990 train loss:3.688250 +step:2991 train loss:3.751115 +step:2992 train loss:3.677270 +step:2993 train loss:3.662774 +step:2994 train loss:3.755244 +step:2995 train loss:3.695490 +step:2996 train loss:3.698790 +step:2997 train loss:3.694894 +step:2998 train loss:3.697209 +step:2999 train loss:3.684907 +step:3000 validation loss:3.680790 total_sharp:1.2894e-02 L1_sharp:2.0079e-02 L2_sharp:1.0931e-03 L3_sharp:2.1777e-03 L4_sharp:2.7575e-03 L5_sharp:3.0270e-03 L6_sharp:1.9643e-03 L7_sharp:2.4242e-03 L8_sharp:3.2204e-03 L9_sharp:2.8791e-03 L10_sharp:1.7733e-03 L11_sharp:1.7131e-03 L12_sharp:4.8701e-03 total_fnorm:2.1572e+00 total_l1_linf:1.9072e+04 total_spectral:2.1572e+00 L1_fnorm:4.6746e-01 L2_fnorm:4.8207e-01 L3_fnorm:4.8068e-01 L4_fnorm:4.7369e-01 L5_fnorm:4.3801e-01 L6_fnorm:4.7760e-01 L7_fnorm:4.8796e-01 L8_fnorm:4.9829e-01 L9_fnorm:5.0747e-01 L10_fnorm:5.2165e-01 L11_fnorm:5.2294e-01 L12_fnorm:5.1773e-01 L1_l1linf:5.8562e-01 L2_l1linf:6.0219e-01 L3_l1linf:5.9630e-01 L4_l1linf:5.9583e-01 L5_l1linf:4.4742e-01 L6_l1linf:5.9501e-01 L7_l1linf:5.7979e-01 L8_l1linf:5.5468e-01 L9_l1linf:5.2747e-01 L10_l1linf:5.3883e-01 L11_l1linf:5.4845e-01 L12_l1linf:5.5973e-01 L1_spectral:8.3700e-02 L2_spectral:8.4448e-02 L3_spectral:9.1732e-02 L4_spectral:9.6908e-02 L5_spectral:7.0151e-02 L6_spectral:8.2971e-02 L7_spectral:7.2103e-02 L8_spectral:6.2881e-02 L9_spectral:5.2400e-02 L10_spectral:5.1058e-02 L11_spectral:5.3703e-02 L12_spectral:8.0186e-02 v_norm:2.1572e+00 cos_v_-g_hvp:9.3163e-02 g_hvp_norm:3.8515e-01 cos_v_-g_t:1.0492e-01 g_t_norm:3.4391e-01 hv_norm:5.6267e-01 cos_v_hv:4.9436e-02 hg_norm:3.4722e+00 cos_g_hg:6.9760e-01 v_par:1.2535e-02 v_perp:2.1572e+00 L1_cos_v_neg_g:1.9744e-01 L1_v_norm:4.6746e-01 L2_cos_v_neg_g:9.6839e-02 L2_v_norm:4.8207e-01 L3_cos_v_neg_g:7.4696e-02 L3_v_norm:4.8068e-01 L4_cos_v_neg_g:8.2804e-02 L4_v_norm:4.7369e-01 L5_cos_v_neg_g:6.4540e-02 L5_v_norm:4.3801e-01 L6_cos_v_neg_g:8.1160e-02 L6_v_norm:4.7760e-01 L7_cos_v_neg_g:9.4952e-02 L7_v_norm:4.8796e-01 L8_cos_v_neg_g:9.7932e-02 L8_v_norm:4.9829e-01 L9_cos_v_neg_g:9.0875e-02 L9_v_norm:5.0747e-01 
L10_cos_v_neg_g:1.0257e-01 L10_v_norm:5.2165e-01 L11_cos_v_neg_g:1.1893e-01 L11_v_norm:5.2294e-01 L12_cos_v_neg_g:1.8135e-01 L12_v_norm:5.1773e-01 +step:3000 train loss:3.657906 +step:3001 train loss:3.723990 +step:3002 train loss:3.769659 +step:3003 train loss:3.730167 +step:3004 train loss:3.748719 +step:3005 train loss:3.734407 +step:3006 train loss:3.749321 +step:3007 train loss:3.785045 +step:3008 train loss:3.755071 +step:3009 train loss:3.648398 +step:3010 train loss:3.729136 +step:3011 train loss:3.716345 +step:3012 train loss:3.687591 +step:3013 train loss:3.703146 +step:3014 train loss:3.665164 +step:3015 train loss:3.711109 +step:3016 train loss:3.708270 +step:3017 train loss:3.789697 +step:3018 train loss:3.741698 +step:3019 train loss:3.667001 +step:3020 train loss:3.729913 +step:3021 train loss:3.714890 +step:3022 train loss:3.684963 +step:3023 train loss:3.688342 +step:3024 train loss:3.704568 +step:3025 train loss:3.739332 +step:3026 train loss:3.739172 +step:3027 train loss:3.675146 +step:3028 train loss:3.750025 +step:3029 train loss:3.682989 +step:3030 train loss:3.751506 +step:3031 train loss:3.679400 +step:3032 train loss:3.697476 +step:3033 train loss:3.789391 +step:3034 train loss:3.656062 +step:3035 train loss:3.760488 +step:3036 train loss:3.702160 +step:3037 train loss:3.667559 +step:3038 train loss:3.725253 +step:3039 train loss:3.659431 +step:3040 train loss:3.725251 +step:3041 train loss:3.728688 +step:3042 train loss:3.682077 +step:3043 train loss:3.703487 +step:3044 train loss:3.630259 +step:3045 train loss:3.723414 +step:3046 train loss:3.798256 +step:3047 train loss:3.773654 +step:3048 train loss:3.729072 +step:3049 train loss:3.741055 +step:3050 train loss:3.709047 +step:3051 train loss:3.713615 +step:3052 train loss:3.722176 +step:3053 train loss:3.725347 +step:3054 train loss:3.679638 +step:3055 train loss:3.646560 +step:3056 train loss:3.727468 +step:3057 train loss:3.773553 +step:3058 train loss:3.756401 +step:3059 train loss:3.755357 +step:3060 train loss:3.734998 +step:3061 train loss:3.715001 +step:3062 train loss:3.712836 +step:3063 train loss:3.669228 +step:3064 train loss:3.741329 +step:3065 train loss:3.660832 +step:3066 train loss:3.709297 +step:3067 train loss:3.688228 +step:3068 train loss:3.612923 +step:3069 train loss:3.752726 +step:3070 train loss:3.699120 +step:3071 train loss:3.744057 +step:3072 train loss:3.717026 +step:3073 train loss:3.952199 +step:3074 train loss:3.728319 +step:3075 train loss:3.665305 +step:3076 train loss:3.745972 +step:3077 train loss:3.645657 +step:3078 train loss:3.714710 +step:3079 train loss:3.764134 +step:3080 train loss:3.658772 +step:3081 train loss:3.775171 +step:3082 train loss:3.668921 +step:3083 train loss:3.723197 +step:3084 train loss:3.698076 +step:3085 train loss:3.699352 +step:3086 train loss:3.801672 +step:3087 train loss:3.700671 +step:3088 train loss:3.705735 +step:3089 train loss:3.751124 +step:3090 train loss:3.648798 +step:3091 train loss:3.722280 +step:3092 train loss:3.639568 +step:3093 train loss:3.709369 +step:3094 train loss:3.697445 +step:3095 train loss:3.689104 +step:3096 train loss:3.681670 +step:3097 train loss:3.621743 +step:3098 train loss:3.755977 +step:3099 train loss:3.733718 +step:3100 train loss:3.673415 +step:3101 train loss:3.656553 +step:3102 train loss:3.754729 +step:3103 train loss:3.731135 +step:3104 train loss:3.726028 +step:3105 train loss:3.697652 +step:3106 train loss:3.708141 +step:3107 train loss:3.679183 +step:3108 train loss:3.735678 +step:3109 train 
loss:3.681141 +step:3110 train loss:3.725955 +step:3111 train loss:3.758439 +step:3112 train loss:3.676062 +step:3113 train loss:3.735682 +step:3114 train loss:3.676852 +step:3115 train loss:3.681323 +step:3116 train loss:3.729356 +step:3117 train loss:3.721842 +step:3118 train loss:3.700501 +step:3119 train loss:3.629304 +step:3120 train loss:3.724960 +step:3121 train loss:3.692319 +step:3122 train loss:3.725321 +step:3123 train loss:3.684378 +step:3124 train loss:3.724086 +step:3125 train loss:3.677565 +step:3126 train loss:3.630973 +step:3127 train loss:3.698301 +step:3128 train loss:3.690901 +step:3129 train loss:3.672202 +step:3130 train loss:3.672195 +step:3131 train loss:3.684718 +step:3132 train loss:3.750906 +step:3133 train loss:3.671134 +step:3134 train loss:3.736857 +step:3135 train loss:3.680101 +step:3136 train loss:3.687285 +step:3137 train loss:3.731318 +step:3138 train loss:3.653981 +step:3139 train loss:3.756728 +step:3140 train loss:3.635105 +step:3141 train loss:3.759254 +step:3142 train loss:3.676827 +step:3143 train loss:3.697267 +step:3144 train loss:3.678252 +step:3145 train loss:3.689627 +step:3146 train loss:3.672942 +step:3147 train loss:3.640167 +step:3148 train loss:3.733007 +step:3149 train loss:3.659353 +step:3150 train loss:3.712489 +step:3151 train loss:3.713586 +step:3152 train loss:3.672999 +step:3153 train loss:3.688097 +step:3154 train loss:3.674743 +step:3155 train loss:3.674254 +step:3156 train loss:3.757688 +step:3157 train loss:3.764824 +step:3158 train loss:3.735743 +step:3159 train loss:3.712063 +step:3160 train loss:3.720118 +step:3161 train loss:3.797299 +step:3162 train loss:3.773399 +step:3163 train loss:3.781108 +step:3164 train loss:3.771266 +step:3165 train loss:3.711174 +step:3166 train loss:3.694760 +step:3167 train loss:3.683607 +step:3168 train loss:3.787685 +step:3169 train loss:3.690669 +step:3170 train loss:3.747809 +step:3171 train loss:3.762517 +step:3172 train loss:3.736702 +step:3173 train loss:3.763758 +step:3174 train loss:3.758154 +step:3175 train loss:3.718920 +step:3176 train loss:3.691043 +step:3177 train loss:3.646760 +step:3178 train loss:3.752255 +step:3179 train loss:3.705089 +step:3180 train loss:3.689535 +step:3181 train loss:3.732855 +step:3182 train loss:3.735457 +step:3183 train loss:3.739254 +step:3184 train loss:3.740278 +step:3185 train loss:3.683377 +step:3186 train loss:3.757581 +step:3187 train loss:3.660703 +step:3188 train loss:3.716846 +step:3189 train loss:3.853886 +step:3190 train loss:3.697774 +step:3191 train loss:3.698479 +step:3192 train loss:3.692005 +step:3193 train loss:3.720532 +step:3194 train loss:3.699930 +step:3195 train loss:3.791963 +step:3196 train loss:3.725303 +step:3197 train loss:3.658861 +step:3198 train loss:3.728923 +step:3199 train loss:3.715489 +step:3200 train loss:3.683179 +step:3201 train loss:3.740144 +step:3202 train loss:3.643915 +step:3203 train loss:3.765707 +step:3204 train loss:3.715599 +step:3205 train loss:3.732054 +step:3206 train loss:3.746283 +step:3207 train loss:3.842006 +step:3208 train loss:3.798226 +step:3209 train loss:3.701914 +step:3210 train loss:3.742048 +step:3211 train loss:3.712610 +step:3212 train loss:3.730872 +step:3213 train loss:3.761316 +step:3214 train loss:3.756727 +step:3215 train loss:3.686119 +step:3216 train loss:3.700356 +step:3217 train loss:3.724597 +step:3218 train loss:3.732504 +step:3219 train loss:3.688858 +step:3220 train loss:3.738981 +step:3221 train loss:3.733096 +step:3222 train loss:3.670518 +step:3223 train loss:3.770972 
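The validation records above (e.g. the step-3000 one) pack every metric for a step into one long space-separated run of name:value pairs: step, validation loss, per-layer *_sharp, *_fnorm, *_l1linf, *_spectral, and the cos_v_* diagnostics. A minimal parsing sketch, assuming only that key:value format; the regex and function name are illustrative and not part of the training script.

import re

# Minimal sketch: turn one space-separated "name:value" record from the log above
# (e.g. "+step:3000 validation loss:3.680790 total_sharp:1.2894e-02 ...") into a
# {metric_name: float} dict.  Keys such as "train loss" / "validation loss" contain
# a space; duplicate keys within the same text keep the last occurrence.
PAIR = re.compile(r'([A-Za-z_][A-Za-z0-9_\- ]*?):(-?\d+(?:\.\d+)?(?:[eE][-+]?\d+)?)')

def parse_record(text):
    return {k.strip(): float(v) for k, v in PAIR.findall(text)}

rec = parse_record("+step:3000 validation loss:3.680790 total_sharp:1.2894e-02")
assert rec["step"] == 3000.0 and rec["validation loss"] == 3.680790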
+step:3224 train loss:3.687803 +step:3225 train loss:3.718762 +step:3226 train loss:3.683769 +step:3227 train loss:3.745604 +step:3228 train loss:3.707443 +step:3229 train loss:3.674521 +step:3230 train loss:3.684844 +step:3231 train loss:3.707434 +step:3232 train loss:3.710818 +step:3233 train loss:3.677226 +step:3234 train loss:3.689529 +step:3235 train loss:3.805368 +step:3236 train loss:3.735786 +step:3237 train loss:3.705235 +step:3238 train loss:3.745161 +step:3239 train loss:3.696961 +step:3240 train loss:3.710450 +step:3241 train loss:3.663309 +step:3242 train loss:3.710565 +step:3243 train loss:3.736915 +step:3244 train loss:3.691956 +step:3245 train loss:3.684576 +step:3246 train loss:3.703274 +step:3247 train loss:3.765577 +step:3248 train loss:3.702688 +step:3249 train loss:3.688165 +step:3250 validation loss:3.653088 +step:3250 train loss:3.729463 +step:3251 train loss:3.727526 +step:3252 train loss:3.694562 +step:3253 train loss:3.666693 +step:3254 train loss:3.697547 +step:3255 train loss:3.715023 +step:3256 train loss:3.678309 +step:3257 train loss:3.786720 +step:3258 train loss:3.796565 +step:3259 train loss:3.735740 +step:3260 train loss:3.704717 +step:3261 train loss:3.706649 +step:3262 train loss:3.675658 +step:3263 train loss:3.717394 +step:3264 train loss:3.744009 +step:3265 train loss:3.687494 +step:3266 train loss:3.653046 +step:3267 train loss:3.699170 +step:3268 train loss:3.690320 +step:3269 train loss:3.731143 +step:3270 train loss:3.753500 +step:3271 train loss:3.718100 +step:3272 train loss:3.703852 +step:3273 train loss:3.739095 +step:3274 train loss:3.841205 +step:3275 train loss:3.731012 +step:3276 train loss:3.683311 +step:3277 train loss:3.716502 +step:3278 train loss:3.719713 +step:3279 train loss:3.716770 +step:3280 train loss:3.696072 +step:3281 train loss:3.716669 +step:3282 train loss:3.699427 +step:3283 train loss:3.630382 +step:3284 train loss:3.658965 +step:3285 train loss:3.692497 +step:3286 train loss:3.727489 +step:3287 train loss:3.709501 +step:3288 train loss:3.719431 +step:3289 train loss:3.714690 +step:3290 train loss:3.745619 +step:3291 train loss:3.667697 +step:3292 train loss:3.732972 +step:3293 train loss:3.683441 +step:3294 train loss:3.744920 +step:3295 train loss:3.668615 +step:3296 train loss:3.766520 +step:3297 train loss:3.710597 +step:3298 train loss:3.735672 +step:3299 train loss:3.665045 +step:3300 train loss:3.703045 +step:3301 train loss:3.710361 +step:3302 train loss:3.798858 +step:3303 train loss:3.721883 +step:3304 train loss:3.699721 +step:3305 train loss:3.686993 +step:3306 train loss:3.682129 +step:3307 train loss:3.690812 +step:3308 train loss:3.787141 +step:3309 train loss:3.719865 +step:3310 train loss:3.696442 +step:3311 train loss:3.749079 +step:3312 train loss:3.779322 +step:3313 train loss:3.670032 +step:3314 train loss:3.744867 +step:3315 train loss:3.689390 +step:3316 train loss:3.741627 +step:3317 train loss:3.739307 +step:3318 train loss:3.735846 +step:3319 train loss:3.737854 +step:3320 train loss:3.694694 +step:3321 train loss:3.679004 +step:3322 train loss:3.711872 +step:3323 train loss:3.665993 +step:3324 train loss:3.681437 +step:3325 train loss:3.777905 +step:3326 train loss:3.641818 +step:3327 train loss:3.706842 +step:3328 train loss:3.699739 +step:3329 train loss:3.657891 +step:3330 train loss:3.635315 +step:3331 train loss:3.775845 +step:3332 train loss:3.744174 +step:3333 train loss:3.696200 +step:3334 train loss:3.713032 +step:3335 train loss:3.768754 +step:3336 train loss:3.729836 +step:3337 
train loss:3.827644 +step:3338 train loss:3.677034 +step:3339 train loss:3.770043 +step:3340 train loss:3.766034 +step:3341 train loss:3.654924 +step:3342 train loss:3.691251 +step:3343 train loss:3.695862 +step:3344 train loss:3.636458 +step:3345 train loss:3.734813 +step:3346 train loss:3.739234 +step:3347 train loss:3.716544 +step:3348 train loss:3.705390 +step:3349 train loss:3.680774 +step:3350 train loss:3.741500 +step:3351 train loss:3.711064 +step:3352 train loss:3.733974 +step:3353 train loss:3.686137 +step:3354 train loss:3.730441 +step:3355 train loss:3.678802 +step:3356 train loss:3.700986 +step:3357 train loss:3.767285 +step:3358 train loss:3.690121 +step:3359 train loss:3.642924 +step:3360 train loss:3.736802 +step:3361 train loss:3.684699 +step:3362 train loss:3.737805 +step:3363 train loss:3.695124 +step:3364 train loss:3.687067 +step:3365 train loss:3.705512 +step:3366 train loss:3.694413 +step:3367 train loss:3.662077 +step:3368 train loss:3.673503 +step:3369 train loss:3.634659 +step:3370 train loss:3.709204 +step:3371 train loss:3.701153 +step:3372 train loss:3.700130 +step:3373 train loss:3.740078 +step:3374 train loss:3.701219 +step:3375 train loss:3.714107 +step:3376 train loss:3.648178 +step:3377 train loss:3.659227 +step:3378 train loss:3.634501 +step:3379 train loss:3.681876 +step:3380 train loss:3.715744 +step:3381 train loss:3.711230 +step:3382 train loss:3.636706 +step:3383 train loss:3.726080 +step:3384 train loss:3.688171 +step:3385 train loss:3.672573 +step:3386 train loss:3.721206 +step:3387 train loss:3.696293 +step:3388 train loss:3.717651 +step:3389 train loss:3.632009 +step:3390 train loss:3.693894 +step:3391 train loss:3.731095 +step:3392 train loss:3.681751 +step:3393 train loss:3.641157 +step:3394 train loss:3.689053 +step:3395 train loss:3.688817 +step:3396 train loss:3.719040 +step:3397 train loss:3.826795 +step:3398 train loss:3.620661 +step:3399 train loss:3.680934 +step:3400 train loss:3.658342 +step:3401 train loss:3.699578 +step:3402 train loss:3.691560 +step:3403 train loss:3.774873 +step:3404 train loss:3.677769 +step:3405 train loss:3.725473 +step:3406 train loss:3.666614 +step:3407 train loss:3.715615 +step:3408 train loss:3.733373 +step:3409 train loss:3.743949 +step:3410 train loss:3.771874 +step:3411 train loss:3.712036 +step:3412 train loss:3.682265 +step:3413 train loss:3.640637 +step:3414 train loss:3.662586 +step:3415 train loss:3.685763 +step:3416 train loss:3.760801 +step:3417 train loss:3.701316 +step:3418 train loss:3.687436 +step:3419 train loss:3.670684 +step:3420 train loss:3.758645 +step:3421 train loss:3.745465 +step:3422 train loss:3.709822 +step:3423 train loss:3.666883 +step:3424 train loss:3.728264 +step:3425 train loss:3.714431 +step:3426 train loss:3.745283 +step:3427 train loss:3.715132 +step:3428 train loss:3.657286 +step:3429 train loss:3.717110 +step:3430 train loss:3.735305 +step:3431 train loss:3.673415 +step:3432 train loss:3.691308 +step:3433 train loss:3.717515 +step:3434 train loss:3.674816 +step:3435 train loss:3.629508 +step:3436 train loss:3.688834 +step:3437 train loss:3.706980 +step:3438 train loss:3.704422 +step:3439 train loss:3.656568 +step:3440 train loss:3.703638 +step:3441 train loss:3.724769 +step:3442 train loss:3.644699 +step:3443 train loss:3.633805 +step:3444 train loss:3.649545 +step:3445 train loss:3.655950 +step:3446 train loss:3.680752 +step:3447 train loss:3.655886 +step:3448 train loss:3.671277 +step:3449 train loss:3.701503 +step:3450 train loss:3.720480 +step:3451 train loss:3.681565 
+step:3452 train loss:3.629188 +step:3453 train loss:3.666742 +step:3454 train loss:3.713961 +step:3455 train loss:3.672009 +step:3456 train loss:3.662810 +step:3457 train loss:3.663947 +step:3458 train loss:3.684867 +step:3459 train loss:3.914567 +step:3460 train loss:3.683976 +step:3461 train loss:3.697519 +step:3462 train loss:3.686280 +step:3463 train loss:3.761978 +step:3464 train loss:3.720654 +step:3465 train loss:3.714981 +step:3466 train loss:3.641672 +step:3467 train loss:3.684278 +step:3468 train loss:3.666954 +step:3469 train loss:3.698683 +step:3470 train loss:3.646754 +step:3471 train loss:3.706352 +step:3472 train loss:3.697132 +step:3473 train loss:3.735291 +step:3474 train loss:3.681770 +step:3475 train loss:3.649015 +step:3476 train loss:3.740868 +step:3477 train loss:3.677054 +step:3478 train loss:3.745787 +step:3479 train loss:3.639540 +step:3480 train loss:3.779921 +step:3481 train loss:3.691573 +step:3482 train loss:3.714539 +step:3483 train loss:3.673386 +step:3484 train loss:3.683774 +step:3485 train loss:3.707644 +step:3486 train loss:3.670054 +step:3487 train loss:3.763237 +step:3488 train loss:3.628449 +step:3489 train loss:3.662066 +step:3490 train loss:3.681931 +step:3491 train loss:3.638516 +step:3492 train loss:3.677056 +step:3493 train loss:3.829593 +step:3494 train loss:3.672420 +step:3495 train loss:3.671958 +step:3496 train loss:3.618006 +step:3497 train loss:3.633993 +step:3498 train loss:3.696754 +step:3499 train loss:3.670257 +step:3500 validation loss:3.626328 total_sharp:7.2713e-03 L1_sharp:8.0612e-03 L2_sharp:4.4769e-04 L3_sharp:8.2274e-04 L4_sharp:1.3475e-03 L5_sharp:1.2252e-03 L6_sharp:1.1149e-03 L7_sharp:1.7417e-03 L8_sharp:2.1062e-03 L9_sharp:1.9639e-03 L10_sharp:1.4804e-03 L11_sharp:1.4099e-03 L12_sharp:3.4588e-03 total_fnorm:2.1353e+00 total_l1_linf:1.8845e+04 total_spectral:2.1353e+00 L1_fnorm:4.4663e-01 L2_fnorm:4.7443e-01 L3_fnorm:4.7121e-01 L4_fnorm:4.6331e-01 L5_fnorm:4.3647e-01 L6_fnorm:4.7310e-01 L7_fnorm:4.8602e-01 L8_fnorm:4.9306e-01 L9_fnorm:5.0138e-01 L10_fnorm:5.1863e-01 L11_fnorm:5.2133e-01 L12_fnorm:5.0765e-01 L1_l1linf:5.1161e-01 L2_l1linf:5.3764e-01 L3_l1linf:6.1387e-01 L4_l1linf:5.3507e-01 L5_l1linf:4.7040e-01 L6_l1linf:5.4784e-01 L7_l1linf:5.3943e-01 L8_l1linf:4.9833e-01 L9_l1linf:5.1869e-01 L10_l1linf:5.3353e-01 L11_l1linf:5.2611e-01 L12_l1linf:5.6745e-01 L1_spectral:7.7078e-02 L2_spectral:8.2164e-02 L3_spectral:7.7359e-02 L4_spectral:7.5699e-02 L5_spectral:5.8368e-02 L6_spectral:7.1541e-02 L7_spectral:6.4356e-02 L8_spectral:5.6348e-02 L9_spectral:4.7034e-02 L10_spectral:4.7324e-02 L11_spectral:4.9612e-02 L12_spectral:7.2375e-02 v_norm:2.1353e+00 cos_v_-g_hvp:8.5757e-02 g_hvp_norm:3.4247e-01 cos_v_-g_t:1.0153e-01 g_t_norm:2.9050e-01 hv_norm:3.8853e-01 cos_v_hv:3.9962e-02 hg_norm:3.3032e+00 cos_g_hg:6.6294e-01 v_par:1.3892e-02 v_perp:2.1353e+00 L1_cos_v_neg_g:1.5792e-01 L1_v_norm:4.4663e-01 L2_cos_v_neg_g:7.9182e-02 L2_v_norm:4.7443e-01 L3_cos_v_neg_g:6.8473e-02 L3_v_norm:4.7121e-01 L4_cos_v_neg_g:7.6192e-02 L4_v_norm:4.6331e-01 L5_cos_v_neg_g:5.5107e-02 L5_v_norm:4.3647e-01 L6_cos_v_neg_g:6.9351e-02 L6_v_norm:4.7310e-01 L7_cos_v_neg_g:8.8018e-02 L7_v_norm:4.8602e-01 L8_cos_v_neg_g:8.8594e-02 L8_v_norm:4.9306e-01 L9_cos_v_neg_g:8.4536e-02 L9_v_norm:5.0138e-01 L10_cos_v_neg_g:9.9816e-02 L10_v_norm:5.1863e-01 L11_cos_v_neg_g:1.1608e-01 L11_v_norm:5.2133e-01 L12_cos_v_neg_g:1.5858e-01 L12_v_norm:5.0765e-01 +step:3500 train loss:3.657841 +step:3501 train loss:3.679561 +step:3502 train loss:3.637475 +step:3503 train loss:3.660922 
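For reference while reading the *_fnorm and *_spectral columns in these validation records: a short PyTorch sketch of the two unambiguous per-matrix statistics (Frobenius norm and spectral norm) for a weight-update matrix. The *_l1linf column is left out because its exact definition is not visible in this excerpt; everything below is illustrative rather than the script's own logging code, and the matrix shape is hypothetical.

import torch

# Illustrative per-matrix statistics for an update dW, matching the naming pattern of
# the "*_fnorm" and "*_spectral" columns above (not the logging code itself).
def update_norms(dW):
    return {
        "fnorm": torch.linalg.matrix_norm(dW, ord="fro").item(),  # Frobenius norm ||dW||_F
        "spectral": torch.linalg.matrix_norm(dW, ord=2).item(),   # largest singular value
    }

dW = 0.01 * torch.randn(768, 768)   # hypothetical update for one d12-sized weight matrix
print(update_norms(dW))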
+step:3504 train loss:3.673054 +step:3505 train loss:3.657723 +step:3506 train loss:3.695228 +step:3507 train loss:3.659222 +step:3508 train loss:3.657660 +step:3509 train loss:3.705585 +step:3510 train loss:3.629970 +step:3511 train loss:3.715985 +step:3512 train loss:3.748195 +step:3513 train loss:3.727425 +step:3514 train loss:3.783442 +step:3515 train loss:3.641212 +step:3516 train loss:3.673871 +step:3517 train loss:3.662762 +step:3518 train loss:3.688130 +step:3519 train loss:3.676600 +step:3520 train loss:3.642720 +step:3521 train loss:3.703101 +step:3522 train loss:3.667150 +step:3523 train loss:3.632789 +step:3524 train loss:3.664186 +step:3525 train loss:3.635141 +step:3526 train loss:3.656414 +step:3527 train loss:3.708316 +step:3528 train loss:3.688229 +step:3529 train loss:3.639737 +step:3530 train loss:3.610672 +step:3531 train loss:3.701982 +step:3532 train loss:3.650728 +step:3533 train loss:3.641073 +step:3534 train loss:3.663451 +step:3535 train loss:3.676261 +step:3536 train loss:3.667288 +step:3537 train loss:3.697217 +step:3538 train loss:3.649589 +step:3539 train loss:3.673773 +step:3540 train loss:3.663567 +step:3541 train loss:3.692507 +step:3542 train loss:3.675317 +step:3543 train loss:3.690570 +step:3544 train loss:3.604542 +step:3545 train loss:3.661503 +step:3546 train loss:3.606556 +step:3547 train loss:3.614559 +step:3548 train loss:3.647664 +step:3549 train loss:3.651659 +step:3550 train loss:3.628204 +step:3551 train loss:3.704715 +step:3552 train loss:3.692984 +step:3553 train loss:3.674796 +step:3554 train loss:3.751552 +step:3555 train loss:3.645624 +step:3556 train loss:3.631083 +step:3557 train loss:3.662646 +step:3558 train loss:3.646869 +step:3559 train loss:3.696898 +step:3560 train loss:3.758580 +step:3561 train loss:3.675134 +step:3562 train loss:3.674229 +step:3563 train loss:3.769969 +step:3564 train loss:3.615820 +step:3565 train loss:3.638261 +step:3566 train loss:3.670206 +step:3567 train loss:3.723763 +step:3568 train loss:3.661521 +step:3569 train loss:3.669220 +step:3570 train loss:3.700591 +step:3571 train loss:3.670900 +step:3572 train loss:3.746312 +step:3573 train loss:3.682177 +step:3574 train loss:3.688214 +step:3575 train loss:3.639581 +step:3576 train loss:3.658089 +step:3577 train loss:3.660654 +step:3578 train loss:3.687473 +step:3579 train loss:3.592795 +step:3580 train loss:3.647671 +step:3581 train loss:3.640268 +step:3582 train loss:3.601863 +step:3583 train loss:3.656342 +step:3584 train loss:3.647596 +step:3585 train loss:3.676716 +step:3586 train loss:3.638645 +step:3587 train loss:3.645473 +step:3588 train loss:3.627160 +step:3589 train loss:3.652354 +step:3590 train loss:3.684016 +step:3591 train loss:3.672218 +step:3592 train loss:3.711558 +step:3593 train loss:3.677881 +step:3594 train loss:3.618518 +step:3595 train loss:3.728979 +step:3596 train loss:3.682974 +step:3597 train loss:3.610316 +step:3598 train loss:3.697379 +step:3599 train loss:3.648076 +step:3600 train loss:3.635404 +step:3601 train loss:3.647766 +step:3602 train loss:3.635337 +step:3603 train loss:3.593687 +step:3604 train loss:3.721991 +step:3605 train loss:3.640609 +step:3606 train loss:3.709221 +step:3607 train loss:3.734344 +step:3608 train loss:3.673020 +step:3609 train loss:3.802180 +step:3610 train loss:3.694235 +step:3611 train loss:3.657253 +step:3612 train loss:3.643338 +step:3613 train loss:3.616760 +step:3614 train loss:3.634925 +step:3615 train loss:3.661985 +step:3616 train loss:3.652588 +step:3617 train loss:3.596307 +step:3618 train 
loss:3.650053 +step:3619 train loss:3.636461 +step:3620 train loss:3.661922 +step:3621 train loss:3.737876 +step:3622 train loss:3.615054 +step:3623 train loss:3.569233 +step:3624 train loss:3.697467 +step:3625 train loss:3.665991 +step:3626 train loss:3.579360 +step:3627 train loss:3.676066 +step:3628 train loss:3.674644 +step:3629 train loss:3.646617 +step:3630 train loss:3.642438 +step:3631 train loss:3.644755 +step:3632 train loss:3.624700 +step:3633 train loss:3.667956 +step:3634 train loss:3.639357 +step:3635 train loss:3.693868 +step:3636 train loss:3.690482 +step:3637 train loss:3.772799 +step:3638 train loss:3.598566 +step:3639 train loss:3.669768 +step:3640 train loss:3.661931 +step:3641 train loss:3.650147 +step:3642 train loss:3.646572 +step:3643 train loss:3.645223 +step:3644 train loss:3.656965 +step:3645 train loss:3.656263 +step:3646 train loss:3.648343 +step:3647 train loss:3.633398 +step:3648 train loss:3.687244 +step:3649 train loss:3.704584 +step:3650 train loss:3.623355 +step:3651 train loss:3.694203 +step:3652 train loss:3.638646 +step:3653 train loss:3.641332 +step:3654 train loss:3.627827 +step:3655 train loss:3.609047 +step:3656 train loss:3.656811 +step:3657 train loss:3.625967 +step:3658 train loss:3.721096 +step:3659 train loss:3.655555 +step:3660 train loss:3.609206 +step:3661 train loss:3.635967 +step:3662 train loss:3.661252 +step:3663 train loss:3.684535 +step:3664 train loss:3.649173 +step:3665 train loss:3.633096 +step:3666 train loss:3.632158 +step:3667 train loss:3.637765 +step:3668 train loss:3.631778 +step:3669 train loss:3.690296 +step:3670 train loss:3.663019 +step:3671 train loss:3.674649 +step:3672 train loss:3.681056 +step:3673 train loss:3.630879 +step:3674 train loss:3.639812 +step:3675 train loss:3.659679 +step:3676 train loss:3.609864 +step:3677 train loss:3.593715 +step:3678 train loss:3.700009 +step:3679 train loss:3.669472 +step:3680 train loss:3.663074 +step:3681 train loss:3.704036 +step:3682 train loss:3.597354 +step:3683 train loss:3.600880 +step:3684 train loss:3.649780 +step:3685 train loss:3.673608 +step:3686 train loss:3.647568 +step:3687 train loss:4.032678 +step:3688 train loss:3.697711 +step:3689 train loss:3.650631 +step:3690 train loss:3.645937 +step:3691 train loss:3.688994 +step:3692 train loss:3.677302 +step:3693 train loss:3.632110 +step:3694 train loss:3.654876 +step:3695 train loss:3.601071 +step:3696 train loss:3.596972 +step:3697 train loss:3.657881 +step:3698 train loss:3.632599 +step:3699 train loss:3.673906 +step:3700 train loss:3.709471 +step:3701 train loss:3.629605 +step:3702 train loss:3.627595 +step:3703 train loss:3.650232 +step:3704 train loss:3.645756 +step:3705 train loss:3.594546 +step:3706 train loss:3.618460 +step:3707 train loss:3.645023 +step:3708 train loss:3.634650 +step:3709 train loss:3.574440 +step:3710 train loss:3.634494 +step:3711 train loss:3.620148 +step:3712 train loss:3.628527 +step:3713 train loss:3.623221 +step:3714 train loss:3.589072 +step:3715 train loss:3.610137 +step:3716 train loss:3.657815 +step:3717 train loss:3.635686 +step:3718 train loss:3.683669 +step:3719 train loss:3.591500 +step:3720 train loss:3.708024 +step:3721 train loss:3.717355 +step:3722 train loss:3.619227 +step:3723 train loss:3.605092 +step:3724 train loss:3.782691 +step:3725 train loss:3.669230 +step:3726 train loss:3.689754 +step:3727 train loss:3.647772 +step:3728 train loss:3.655559 +step:3729 train loss:3.758415 +step:3730 train loss:3.965283 +step:3731 train loss:3.680054 +step:3732 train loss:3.700360 
+step:3733 train loss:3.814440 +step:3734 train loss:3.759569 +step:3735 train loss:3.743374 +step:3736 train loss:3.740876 +step:3737 train loss:3.703033 +step:3738 train loss:3.735189 +step:3739 train loss:3.714925 +step:3740 train loss:3.716197 +step:3741 train loss:3.778033 +step:3742 train loss:3.694101 +step:3743 train loss:3.724105 +step:3744 train loss:3.634533 +step:3745 train loss:3.673743 +step:3746 train loss:3.696335 +step:3747 train loss:3.711319 +step:3748 train loss:3.774638 +step:3749 train loss:3.666843 +step:3750 validation loss:3.642446 +step:3750 train loss:3.653258 +step:3751 train loss:3.708356 +step:3752 train loss:3.691662 +step:3753 train loss:3.627366 +step:3754 train loss:3.678915 +step:3755 train loss:3.638489 +step:3756 train loss:3.653259 +step:3757 train loss:3.616349 +step:3758 train loss:3.593521 +step:3759 train loss:3.699854 +step:3760 train loss:3.707982 +step:3761 train loss:3.627067 +step:3762 train loss:3.684952 +step:3763 train loss:3.619100 +step:3764 train loss:3.647309 +step:3765 train loss:3.671628 +step:3766 train loss:3.637247 +step:3767 train loss:3.651895 +step:3768 train loss:3.655965 +step:3769 train loss:3.700245 +step:3770 train loss:3.686343 +step:3771 train loss:3.580174 +step:3772 train loss:3.696817 +step:3773 train loss:3.628344 +step:3774 train loss:3.697061 +step:3775 train loss:3.696469 +step:3776 train loss:3.660193 +step:3777 train loss:3.747556 +step:3778 train loss:3.648433 +step:3779 train loss:3.698335 +step:3780 train loss:3.663403 +step:3781 train loss:3.576724 +step:3782 train loss:3.699805 +step:3783 train loss:3.667348 +step:3784 train loss:3.614041 +step:3785 train loss:3.701597 +step:3786 train loss:3.658322 +step:3787 train loss:3.672890 +step:3788 train loss:3.652663 +step:3789 train loss:3.675591 +step:3790 train loss:3.662890 +step:3791 train loss:3.614563 +step:3792 train loss:3.699738 +step:3793 train loss:3.622713 +step:3794 train loss:3.647194 +step:3795 train loss:3.640377 +step:3796 train loss:3.666480 +step:3797 train loss:3.665735 +step:3798 train loss:3.620559 +step:3799 train loss:3.636745 +step:3800 train loss:3.665301 +step:3801 train loss:3.656515 +step:3802 train loss:3.655083 +step:3803 train loss:3.605031 +step:3804 train loss:3.682621 +step:3805 train loss:3.571510 +step:3806 train loss:3.664096 +step:3807 train loss:3.643362 +step:3808 train loss:3.621142 +step:3809 train loss:3.685495 +step:3810 train loss:3.694431 +step:3811 train loss:3.646807 +step:3812 train loss:3.660788 +step:3813 train loss:3.656756 +step:3814 train loss:3.620673 +step:3815 train loss:3.654179 +step:3816 train loss:3.631463 +step:3817 train loss:3.648449 +step:3818 train loss:3.602994 +step:3819 train loss:3.575731 +step:3820 train loss:3.656559 +step:3821 train loss:3.737140 +step:3822 train loss:3.700323 +step:3823 train loss:3.623598 +step:3824 train loss:3.662831 +step:3825 train loss:3.669250 +step:3826 train loss:3.616479 +step:3827 train loss:3.620482 +step:3828 train loss:3.640646 +step:3829 train loss:3.619506 +step:3830 train loss:3.685540 +step:3831 train loss:3.621741 +step:3832 train loss:3.617405 +step:3833 train loss:3.628257 +step:3834 train loss:3.611477 +step:3835 train loss:3.569906 +step:3836 train loss:3.670587 +step:3837 train loss:3.605886 +step:3838 train loss:3.605631 +step:3839 train loss:3.625008 +step:3840 train loss:3.655273 +step:3841 train loss:3.608122 +step:3842 train loss:3.685258 +step:3843 train loss:3.664248 +step:3844 train loss:3.614548 +step:3845 train loss:3.587624 +step:3846 
train loss:3.592341 +step:3847 train loss:3.686825 +step:3848 train loss:3.708730 +step:3849 train loss:3.616691 +step:3850 train loss:3.625003 +step:3851 train loss:3.605037 +step:3852 train loss:3.665136 +step:3853 train loss:3.631099 +step:3854 train loss:3.634245 +step:3855 train loss:3.608155 +step:3856 train loss:3.664103 +step:3857 train loss:3.696919 +step:3858 train loss:3.606122 +step:3859 train loss:3.667051 +step:3860 train loss:3.631319 +step:3861 train loss:3.641872 +step:3862 train loss:3.606869 +step:3863 train loss:3.671640 +step:3864 train loss:3.627857 +step:3865 train loss:3.626407 +step:3866 train loss:3.624924 +step:3867 train loss:3.648962 +step:3868 train loss:3.726122 +step:3869 train loss:3.622114 +step:3870 train loss:3.657138 +step:3871 train loss:3.605597 +step:3872 train loss:3.685572 +step:3873 train loss:3.609311 +step:3874 train loss:3.596464 +step:3875 train loss:3.680609 +step:3876 train loss:3.613810 +step:3877 train loss:3.634863 +step:3878 train loss:3.638729 +step:3879 train loss:3.650367 +step:3880 train loss:3.631974 +step:3881 train loss:3.664598 +step:3882 train loss:3.610859 +step:3883 train loss:3.623376 +step:3884 train loss:3.605829 +step:3885 train loss:3.708235 +step:3886 train loss:3.709938 +step:3887 train loss:3.630505 +step:3888 train loss:3.572119 +step:3889 train loss:3.634183 +step:3890 train loss:3.558444 +step:3891 train loss:3.618635 +step:3892 train loss:3.659205 +step:3893 train loss:3.622536 +step:3894 train loss:3.636927 +step:3895 train loss:3.616433 +step:3896 train loss:3.604596 +step:3897 train loss:3.653071 +step:3898 train loss:3.713868 +step:3899 train loss:3.659039 +step:3900 train loss:3.645997 +step:3901 train loss:3.677935 +step:3902 train loss:3.616384 +step:3903 train loss:3.615292 +step:3904 train loss:3.622873 +step:3905 train loss:3.652491 +step:3906 train loss:3.621851 +step:3907 train loss:3.656549 +step:3908 train loss:3.619674 +step:3909 train loss:3.682237 +step:3910 train loss:3.671379 +step:3911 train loss:3.663745 +step:3912 train loss:3.700402 +step:3913 train loss:3.732581 +step:3914 train loss:3.576134 +step:3915 train loss:3.675033 +step:3916 train loss:3.640044 +step:3917 train loss:3.627660 +step:3918 train loss:3.649386 +step:3919 train loss:3.638420 +step:3920 train loss:3.724799 +step:3921 train loss:3.644609 +step:3922 train loss:3.688822 +step:3923 train loss:3.592381 +step:3924 train loss:3.709385 +step:3925 train loss:3.657297 +step:3926 train loss:3.659017 +step:3927 train loss:3.632680 +step:3928 train loss:3.587685 +step:3929 train loss:3.693567 +step:3930 train loss:3.738879 +step:3931 train loss:3.666316 +step:3932 train loss:3.702812 +step:3933 train loss:3.673084 +step:3934 train loss:3.699461 +step:3935 train loss:3.632020 +step:3936 train loss:3.572384 +step:3937 train loss:3.547274 +step:3938 train loss:3.681989 +step:3939 train loss:3.658463 +step:3940 train loss:3.647258 +step:3941 train loss:3.619813 +step:3942 train loss:3.685104 +step:3943 train loss:3.697961 +step:3944 train loss:3.652656 +step:3945 train loss:3.668344 +step:3946 train loss:3.634275 +step:3947 train loss:3.655093 +step:3948 train loss:3.643297 +step:3949 train loss:3.654615 +step:3950 train loss:3.653083 +step:3951 train loss:3.637007 +step:3952 train loss:3.742770 +step:3953 train loss:3.652679 +step:3954 train loss:3.686469 +step:3955 train loss:3.642604 +step:3956 train loss:3.679027 +step:3957 train loss:3.621837 +step:3958 train loss:3.657247 +step:3959 train loss:3.602511 +step:3960 train loss:3.670115 
+step:3961 train loss:3.626687 +step:3962 train loss:3.634087 +step:3963 train loss:3.625762 +step:3964 train loss:3.612416 +step:3965 train loss:3.616746 +step:3966 train loss:3.677095 +step:3967 train loss:3.601507 +step:3968 train loss:3.647842 +step:3969 train loss:3.615377 +step:3970 train loss:3.651332 +step:3971 train loss:3.648867 +step:3972 train loss:3.678146 +step:3973 train loss:3.608957 +step:3974 train loss:3.672586 +step:3975 train loss:3.596160 +step:3976 train loss:3.670475 +step:3977 train loss:3.690251 +step:3978 train loss:3.635685 +step:3979 train loss:3.597565 +step:3980 train loss:3.646940 +step:3981 train loss:3.622174 +step:3982 train loss:3.631010 +step:3983 train loss:3.705268 +step:3984 train loss:3.625305 +step:3985 train loss:3.661018 +step:3986 train loss:3.652563 +step:3987 train loss:3.618117 +step:3988 train loss:3.632578 +step:3989 train loss:3.619225 +step:3990 train loss:3.644342 +step:3991 train loss:3.645321 +step:3992 train loss:3.655255 +step:3993 train loss:3.740672 +step:3994 train loss:3.579548 +step:3995 train loss:3.650868 +step:3996 train loss:3.693146 +step:3997 train loss:3.638364 +step:3998 train loss:3.730888 +step:3999 train loss:3.668009 +step:4000 validation loss:3.592647 total_sharp:7.6884e-03 L1_sharp:8.9915e-03 L2_sharp:6.7309e-04 L3_sharp:1.8112e-03 L4_sharp:1.4849e-03 L5_sharp:1.2990e-03 L6_sharp:1.3486e-03 L7_sharp:1.8363e-03 L8_sharp:2.7392e-03 L9_sharp:2.3790e-03 L10_sharp:1.3069e-03 L11_sharp:1.2350e-03 L12_sharp:3.3961e-03 total_fnorm:2.1891e+00 total_l1_linf:1.9409e+04 total_spectral:2.1891e+00 L1_fnorm:4.9208e-01 L2_fnorm:5.0929e-01 L3_fnorm:4.8900e-01 L4_fnorm:4.8631e-01 L5_fnorm:4.7708e-01 L6_fnorm:4.9203e-01 L7_fnorm:4.9558e-01 L8_fnorm:5.0096e-01 L9_fnorm:5.0532e-01 L10_fnorm:5.2227e-01 L11_fnorm:5.2713e-01 L12_fnorm:5.1257e-01 L1_l1linf:5.7256e-01 L2_l1linf:5.5638e-01 L3_l1linf:6.2358e-01 L4_l1linf:5.8845e-01 L5_l1linf:5.2653e-01 L6_l1linf:5.7843e-01 L7_l1linf:5.5703e-01 L8_l1linf:5.6828e-01 L9_l1linf:6.0149e-01 L10_l1linf:5.4662e-01 L11_l1linf:5.3136e-01 L12_l1linf:5.4702e-01 L1_spectral:9.1210e-02 L2_spectral:8.2660e-02 L3_spectral:7.9895e-02 L4_spectral:7.8307e-02 L5_spectral:6.3984e-02 L6_spectral:7.1676e-02 L7_spectral:6.8080e-02 L8_spectral:6.7048e-02 L9_spectral:5.6948e-02 L10_spectral:4.5166e-02 L11_spectral:4.7368e-02 L12_spectral:8.1835e-02 v_norm:2.1891e+00 cos_v_-g_hvp:7.9907e-02 g_hvp_norm:3.5475e-01 cos_v_-g_t:9.4512e-02 g_t_norm:3.0149e-01 hv_norm:4.3466e-01 cos_v_hv:3.8721e-02 hg_norm:3.7312e+00 cos_g_hg:6.8192e-01 v_par:1.1258e-02 v_perp:2.1891e+00 L1_cos_v_neg_g:1.4880e-01 L1_v_norm:4.9208e-01 L2_cos_v_neg_g:7.6066e-02 L2_v_norm:5.0929e-01 L3_cos_v_neg_g:6.2197e-02 L3_v_norm:4.8900e-01 L4_cos_v_neg_g:7.0736e-02 L4_v_norm:4.8631e-01 L5_cos_v_neg_g:5.1161e-02 L5_v_norm:4.7708e-01 L6_cos_v_neg_g:7.1331e-02 L6_v_norm:4.9203e-01 L7_cos_v_neg_g:8.3531e-02 L7_v_norm:4.9558e-01 L8_cos_v_neg_g:7.9926e-02 L8_v_norm:5.0096e-01 L9_cos_v_neg_g:7.8567e-02 L9_v_norm:5.0532e-01 L10_cos_v_neg_g:9.0606e-02 L10_v_norm:5.2227e-01 L11_cos_v_neg_g:9.8653e-02 L11_v_norm:5.2713e-01 L12_cos_v_neg_g:1.4165e-01 L12_v_norm:5.1257e-01 +step:4000 train loss:3.662617 +step:4001 train loss:3.791881 +step:4002 train loss:3.653516 +step:4003 train loss:3.674924 +step:4004 train loss:3.684093 +step:4005 train loss:3.651041 +step:4006 train loss:3.651390 +step:4007 train loss:3.668551 +step:4008 train loss:3.636841 +step:4009 train loss:3.684521 +step:4010 train loss:3.714596 +step:4011 train loss:3.670275 +step:4012 train loss:3.611792 
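The cos_v_-g_t and per-layer L*_cos_v_neg_g entries above read as cosine similarities between the optimizer's update direction v and the negative gradient. A small sketch of that kind of computation, with the flattening and all names chosen for illustration only:

import torch
import torch.nn.functional as F

# Sketch of an alignment statistic like "cos_v_-g_t": cosine between a flattened
# parameter update v and the negative gradient -g.  Inputs are lists of tensors,
# one per parameter; the concatenation order is arbitrary but must match.
def cos_update_vs_neg_grad(updates, grads):
    v = torch.cat([u.reshape(-1) for u in updates])
    g = torch.cat([x.reshape(-1) for x in grads])
    return F.cosine_similarity(v, -g, dim=0).item()

# Toy usage with random tensors standing in for real updates/gradients.
shapes = [(64, 64), (64,)]
updates = [torch.randn(s) for s in shapes]
grads = [torch.randn(s) for s in shapes]
print(cos_update_vs_neg_grad(updates, grads))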
+step:4013 train loss:3.624730 +step:4014 train loss:3.649175 +step:4015 train loss:3.627521 +step:4016 train loss:3.631722 +step:4017 train loss:3.647369 +step:4018 train loss:3.693801 +step:4019 train loss:3.612954 +step:4020 train loss:3.654047 +step:4021 train loss:3.614489 +step:4022 train loss:3.673933 +step:4023 train loss:3.587677 +step:4024 train loss:3.628219 +step:4025 train loss:3.636462 +step:4026 train loss:3.660651 +step:4027 train loss:3.612010 +step:4028 train loss:3.659576 +step:4029 train loss:3.686219 +step:4030 train loss:3.681908 +step:4031 train loss:3.755432 +step:4032 train loss:3.611295 +step:4033 train loss:3.707440 +step:4034 train loss:3.652843 +step:4035 train loss:3.686883 +step:4036 train loss:3.610382 +step:4037 train loss:3.630349 +step:4038 train loss:3.617960 +step:4039 train loss:3.675394 +step:4040 train loss:3.624758 +step:4041 train loss:3.614478 +step:4042 train loss:3.617429 +step:4043 train loss:3.630756 +step:4044 train loss:3.668559 +step:4045 train loss:3.664883 +step:4046 train loss:3.692075 +step:4047 train loss:3.639320 +step:4048 train loss:3.693482 +step:4049 train loss:3.696981 +step:4050 train loss:3.646400 +step:4051 train loss:3.675329 +step:4052 train loss:3.722225 +step:4053 train loss:3.645367 +step:4054 train loss:3.645405 +step:4055 train loss:3.645064 +step:4056 train loss:3.619541 +step:4057 train loss:3.670322 +step:4058 train loss:3.695782 +step:4059 train loss:3.648269 +step:4060 train loss:3.667828 +step:4061 train loss:3.656772 +step:4062 train loss:3.635345 +step:4063 train loss:3.665146 +step:4064 train loss:3.649339 +step:4065 train loss:3.686810 +step:4066 train loss:3.632071 +step:4067 train loss:3.830169 +step:4068 train loss:3.569771 +step:4069 train loss:3.670746 +step:4070 train loss:3.655734 +step:4071 train loss:3.663242 +step:4072 train loss:3.637454 +step:4073 train loss:3.680532 +step:4074 train loss:3.604483 +step:4075 train loss:3.676199 +step:4076 train loss:3.646565 +step:4077 train loss:3.648460 +step:4078 train loss:3.615124 +step:4079 train loss:3.655869 +step:4080 train loss:3.793394 +step:4081 train loss:3.756262 +step:4082 train loss:3.756590 +step:4083 train loss:3.641771 +step:4084 train loss:3.660122 +step:4085 train loss:3.642329 +step:4086 train loss:3.605996 +step:4087 train loss:3.582839 +step:4088 train loss:3.624817 +step:4089 train loss:3.634549 +step:4090 train loss:3.654578 +step:4091 train loss:3.576536 +step:4092 train loss:3.633214 +step:4093 train loss:3.604940 +step:4094 train loss:3.622276 +step:4095 train loss:3.700013 +step:4096 train loss:3.700033 +step:4097 train loss:3.638321 +step:4098 train loss:3.635643 +step:4099 train loss:3.668806 +step:4100 train loss:3.683667 +step:4101 train loss:3.684194 +step:4102 train loss:3.574115 +step:4103 train loss:3.620815 +step:4104 train loss:3.574255 +step:4105 train loss:3.661582 +step:4106 train loss:3.597601 +step:4107 train loss:3.640443 +step:4108 train loss:3.576899 +step:4109 train loss:3.713604 +step:4110 train loss:3.605835 +step:4111 train loss:3.625913 +step:4112 train loss:3.751954 +step:4113 train loss:3.543638 +step:4114 train loss:3.652120 +step:4115 train loss:3.590293 +step:4116 train loss:3.701475 +step:4117 train loss:3.632267 +step:4118 train loss:3.603923 +step:4119 train loss:3.674582 +step:4120 train loss:3.600783 +step:4121 train loss:3.583314 +step:4122 train loss:3.585557 +step:4123 train loss:3.624695 +step:4124 train loss:3.578916 +step:4125 train loss:3.591154 +step:4126 train loss:3.707194 +step:4127 train 
loss:3.578506 +step:4128 train loss:3.605357 +step:4129 train loss:3.606145 +step:4130 train loss:3.641090 +step:4131 train loss:3.630872 +step:4132 train loss:3.640256 +step:4133 train loss:3.613940 +step:4134 train loss:3.610326 +step:4135 train loss:3.669417 +step:4136 train loss:3.599727 +step:4137 train loss:3.591127 +step:4138 train loss:3.623249 +step:4139 train loss:3.566386 +step:4140 train loss:3.603510 +step:4141 train loss:3.647271 +step:4142 train loss:3.558943 +step:4143 train loss:3.667030 +step:4144 train loss:3.581100 +step:4145 train loss:3.613862 +step:4146 train loss:3.653997 +step:4147 train loss:3.604947 +step:4148 train loss:3.634757 +step:4149 train loss:3.592160 +step:4150 train loss:3.648808 +step:4151 train loss:3.619709 +step:4152 train loss:3.591758 +step:4153 train loss:3.596499 +step:4154 train loss:3.652803 +step:4155 train loss:3.766921 +step:4156 train loss:3.646581 +step:4157 train loss:3.602364 +step:4158 train loss:3.585232 +step:4159 train loss:3.599104 +step:4160 train loss:3.634385 +step:4161 train loss:3.654057 +step:4162 train loss:3.630825 +step:4163 train loss:3.617149 +step:4164 train loss:3.637866 +step:4165 train loss:3.597604 +step:4166 train loss:3.692244 +step:4167 train loss:3.660108 +step:4168 train loss:3.636265 +step:4169 train loss:3.603954 +step:4170 train loss:3.580426 +step:4171 train loss:3.576384 +step:4172 train loss:3.582471 +step:4173 train loss:3.619891 +step:4174 train loss:3.594234 +step:4175 train loss:3.585436 +step:4176 train loss:3.689841 +step:4177 train loss:3.582727 +step:4178 train loss:3.638624 +step:4179 train loss:3.611278 +step:4180 train loss:3.584896 +step:4181 train loss:3.626441 +step:4182 train loss:3.546495 +step:4183 train loss:3.569139 +step:4184 train loss:3.590030 +step:4185 train loss:3.626858 +step:4186 train loss:3.648827 +step:4187 train loss:3.593474 +step:4188 train loss:3.603837 +step:4189 train loss:3.675541 +step:4190 train loss:3.647806 +step:4191 train loss:3.576789 +step:4192 train loss:3.592518 +step:4193 train loss:3.590344 +step:4194 train loss:3.537523 +step:4195 train loss:3.641867 +step:4196 train loss:3.667528 +step:4197 train loss:3.549068 +step:4198 train loss:3.619060 +step:4199 train loss:3.535247 +step:4200 train loss:3.636503 +step:4201 train loss:3.605314 +step:4202 train loss:3.621711 +step:4203 train loss:3.629667 +step:4204 train loss:3.591639 +step:4205 train loss:3.631203 +step:4206 train loss:3.602672 +step:4207 train loss:3.617938 +step:4208 train loss:3.610196 +step:4209 train loss:3.604079 +step:4210 train loss:3.651483 +step:4211 train loss:3.693947 +step:4212 train loss:3.707863 +step:4213 train loss:3.563149 +step:4214 train loss:3.611156 +step:4215 train loss:3.577614 +step:4216 train loss:3.568624 +step:4217 train loss:3.552961 +step:4218 train loss:3.577435 +step:4219 train loss:3.556893 +step:4220 train loss:3.609008 +step:4221 train loss:3.608896 +step:4222 train loss:3.618678 +step:4223 train loss:3.584906 +step:4224 train loss:3.599411 +step:4225 train loss:3.573768 +step:4226 train loss:3.607075 +step:4227 train loss:3.634294 +step:4228 train loss:3.574240 +step:4229 train loss:3.582483 +step:4230 train loss:3.539342 +step:4231 train loss:3.589071 +step:4232 train loss:3.566702 +step:4233 train loss:3.622988 +step:4234 train loss:3.584120 +step:4235 train loss:3.609539 +step:4236 train loss:3.650139 +step:4237 train loss:3.614779 +step:4238 train loss:3.588235 +step:4239 train loss:3.656326 +step:4240 train loss:3.566343 +step:4241 train loss:3.659919 
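One possible reading of the v_par / v_perp pair reported at each validation step (an assumption, not confirmed by this excerpt): the norms of the update's component along the gradient direction and of the orthogonal remainder, which would be consistent with v_perp staying close to v_norm while v_par stays small in the records above. A sketch under that assumption:

import torch

# Assumed interpretation of "v_par" / "v_perp": split the flattened update v into its
# projection onto the unit gradient direction and the orthogonal residual, and report
# both norms.  Purely illustrative; the actual analysis code is not shown in this log.
def par_perp_norms(v, g):
    g_hat = g / g.norm()
    v_par = torch.dot(v, g_hat) * g_hat
    v_perp = v - v_par
    return v_par.norm().item(), v_perp.norm().item()

v = torch.randn(1000)
g = torch.randn(1000)
print(par_perp_norms(v, g))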
+step:4242 train loss:3.624392 +step:4243 train loss:3.587655 +step:4244 train loss:3.589247 +step:4245 train loss:3.598530 +step:4246 train loss:3.619633 +step:4247 train loss:3.630854 +step:4248 train loss:3.664125 +step:4249 train loss:3.590026 +step:4250 validation loss:3.582737 +step:4250 train loss:3.588815 +step:4251 train loss:3.589983 +step:4252 train loss:3.613160 +step:4253 train loss:3.607517 +step:4254 train loss:3.662434 +step:4255 train loss:3.614425 +step:4256 train loss:3.609234 +step:4257 train loss:3.599947 +step:4258 train loss:3.653199 +step:4259 train loss:3.646685 +step:4260 train loss:3.601807 +step:4261 train loss:3.623086 +step:4262 train loss:3.597424 +step:4263 train loss:3.605535 +step:4264 train loss:3.590822 +step:4265 train loss:3.577929 +step:4266 train loss:3.609364 +step:4267 train loss:3.545859 +step:4268 train loss:3.601481 +step:4269 train loss:3.542084 +step:4270 train loss:3.627911 +step:4271 train loss:3.655574 +step:4272 train loss:3.611230 +step:4273 train loss:3.603771 +step:4274 train loss:3.554053 +step:4275 train loss:3.656073 +step:4276 train loss:3.610184 +step:4277 train loss:3.680354 +step:4278 train loss:3.586562 +step:4279 train loss:3.639534 +step:4280 train loss:3.714828 +step:4281 train loss:3.736068 +step:4282 train loss:3.591976 +step:4283 train loss:3.612553 +step:4284 train loss:3.646671 +step:4285 train loss:3.646520 +step:4286 train loss:3.579422 +step:4287 train loss:3.616187 +step:4288 train loss:3.596823 +step:4289 train loss:3.695551 +step:4290 train loss:3.562963 +step:4291 train loss:3.582813 +step:4292 train loss:3.570971 +step:4293 train loss:3.591218 +step:4294 train loss:3.601970 +step:4295 train loss:3.603387 +step:4296 train loss:3.558015 +step:4297 train loss:3.611681 +step:4298 train loss:3.617996 +step:4299 train loss:3.590523 +step:4300 train loss:3.660453 +step:4301 train loss:3.683476 +step:4302 train loss:3.676056 +step:4303 train loss:3.620050 +step:4304 train loss:3.597300 +step:4305 train loss:3.675280 +step:4306 train loss:3.607300 +step:4307 train loss:3.641345 +step:4308 train loss:3.719172 +step:4309 train loss:3.646607 +step:4310 train loss:3.612661 +step:4311 train loss:3.641794 +step:4312 train loss:3.645603 +step:4313 train loss:3.649024 +step:4314 train loss:3.675712 +step:4315 train loss:3.731352 +step:4316 train loss:3.693119 +step:4317 train loss:3.655067 +step:4318 train loss:3.691382 +step:4319 train loss:3.690437 +step:4320 train loss:3.668247 +step:4321 train loss:3.762750 +step:4322 train loss:3.620899 +step:4323 train loss:3.688560 +step:4324 train loss:3.725229 +step:4325 train loss:3.659811 +step:4326 train loss:3.658788 +step:4327 train loss:3.615448 +step:4328 train loss:3.601846 +step:4329 train loss:3.612405 +step:4330 train loss:3.655318 +step:4331 train loss:3.600894 +step:4332 train loss:3.527381 +step:4333 train loss:3.618899 +step:4334 train loss:3.652751 +step:4335 train loss:3.606535 +step:4336 train loss:3.669978 +step:4337 train loss:3.621172 +step:4338 train loss:3.649744 +step:4339 train loss:3.599387 +step:4340 train loss:3.625987 +step:4341 train loss:3.618267 +step:4342 train loss:3.601660 +step:4343 train loss:3.654852 +step:4344 train loss:3.697695 +step:4345 train loss:3.760022 +step:4346 train loss:3.638077 +step:4347 train loss:3.609052 +step:4348 train loss:3.654540 +step:4349 train loss:3.630574 +step:4350 train loss:3.654541 +step:4351 train loss:3.614578 +step:4352 train loss:3.678398 +step:4353 train loss:3.676604 +step:4354 train loss:3.612181 +step:4355 
train loss:3.606082 +step:4356 train loss:3.635583 +step:4357 train loss:3.582385 +step:4358 train loss:3.638247 +step:4359 train loss:3.668294 +step:4360 train loss:3.650708 +step:4361 train loss:3.618339 +step:4362 train loss:3.608592 +step:4363 train loss:3.624705 +step:4364 train loss:3.667841 +step:4365 train loss:3.667749 +step:4366 train loss:3.594130 +step:4367 train loss:3.615182 +step:4368 train loss:3.629413 +step:4369 train loss:3.550624 +step:4370 train loss:3.719679 +step:4371 train loss:3.778987 +step:4372 train loss:3.609496 +step:4373 train loss:3.632909 +step:4374 train loss:3.663407 +step:4375 train loss:3.645546 +step:4376 train loss:3.537699 +step:4377 train loss:3.613215 +step:4378 train loss:3.581208 +step:4379 train loss:3.625791 +step:4380 train loss:3.668762 +step:4381 train loss:3.625167 +step:4382 train loss:3.639194 +step:4383 train loss:3.628639 +step:4384 train loss:3.624833 +step:4385 train loss:3.643780 +step:4386 train loss:3.620571 +step:4387 train loss:3.592431 +step:4388 train loss:3.612650 +step:4389 train loss:3.654187 +step:4390 train loss:3.656424 +step:4391 train loss:3.619902 +step:4392 train loss:3.634661 +step:4393 train loss:3.736297 +step:4394 train loss:3.620942 +step:4395 train loss:3.641471 +step:4396 train loss:3.631525 +step:4397 train loss:3.604504 +step:4398 train loss:3.637146 +step:4399 train loss:3.710304 +step:4400 train loss:3.675117 +step:4401 train loss:3.596427 +step:4402 train loss:3.620663 +step:4403 train loss:3.635823 +step:4404 train loss:3.704925 +step:4405 train loss:3.658005 +step:4406 train loss:3.699028 +step:4407 train loss:3.624115 +step:4408 train loss:3.562323 +step:4409 train loss:3.663883 +step:4410 train loss:3.720375 +step:4411 train loss:3.629821 +step:4412 train loss:3.638640 +step:4413 train loss:3.594638 +step:4414 train loss:3.631698 +step:4415 train loss:3.633627 +step:4416 train loss:3.746584 +step:4417 train loss:3.620902 +step:4418 train loss:3.528883 +step:4419 train loss:3.714338 +step:4420 train loss:3.628487 +step:4421 train loss:3.644766 +step:4422 train loss:3.634846 +step:4423 train loss:3.612635 +step:4424 train loss:3.627404 +step:4425 train loss:3.629951 +step:4426 train loss:3.667147 +step:4427 train loss:3.653061 +step:4428 train loss:3.682357 +step:4429 train loss:3.760331 +step:4430 train loss:3.659945 +step:4431 train loss:3.615605 +step:4432 train loss:3.608256 +step:4433 train loss:3.706964 +step:4434 train loss:3.749853 +step:4435 train loss:3.620773 +step:4436 train loss:3.630097 +step:4437 train loss:3.591987 +step:4438 train loss:3.586735 +step:4439 train loss:3.622310 +step:4440 train loss:3.595953 +step:4441 train loss:3.633036 +step:4442 train loss:3.597269 +step:4443 train loss:3.613317 +step:4444 train loss:3.626830 +step:4445 train loss:3.635573 +step:4446 train loss:3.607079 +step:4447 train loss:3.884109 +step:4448 train loss:3.673710 +step:4449 train loss:3.645685 +step:4450 train loss:3.640578 +step:4451 train loss:3.669226 +step:4452 train loss:3.605262 +step:4453 train loss:3.607011 +step:4454 train loss:3.591191 +step:4455 train loss:3.619299 +step:4456 train loss:3.608911 +step:4457 train loss:3.578187 +step:4458 train loss:3.635975 +step:4459 train loss:3.645707 +step:4460 train loss:3.571117 +step:4461 train loss:3.622639 +step:4462 train loss:3.630077 +step:4463 train loss:3.635891 +step:4464 train loss:3.624560 +step:4465 train loss:3.616357 +step:4466 train loss:3.603424 +step:4467 train loss:3.544156 +step:4468 train loss:3.615845 +step:4469 train loss:3.675004 
+step:4470 train loss:3.624497 +step:4471 train loss:3.630172 +step:4472 train loss:3.638993 +step:4473 train loss:3.632525 +step:4474 train loss:3.609183 +step:4475 train loss:3.618985 +step:4476 train loss:3.641721 +step:4477 train loss:3.584312 +step:4478 train loss:3.599335 +step:4479 train loss:3.604728 +step:4480 train loss:3.604120 +step:4481 train loss:3.641573 +step:4482 train loss:3.635486 +step:4483 train loss:3.667577 +step:4484 train loss:3.651222 +step:4485 train loss:3.675272 +step:4486 train loss:3.621504 +step:4487 train loss:3.649435 +step:4488 train loss:3.634870 +step:4489 train loss:3.617692 +step:4490 train loss:3.629572 +step:4491 train loss:3.619728 +step:4492 train loss:3.627319 +step:4493 train loss:3.682445 +step:4494 train loss:3.613728 +step:4495 train loss:3.649206 +step:4496 train loss:3.663590 +step:4497 train loss:3.650485 +step:4498 train loss:3.579872 +step:4499 train loss:3.633158 +step:4500 validation loss:3.567437 total_sharp:7.2136e-03 L1_sharp:1.1376e-02 L2_sharp:7.0915e-04 L3_sharp:1.4249e-03 L4_sharp:1.5158e-03 L5_sharp:9.6938e-04 L6_sharp:1.3122e-03 L7_sharp:1.7213e-03 L8_sharp:2.2612e-03 L9_sharp:1.9525e-03 L10_sharp:1.0833e-03 L11_sharp:1.1924e-03 L12_sharp:3.3302e-03 total_fnorm:2.1714e+00 total_l1_linf:1.9228e+04 total_spectral:2.1714e+00 L1_fnorm:4.6023e-01 L2_fnorm:4.8541e-01 L3_fnorm:4.8996e-01 L4_fnorm:4.8049e-01 L5_fnorm:4.5728e-01 L6_fnorm:4.8862e-01 L7_fnorm:4.9584e-01 L8_fnorm:4.9796e-01 L9_fnorm:5.0975e-01 L10_fnorm:5.2746e-01 L11_fnorm:5.3146e-01 L12_fnorm:5.1978e-01 L1_l1linf:5.9286e-01 L2_l1linf:5.7994e-01 L3_l1linf:6.8262e-01 L4_l1linf:5.9364e-01 L5_l1linf:4.9722e-01 L6_l1linf:5.9608e-01 L7_l1linf:6.3212e-01 L8_l1linf:5.9965e-01 L9_l1linf:5.3499e-01 L10_l1linf:5.5967e-01 L11_l1linf:5.5697e-01 L12_l1linf:5.6467e-01 L1_spectral:7.7519e-02 L2_spectral:8.2442e-02 L3_spectral:8.6583e-02 L4_spectral:8.6462e-02 L5_spectral:6.7003e-02 L6_spectral:8.1431e-02 L7_spectral:7.3433e-02 L8_spectral:6.9681e-02 L9_spectral:5.6283e-02 L10_spectral:4.7000e-02 L11_spectral:4.8332e-02 L12_spectral:7.8054e-02 v_norm:2.1714e+00 cos_v_-g_hvp:7.8430e-02 g_hvp_norm:3.5996e-01 cos_v_-g_t:9.2388e-02 g_t_norm:3.0796e-01 hv_norm:4.3603e-01 cos_v_hv:3.5923e-02 hg_norm:4.5888e+00 cos_g_hg:6.4776e-01 v_par:1.1720e-02 v_perp:2.1714e+00 L1_cos_v_neg_g:1.5919e-01 L1_v_norm:4.6023e-01 L2_cos_v_neg_g:7.3969e-02 L2_v_norm:4.8541e-01 L3_cos_v_neg_g:5.9869e-02 L3_v_norm:4.8996e-01 L4_cos_v_neg_g:6.7673e-02 L4_v_norm:4.8049e-01 L5_cos_v_neg_g:4.7725e-02 L5_v_norm:4.5728e-01 L6_cos_v_neg_g:6.3150e-02 L6_v_norm:4.8862e-01 L7_cos_v_neg_g:7.8066e-02 L7_v_norm:4.9584e-01 L8_cos_v_neg_g:7.4548e-02 L8_v_norm:4.9796e-01 L9_cos_v_neg_g:7.3518e-02 L9_v_norm:5.0975e-01 L10_cos_v_neg_g:8.5821e-02 L10_v_norm:5.2746e-01 L11_cos_v_neg_g:1.0101e-01 L11_v_norm:5.3146e-01 L12_cos_v_neg_g:1.4719e-01 L12_v_norm:5.1978e-01 +step:4500 train loss:3.671772 +step:4501 train loss:3.635969 +step:4502 train loss:3.577676 +step:4503 train loss:3.621811 +step:4504 train loss:3.648028 +step:4505 train loss:3.659837 +step:4506 train loss:3.638939 +step:4507 train loss:3.668601 +step:4508 train loss:3.600703 +step:4509 train loss:3.623218 +step:4510 train loss:3.633772 +step:4511 train loss:3.635241 +step:4512 train loss:3.692994 +step:4513 train loss:3.651163 +step:4514 train loss:3.588153 +step:4515 train loss:3.648686 +step:4516 train loss:3.599381 +step:4517 train loss:3.602608 +step:4518 train loss:3.600740 +step:4519 train loss:3.665341 +step:4520 train loss:3.631064 +step:4521 train loss:3.641130 
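The total_sharp / L*_sharp and hv_norm columns point to some Hessian-vector-product-based sharpness probe, but the exact procedure is not visible in this excerpt. The following is therefore only a generic power-iteration sketch for the top Hessian eigenvalue, under the standard assumption that the loss graph is built so second derivatives are available; it is not the script's own analysis code.

import torch

# Generic sharpness estimate via power iteration on Hessian-vector products,
# included only as a reading aid for the "*_sharp" columns above.
def top_hessian_eigenvalue(loss, params, iters=20):
    grads = torch.autograd.grad(loss, params, create_graph=True)
    v = [torch.randn_like(p) for p in params]
    for _ in range(iters):
        hv = torch.autograd.grad(grads, params, grad_outputs=v, retain_graph=True)
        norm = torch.sqrt(sum((h * h).sum() for h in hv))
        v = [h / norm for h in hv]
    hv = torch.autograd.grad(grads, params, grad_outputs=v, retain_graph=True)
    return sum((h * vi).sum() for h, vi in zip(hv, v)).item()   # Rayleigh quotient v^T H v

# Toy usage: sharpness of a tiny non-linear loss in 10 parameters.
w = torch.randn(10, requires_grad=True)
loss = ((w ** 2).sum() - 1.0) ** 2
print(top_hessian_eigenvalue(loss, [w]))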
+step:4522 train loss:3.643842 +step:4523 train loss:3.630847 +step:4524 train loss:3.610580 +step:4525 train loss:3.679616 +step:4526 train loss:3.699117 +step:4527 train loss:3.684002 +step:4528 train loss:3.654589 +step:4529 train loss:3.688795 +step:4530 train loss:3.603960 +step:4531 train loss:3.581614 +step:4532 train loss:3.620907 +step:4533 train loss:3.637988 +step:4534 train loss:3.624279 +step:4535 train loss:3.624051 +step:4536 train loss:3.633594 +step:4537 train loss:3.600494 +step:4538 train loss:3.584864 +step:4539 train loss:3.688700 +step:4540 train loss:3.608837 +step:4541 train loss:3.628865 +step:4542 train loss:3.678898 +step:4543 train loss:3.588531 +step:4544 train loss:3.613672 +step:4545 train loss:3.722797 +step:4546 train loss:3.640907 +step:4547 train loss:3.690110 +step:4548 train loss:3.605538 +step:4549 train loss:3.612421 +step:4550 train loss:3.685911 +step:4551 train loss:3.686445 +step:4552 train loss:3.616647 +step:4553 train loss:3.630647 +step:4554 train loss:3.586581 +step:4555 train loss:3.625578 +step:4556 train loss:3.621824 +step:4557 train loss:3.606833 +step:4558 train loss:3.665979 +step:4559 train loss:3.639706 +step:4560 train loss:3.657730 +step:4561 train loss:3.596788 +step:4562 train loss:3.611131 +step:4563 train loss:3.630593 +step:4564 train loss:3.691858 +step:4565 train loss:3.617368 +step:4566 train loss:3.693469 +step:4567 train loss:3.629329 +step:4568 train loss:3.646736 +step:4569 train loss:3.722329 +step:4570 train loss:3.624104 +step:4571 train loss:3.585190 +step:4572 train loss:3.619491 +step:4573 train loss:3.650068 +step:4574 train loss:3.657191 +step:4575 train loss:3.677724 +step:4576 train loss:3.617536 +step:4577 train loss:3.626029 +step:4578 train loss:3.634815 +step:4579 train loss:3.634775 +step:4580 train loss:3.679169 +step:4581 train loss:3.719237 +step:4582 train loss:3.657076 +step:4583 train loss:3.633725 +step:4584 train loss:3.601763 +step:4585 train loss:3.757853 +step:4586 train loss:3.625395 +step:4587 train loss:3.605132 +step:4588 train loss:3.589335 +step:4589 train loss:3.680116 +step:4590 train loss:3.607790 +step:4591 train loss:3.582728 +step:4592 train loss:3.575489 +step:4593 train loss:3.554552 +step:4594 train loss:3.591472 +step:4595 train loss:3.636580 +step:4596 train loss:3.626560 +step:4597 train loss:3.616292 +step:4598 train loss:3.556978 +step:4599 train loss:3.607416 +step:4600 train loss:3.659242 +step:4601 train loss:3.590049 +step:4602 train loss:3.658194 +step:4603 train loss:3.629553 +step:4604 train loss:3.640871 +step:4605 train loss:3.602526 +step:4606 train loss:3.657303 +step:4607 train loss:3.596986 +step:4608 train loss:3.588543 +step:4609 train loss:3.631490 +step:4610 train loss:3.680704 +step:4611 train loss:3.636316 +step:4612 train loss:3.611744 +step:4613 train loss:3.557192 +step:4614 train loss:3.625468 +step:4615 train loss:3.583851 +step:4616 train loss:3.637379 +step:4617 train loss:3.583466 +step:4618 train loss:3.640709 +step:4619 train loss:3.651135 +step:4620 train loss:3.580193 +step:4621 train loss:3.599471 +step:4622 train loss:3.575837 +step:4623 train loss:3.582365 +step:4624 train loss:3.609225 +step:4625 train loss:3.645700 +step:4626 train loss:3.608282 +step:4627 train loss:3.594424 +step:4628 train loss:3.645499 +step:4629 train loss:3.600257 +step:4630 train loss:3.598093 +step:4631 train loss:3.638089 +step:4632 train loss:3.559550 +step:4633 train loss:3.664105 +step:4634 train loss:3.559901 +step:4635 train loss:3.607148 +step:4636 train 
loss:3.667294 +step:4637 train loss:3.656507 +step:4638 train loss:3.620146 +step:4639 train loss:3.592293 +step:4640 train loss:3.577358 +step:4641 train loss:3.616634 +step:4642 train loss:3.594257 +step:4643 train loss:3.605062 +step:4644 train loss:3.638638 +step:4645 train loss:3.632946 +step:4646 train loss:3.537583 +step:4647 train loss:3.596105 +step:4648 train loss:3.534391 +step:4649 train loss:3.532418 +step:4650 train loss:3.613661 +step:4651 train loss:3.609554 +step:4652 train loss:3.576313 +step:4653 train loss:3.594419 +step:4654 train loss:3.579519 +step:4655 train loss:3.600195 +step:4656 train loss:3.647175 +step:4657 train loss:3.576869 +step:4658 train loss:3.608936 +step:4659 train loss:3.561965 +step:4660 train loss:3.641956 +step:4661 train loss:3.668322 +step:4662 train loss:3.632821 +step:4663 train loss:3.573290 +step:4664 train loss:3.585887 +step:4665 train loss:3.561419 +step:4666 train loss:3.584834 +step:4667 train loss:3.642255 +step:4668 train loss:3.638587 +step:4669 train loss:3.611892 +step:4670 train loss:3.550239 +step:4671 train loss:3.642847 +step:4672 train loss:3.659961 +step:4673 train loss:3.606731 +step:4674 train loss:3.601094 +step:4675 train loss:3.598550 +step:4676 train loss:3.605073 +step:4677 train loss:3.580625 +step:4678 train loss:3.619411 +step:4679 train loss:3.620227 +step:4680 train loss:3.615789 +step:4681 train loss:3.572943 +step:4682 train loss:3.625641 +step:4683 train loss:3.602717 +step:4684 train loss:3.646616 +step:4685 train loss:3.615018 +step:4686 train loss:3.624663 +step:4687 train loss:3.630906 +step:4688 train loss:3.594755 +step:4689 train loss:3.646434 +step:4690 train loss:3.624772 +step:4691 train loss:3.662341 +step:4692 train loss:3.624341 +step:4693 train loss:3.605873 +step:4694 train loss:3.627184 +step:4695 train loss:3.628550 +step:4696 train loss:3.611839 +step:4697 train loss:3.630392 +step:4698 train loss:3.590542 +step:4699 train loss:3.577121 +step:4700 train loss:3.583756 +step:4701 train loss:3.619338 +step:4702 train loss:3.609062 +step:4703 train loss:3.649859 +step:4704 train loss:3.694536 +step:4705 train loss:3.712980 +step:4706 train loss:3.669969 +step:4707 train loss:3.663826 +step:4708 train loss:3.617391 +step:4709 train loss:3.628545 +step:4710 train loss:3.563985 +step:4711 train loss:3.610414 +step:4712 train loss:3.624710 +step:4713 train loss:3.611137 +step:4714 train loss:3.601195 +step:4715 train loss:3.587565 +step:4716 train loss:3.630582 +step:4717 train loss:3.565645 +step:4718 train loss:3.635669 +step:4719 train loss:3.617763 +step:4720 train loss:3.597628 +step:4721 train loss:3.657494 +step:4722 train loss:3.589972 +step:4723 train loss:3.623154 +step:4724 train loss:3.563353 +step:4725 train loss:3.593978 +step:4726 train loss:3.629026 +step:4727 train loss:3.628954 +step:4728 train loss:3.587764 +step:4729 train loss:3.619226 +step:4730 train loss:3.653887 +step:4731 train loss:3.604826 +step:4732 train loss:3.639442 +step:4733 train loss:3.721719 +step:4734 train loss:3.606160 +step:4735 train loss:3.557562 +step:4736 train loss:3.615566 +step:4737 train loss:3.679698 +step:4738 train loss:3.625230 +step:4739 train loss:3.610403 +step:4740 train loss:3.589346 +step:4741 train loss:3.649056 +step:4742 train loss:3.654089 +step:4743 train loss:3.659722 +step:4744 train loss:3.626631 +step:4745 train loss:3.584371 +step:4746 train loss:3.632996 +step:4747 train loss:3.647017 +step:4748 train loss:3.638065 +step:4749 train loss:3.596309 +step:4750 validation loss:3.551444 
+step:4750 train loss:3.615862 +step:4751 train loss:3.682713 +step:4752 train loss:3.659051 +step:4753 train loss:3.636451 +step:4754 train loss:3.640510 +step:4755 train loss:3.596242 +step:4756 train loss:3.562766 +step:4757 train loss:3.600875 +step:4758 train loss:3.602496 +step:4759 train loss:3.583444 +step:4760 train loss:3.623368 +step:4761 train loss:3.630924 +step:4762 train loss:3.719355 +step:4763 train loss:3.556651 +step:4764 train loss:3.642439 +step:4765 train loss:3.723411 +step:4766 train loss:3.701729 +step:4767 train loss:3.602065 +step:4768 train loss:3.605513 +step:4769 train loss:3.588834 +step:4770 train loss:3.604664 +step:4771 train loss:3.581116 +step:4772 train loss:3.564604 +step:4773 train loss:3.618188 +step:4774 train loss:3.586944 +step:4775 train loss:3.602253 +step:4776 train loss:3.590324 +step:4777 train loss:3.602680 +step:4778 train loss:3.583845 +step:4779 train loss:3.621110 +step:4780 train loss:3.605910 +step:4781 train loss:3.625056 +step:4782 train loss:3.711730 +step:4783 train loss:3.595436 +step:4784 train loss:3.595442 +step:4785 train loss:3.586508 +step:4786 train loss:3.639474 +step:4787 train loss:3.577083 +step:4788 train loss:3.582090 +step:4789 train loss:3.584486 +step:4790 train loss:3.611131 +step:4791 train loss:3.645432 +step:4792 train loss:3.592041 +step:4793 train loss:3.571688 +step:4794 train loss:3.522061 +step:4795 train loss:3.578358 +step:4796 train loss:3.607999 +step:4797 train loss:3.627687 +step:4798 train loss:3.642729 +step:4799 train loss:3.573453 +step:4800 train loss:3.608744 +step:4801 train loss:3.615877 +step:4802 train loss:3.564015 +step:4803 train loss:3.588536 +step:4804 train loss:3.663034 +step:4805 train loss:3.626782 +step:4806 train loss:3.596552 +step:4807 train loss:3.646485 +step:4808 train loss:3.585778 +step:4809 train loss:3.619093 +step:4810 train loss:3.594985 +step:4811 train loss:3.635490 +step:4812 train loss:3.703318 +step:4813 train loss:3.752235 +step:4814 train loss:3.626844 +step:4815 train loss:3.632107 +step:4816 train loss:3.607199 +step:4817 train loss:3.560534 +step:4818 train loss:3.560986 +step:4819 train loss:3.661080 +step:4820 train loss:3.646987 +step:4821 train loss:3.644688 +step:4822 train loss:3.680261 +step:4823 train loss:3.629550 +step:4824 train loss:3.576510 +step:4825 train loss:3.574519 +step:4826 train loss:3.612213 +step:4827 train loss:3.585491 +step:4828 train loss:3.579022 +step:4829 train loss:3.643150 +step:4830 train loss:3.642865 +step:4831 train loss:3.591584 +step:4832 train loss:3.639998 +step:4833 train loss:3.608780 +step:4834 train loss:3.578929 +step:4835 train loss:3.587305 +step:4836 train loss:3.609207 +step:4837 train loss:3.666719 +step:4838 train loss:3.752774 +step:4839 train loss:3.606949 +step:4840 train loss:3.584255 +step:4841 train loss:3.628479 +step:4842 train loss:3.580577 +step:4843 train loss:3.561993 +step:4844 train loss:3.604016 +step:4845 train loss:3.600013 +step:4846 train loss:3.584975 +step:4847 train loss:3.689303 +step:4848 train loss:3.619915 +step:4849 train loss:3.598169 +step:4850 train loss:3.590353 +step:4851 train loss:3.591321 +step:4852 train loss:3.564622 +step:4853 train loss:3.592324 +step:4854 train loss:3.616749 +step:4855 train loss:3.640879 +step:4856 train loss:3.690541 +step:4857 train loss:3.650183 +step:4858 train loss:3.616590 +step:4859 train loss:3.566039 +step:4860 train loss:3.632390 +step:4861 train loss:3.533796 +step:4862 train loss:3.639053 +step:4863 train loss:3.625409 +step:4864 train 
loss:3.594534 +step:4865 train loss:3.587468 +step:4866 train loss:3.572090 +step:4867 train loss:3.642757 +step:4868 train loss:3.635162 +step:4869 train loss:3.623094 +step:4870 train loss:3.669472 +step:4871 train loss:3.615582 +step:4872 train loss:3.626973 +step:4873 train loss:3.679578 +step:4874 train loss:3.636259 +step:4875 train loss:3.668808 +step:4876 train loss:3.662579 +step:4877 train loss:3.653526 +step:4878 train loss:3.626635 +step:4879 train loss:3.631621 +step:4880 train loss:3.636889 +step:4881 train loss:3.565771 +step:4882 train loss:3.581981 +step:4883 train loss:3.629660 +step:4884 train loss:3.589380 +step:4885 train loss:3.628873 +step:4886 train loss:3.605219 +step:4887 train loss:3.692332 +step:4888 train loss:3.612463 +step:4889 train loss:3.640113 +step:4890 train loss:3.565608 +step:4891 train loss:3.596894 +step:4892 train loss:3.623889 +step:4893 train loss:3.619513 +step:4894 train loss:3.596661 +step:4895 train loss:3.657005 +step:4896 train loss:3.533913 +step:4897 train loss:3.574441 +step:4898 train loss:3.677077 +step:4899 train loss:3.664189 +step:4900 train loss:3.622854 +step:4901 train loss:3.539719 +step:4902 train loss:3.602830 +step:4903 train loss:3.649064 +step:4904 train loss:3.587182 +step:4905 train loss:3.586874 +step:4906 train loss:3.601644 +step:4907 train loss:3.592457 +step:4908 train loss:3.570138 +step:4909 train loss:3.615708 +step:4910 train loss:3.581141 +step:4911 train loss:3.567131 +step:4912 train loss:3.598843 +step:4913 train loss:3.583835 +step:4914 train loss:3.565093 +step:4915 train loss:3.609788 +step:4916 train loss:3.633418 +step:4917 train loss:3.582139 +step:4918 train loss:3.631106 +step:4919 train loss:3.598000 +step:4920 train loss:3.629803 +step:4921 train loss:3.593026 +step:4922 train loss:3.625900 +step:4923 train loss:3.536400 +step:4924 train loss:3.548692 +step:4925 train loss:3.539390 +step:4926 train loss:3.615987 +step:4927 train loss:3.630490 +step:4928 train loss:3.562802 +step:4929 train loss:3.625446 +step:4930 train loss:3.575496 +step:4931 train loss:3.591248 +step:4932 train loss:3.602921 +step:4933 train loss:3.564110 +step:4934 train loss:3.658901 +step:4935 train loss:3.589321 +step:4936 train loss:3.545030 +step:4937 train loss:3.571367 +step:4938 train loss:3.575475 +step:4939 train loss:3.591661 +step:4940 train loss:3.579422 +step:4941 train loss:3.565554 +step:4942 train loss:3.658238 +step:4943 train loss:3.577291 +step:4944 train loss:3.591609 +step:4945 train loss:3.577171 +step:4946 train loss:3.618260 +step:4947 train loss:3.623488 +step:4948 train loss:3.588234 +step:4949 train loss:3.572930 +step:4950 train loss:3.715169 +step:4951 train loss:3.620355 +step:4952 train loss:3.626899 +step:4953 train loss:3.599805 +step:4954 train loss:3.614791 +step:4955 train loss:3.599782 +step:4956 train loss:3.609332 +step:4957 train loss:3.618746 +step:4958 train loss:3.584257 +step:4959 train loss:3.545006 +step:4960 train loss:3.600238 +step:4961 train loss:3.589554 +step:4962 train loss:3.555244 +step:4963 train loss:3.630739 +step:4964 train loss:3.596421 +step:4965 train loss:3.562569 +step:4966 train loss:3.662241 +step:4967 train loss:3.676685 +step:4968 train loss:3.569475 +step:4969 train loss:3.581307 +step:4970 train loss:3.612921 +step:4971 train loss:3.559922 +step:4972 train loss:3.640258 +step:4973 train loss:3.635077 +step:4974 train loss:3.592538 +step:4975 train loss:3.623029 +step:4976 train loss:3.589864 +step:4977 train loss:3.551383 +step:4978 train loss:3.601085 
+step:4979 train loss:3.607636 +step:4980 train loss:3.552478 +step:4981 train loss:3.525126 +step:4982 train loss:3.790198 +step:4983 train loss:3.636551 +step:4984 train loss:3.604347 +step:4985 train loss:3.561195 +step:4986 train loss:3.544111 +step:4987 train loss:3.619121 +step:4988 train loss:3.597605 +step:4989 train loss:3.606663 +step:4990 train loss:3.579219 +step:4991 train loss:3.552364 +step:4992 train loss:3.594604 +step:4993 train loss:3.558016 +step:4994 train loss:3.586315 +step:4995 train loss:3.603698 +step:4996 train loss:3.605750 +step:4997 train loss:3.637084 +step:4998 train loss:3.570697 +step:4999 train loss:3.570868 +step:5000 validation loss:3.549190 total_sharp:8.8194e-03 L1_sharp:1.0978e-02 L2_sharp:5.9553e-04 L3_sharp:1.5996e-03 L4_sharp:2.6859e-03 L5_sharp:1.2405e-03 L6_sharp:1.3756e-03 L7_sharp:1.7730e-03 L8_sharp:2.4475e-03 L9_sharp:2.4395e-03 L10_sharp:1.2695e-03 L11_sharp:1.3531e-03 L12_sharp:4.5093e-03 total_fnorm:2.2140e+00 total_l1_linf:1.9677e+04 total_spectral:2.2140e+00 L1_fnorm:5.0044e-01 L2_fnorm:5.0344e-01 L3_fnorm:4.9845e-01 L4_fnorm:4.9501e-01 L5_fnorm:4.7405e-01 L6_fnorm:4.9766e-01 L7_fnorm:5.0654e-01 L8_fnorm:5.0816e-01 L9_fnorm:5.1668e-01 L10_fnorm:5.3250e-01 L11_fnorm:5.3799e-01 L12_fnorm:5.3124e-01 L1_l1linf:6.4495e-01 L2_l1linf:5.9637e-01 L3_l1linf:6.3068e-01 L4_l1linf:6.3378e-01 L5_l1linf:5.7142e-01 L6_l1linf:6.2497e-01 L7_l1linf:5.9404e-01 L8_l1linf:5.8751e-01 L9_l1linf:5.5228e-01 L10_l1linf:5.5029e-01 L11_l1linf:5.6230e-01 L12_l1linf:5.7717e-01 L1_spectral:8.4424e-02 L2_spectral:8.3562e-02 L3_spectral:8.8792e-02 L4_spectral:8.7488e-02 L5_spectral:6.8828e-02 L6_spectral:8.4193e-02 L7_spectral:7.9593e-02 L8_spectral:7.6291e-02 L9_spectral:6.0829e-02 L10_spectral:4.6917e-02 L11_spectral:5.0481e-02 L12_spectral:7.7966e-02 v_norm:2.2140e+00 cos_v_-g_hvp:7.4794e-02 g_hvp_norm:4.0397e-01 cos_v_-g_t:8.6812e-02 g_t_norm:3.4942e-01 hv_norm:5.9925e-01 cos_v_hv:3.2584e-02 hg_norm:6.5155e+00 cos_g_hg:7.4083e-01 v_par:9.2820e-03 v_perp:2.2140e+00 L1_cos_v_neg_g:1.5658e-01 L1_v_norm:5.0044e-01 L2_cos_v_neg_g:6.5515e-02 L2_v_norm:5.0344e-01 L3_cos_v_neg_g:5.3100e-02 L3_v_norm:4.9845e-01 L4_cos_v_neg_g:6.1285e-02 L4_v_norm:4.9501e-01 L5_cos_v_neg_g:4.2642e-02 L5_v_norm:4.7405e-01 L6_cos_v_neg_g:5.9566e-02 L6_v_norm:4.9766e-01 L7_cos_v_neg_g:7.3039e-02 L7_v_norm:5.0654e-01 L8_cos_v_neg_g:7.1413e-02 L8_v_norm:5.0816e-01 L9_cos_v_neg_g:6.8521e-02 L9_v_norm:5.1668e-01 L10_cos_v_neg_g:8.6441e-02 L10_v_norm:5.3250e-01 L11_cos_v_neg_g:1.0280e-01 L11_v_norm:5.3799e-01 L12_cos_v_neg_g:1.6013e-01 L12_v_norm:5.3124e-01 +step:5000 train loss:3.624559 +step:5001 train loss:3.674883 +step:5002 train loss:3.571294 +step:5003 train loss:3.526414 +step:5004 train loss:3.591703 +step:5005 train loss:3.583825 +step:5006 train loss:3.545742 +step:5007 train loss:3.564623 +step:5008 train loss:3.549574 +step:5009 train loss:3.637197 +step:5010 train loss:3.630292 +step:5011 train loss:3.599148 +step:5012 train loss:3.618296 +step:5013 train loss:3.613911 +step:5014 train loss:3.687875 +step:5015 train loss:3.571982 +step:5016 train loss:3.528482 +step:5017 train loss:3.553427 +step:5018 train loss:3.524589 +step:5019 train loss:3.624661 +step:5020 train loss:3.589643 +step:5021 train loss:3.566868 +step:5022 train loss:3.654141 +step:5023 train loss:3.579871 +step:5024 train loss:3.546537 +step:5025 train loss:3.573712 +step:5026 train loss:3.566788 +step:5027 train loss:3.652521 +step:5028 train loss:3.594216 +step:5029 train loss:3.615742 +step:5030 train loss:3.624635 
+step:5031 train loss:3.617424 +step:5032 train loss:3.596777 +step:5033 train loss:3.541612 +step:5034 train loss:3.596016 +step:5035 train loss:3.571826 +step:5036 train loss:3.577185 +step:5037 train loss:3.609300 +step:5038 train loss:3.656035 +step:5039 train loss:3.530762 +step:5040 train loss:3.586018 +step:5041 train loss:3.656873 +step:5042 train loss:3.605739 +step:5043 train loss:3.634529 +step:5044 train loss:3.584506 +step:5045 train loss:3.600919 +step:5046 train loss:3.639261 +step:5047 train loss:3.703680 +step:5048 train loss:3.595532 +step:5049 train loss:3.613521 +step:5050 train loss:3.587401 +step:5051 train loss:3.653846 +step:5052 train loss:3.615166 +step:5053 train loss:3.639007 +step:5054 train loss:3.597968 +step:5055 train loss:3.599554 +step:5056 train loss:3.587621 +step:5057 train loss:3.606168 +step:5058 train loss:3.572286 +step:5059 train loss:3.557623 +step:5060 train loss:3.691225 +step:5061 train loss:3.592978 +step:5062 train loss:3.716653 +step:5063 train loss:3.591953 +step:5064 train loss:3.629890 +step:5065 train loss:3.678858 +step:5066 train loss:3.647324 +step:5067 train loss:3.609345 +step:5068 train loss:3.672730 +step:5069 train loss:3.602829 +step:5070 train loss:3.628637 +step:5071 train loss:3.651821 +step:5072 train loss:3.613869 +step:5073 train loss:3.569272 +step:5074 train loss:3.666032 +step:5075 train loss:3.700481 +step:5076 train loss:3.619415 +step:5077 train loss:3.577085 +step:5078 train loss:3.535060 +step:5079 train loss:3.564637 +step:5080 train loss:3.600375 +step:5081 train loss:3.604004 +step:5082 train loss:3.639282 +step:5083 train loss:3.617180 +step:5084 train loss:3.619533 +step:5085 train loss:3.587984 +step:5086 train loss:3.622789 +step:5087 train loss:3.595371 +step:5088 train loss:3.541276 +step:5089 train loss:3.671635 +step:5090 train loss:3.733846 +step:5091 train loss:3.619460 +step:5092 train loss:3.565061 +step:5093 train loss:3.572035 +step:5094 train loss:3.565650 +step:5095 train loss:3.597594 +step:5096 train loss:3.530504 +step:5097 train loss:3.571283 +step:5098 train loss:3.594646 +step:5099 train loss:3.615057 +step:5100 train loss:3.621938 +step:5101 train loss:3.639142 +step:5102 train loss:3.622814 +step:5103 train loss:3.693711 +step:5104 train loss:3.582862 +step:5105 train loss:3.640358 +step:5106 train loss:3.630656 +step:5107 train loss:3.599714 +step:5108 train loss:3.631438 +step:5109 train loss:3.628345 +step:5110 train loss:3.629925 +step:5111 train loss:3.604560 +step:5112 train loss:3.560186 +step:5113 train loss:3.581312 +step:5114 train loss:3.600279 +step:5115 train loss:3.625565 +step:5116 train loss:3.567056 +step:5117 train loss:3.601560 +step:5118 train loss:3.579265 +step:5119 train loss:3.616812 +step:5120 train loss:3.604484 +step:5121 train loss:3.613155 +step:5122 train loss:3.644231 +step:5123 train loss:3.637964 +step:5124 train loss:3.609240 +step:5125 train loss:3.598785 +step:5126 train loss:3.618925 +step:5127 train loss:3.668758 +step:5128 train loss:3.575127 +step:5129 train loss:3.618453 +step:5130 train loss:3.560534 +step:5131 train loss:3.587918 +step:5132 train loss:3.591488 +step:5133 train loss:3.564871 +step:5134 train loss:3.545775 +step:5135 train loss:3.610362 +step:5136 train loss:3.633096 +step:5137 train loss:3.553215 +step:5138 train loss:3.584824 +step:5139 train loss:3.621746 +step:5140 train loss:3.602950 +step:5141 train loss:3.639987 +step:5142 train loss:3.589683 +step:5143 train loss:3.596607 +step:5144 train loss:3.621885 +step:5145 train 
loss:3.594028 +step:5146 train loss:3.603447 +step:5147 train loss:3.573787 +step:5148 train loss:3.594757 +step:5149 train loss:3.572767 +step:5150 train loss:3.610200 +step:5151 train loss:3.622599 +step:5152 train loss:3.649021 +step:5153 train loss:3.616409 +step:5154 train loss:3.573745 +step:5155 train loss:3.596305 +step:5156 train loss:3.548790 +step:5157 train loss:3.570326 +step:5158 train loss:3.586753 +step:5159 train loss:3.571501 +step:5160 train loss:3.571735 +step:5161 train loss:3.620767 +step:5162 train loss:3.603662 +step:5163 train loss:3.558004 +step:5164 train loss:3.577593 +step:5165 train loss:3.607334 +step:5166 train loss:3.541636 +step:5167 train loss:3.534203 +step:5168 train loss:3.543026 +step:5169 train loss:3.533776 +step:5170 train loss:3.698488 +step:5171 train loss:3.571368 +step:5172 train loss:3.575160 +step:5173 train loss:3.554556 +step:5174 train loss:3.512004 +step:5175 train loss:3.579740 +step:5176 train loss:3.604501 +step:5177 train loss:3.592572 +step:5178 train loss:3.554567 +step:5179 train loss:3.600468 +step:5180 train loss:3.582996 +step:5181 train loss:3.557554 +step:5182 train loss:3.607755 +step:5183 train loss:3.562413 +step:5184 train loss:3.525256 +step:5185 train loss:3.542800 +step:5186 train loss:3.570932 +step:5187 train loss:3.634770 +step:5188 train loss:3.595662 +step:5189 train loss:3.548257 +step:5190 train loss:3.581288 +step:5191 train loss:3.644509 +step:5192 train loss:3.565464 +step:5193 train loss:3.565699 +step:5194 train loss:3.564377 +step:5195 train loss:3.559719 +step:5196 train loss:3.619489 +step:5197 train loss:3.618152 +step:5198 train loss:3.541792 +step:5199 train loss:3.571990 +step:5200 train loss:3.561316 +step:5201 train loss:3.588932 +step:5202 train loss:3.536169 +step:5203 train loss:3.515395 +step:5204 train loss:3.536305 +step:5205 train loss:3.655109 +step:5206 train loss:3.557938 +step:5207 train loss:3.568633 +step:5208 train loss:3.577740 +step:5209 train loss:3.550808 +step:5210 train loss:3.522769 +step:5211 train loss:3.596843 +step:5212 train loss:3.587194 +step:5213 train loss:3.589168 +step:5214 train loss:3.601823 +step:5215 train loss:3.629753 +step:5216 train loss:3.570991 +step:5217 train loss:3.568486 +step:5218 train loss:3.520286 +step:5219 train loss:3.608898 +step:5220 train loss:3.604786 +step:5221 train loss:3.544665 +step:5222 train loss:3.619214 +step:5223 train loss:3.578415 +step:5224 train loss:3.586766 +step:5225 train loss:3.510932 +step:5226 train loss:3.587645 +step:5227 train loss:3.548041 +step:5228 train loss:3.518643 +step:5229 train loss:3.546196 +step:5230 train loss:3.604503 +step:5231 train loss:3.515323 +step:5232 train loss:3.559545 +step:5233 train loss:3.547430 +step:5234 train loss:3.491786 +step:5235 train loss:3.547581 +step:5236 train loss:3.491791 +step:5237 train loss:3.548548 +step:5238 train loss:3.532014 +step:5239 train loss:3.587440 +step:5240 train loss:3.528635 +step:5241 train loss:3.530214 +step:5242 train loss:3.576494 +step:5243 train loss:3.578393 +step:5244 train loss:3.578647 +step:5245 train loss:3.578923 +step:5246 train loss:3.547183 +step:5247 train loss:3.633323 +step:5248 train loss:3.593161 +step:5249 train loss:3.599409 +step:5250 validation loss:3.529480 +step:5250 train loss:3.563643 +step:5251 train loss:3.597001 +step:5252 train loss:3.572008 +step:5253 train loss:3.532646 +step:5254 train loss:3.577107 +step:5255 train loss:3.523700 +step:5256 train loss:3.635401 +step:5257 train loss:3.576569 +step:5258 train loss:3.569334 
+step:5259 train loss:3.585562 +step:5260 train loss:3.558796 +step:5261 train loss:3.614534 +step:5262 train loss:3.601613 +step:5263 train loss:3.599744 +step:5264 train loss:3.557183 +step:5265 train loss:3.608613 +step:5266 train loss:3.579135 +step:5267 train loss:3.602344 +step:5268 train loss:3.595045 +step:5269 train loss:3.593637 +step:5270 train loss:3.564768 +step:5271 train loss:3.594182 +step:5272 train loss:3.633353 +step:5273 train loss:3.646934 +step:5274 train loss:3.633357 +step:5275 train loss:3.644490 +step:5276 train loss:3.652460 +step:5277 train loss:3.581040 +step:5278 train loss:3.595675 +step:5279 train loss:3.620433 +step:5280 train loss:3.623795 +step:5281 train loss:3.581316 +step:5282 train loss:3.541045 +step:5283 train loss:3.645377 +step:5284 train loss:3.561679 +step:5285 train loss:3.591182 +step:5286 train loss:3.548924 +step:5287 train loss:3.569532 +step:5288 train loss:3.579726 +step:5289 train loss:3.608152 +step:5290 train loss:3.597713 +step:5291 train loss:3.592279 +step:5292 train loss:3.637797 +step:5293 train loss:3.574209 +step:5294 train loss:3.558288 +step:5295 train loss:3.588452 +step:5296 train loss:3.557400 +step:5297 train loss:3.592215 +step:5298 train loss:3.546207 +step:5299 train loss:3.545529 +step:5300 train loss:3.562959 +step:5301 train loss:3.587667 +step:5302 train loss:3.557403 +step:5303 train loss:3.566685 +step:5304 train loss:3.553132 +step:5305 train loss:3.546025 +step:5306 train loss:3.617043 +step:5307 train loss:3.559393 +step:5308 train loss:3.571424 +step:5309 train loss:3.521899 +step:5310 train loss:3.574167 +step:5311 train loss:3.556006 +step:5312 train loss:3.546383 +step:5313 train loss:3.559603 +step:5314 train loss:3.559741 +step:5315 train loss:3.577386 +step:5316 train loss:3.577870 +step:5317 train loss:3.531656 +step:5318 train loss:3.615032 +step:5319 train loss:3.554014 +step:5320 train loss:3.599385 +step:5321 train loss:3.593664 +step:5322 train loss:3.605684 +step:5323 train loss:3.543805 +step:5324 train loss:3.549160 +step:5325 train loss:3.546239 +step:5326 train loss:3.565290 +step:5327 train loss:3.604666 +step:5328 train loss:3.583926 +step:5329 train loss:3.535321 +step:5330 train loss:3.544041 +step:5331 train loss:3.597175 +step:5332 train loss:3.600343 +step:5333 train loss:3.517419 +step:5334 train loss:3.534188 +step:5335 train loss:3.594752 +step:5336 train loss:3.660771 +step:5337 train loss:3.570292 +step:5338 train loss:3.533685 +step:5339 train loss:3.595787 +step:5340 train loss:3.624473 +step:5341 train loss:3.596962 +step:5342 train loss:3.653816 +step:5343 train loss:3.580673 +step:5344 train loss:3.547001 +step:5345 train loss:3.598079 +step:5346 train loss:3.532620 +step:5347 train loss:3.548169 +step:5348 train loss:3.606192 +step:5349 train loss:3.570886 +step:5350 train loss:3.553963 +step:5351 train loss:3.610394 +step:5352 train loss:3.586748 +step:5353 train loss:3.567066 +step:5354 train loss:3.541485 +step:5355 train loss:3.482454 +step:5356 train loss:3.608556 +step:5357 train loss:3.583909 +step:5358 train loss:3.706695 +step:5359 train loss:3.612946 +step:5360 train loss:3.574756 +step:5361 train loss:3.557207 +step:5362 train loss:3.545662 +step:5363 train loss:3.623368 +step:5364 train loss:3.618076 +step:5365 train loss:3.547378 +step:5366 train loss:3.619367 +step:5367 train loss:3.651243 +step:5368 train loss:3.605590 +step:5369 train loss:3.647155 +step:5370 train loss:3.605963 +step:5371 train loss:3.568337 +step:5372 train loss:3.576739 +step:5373 train 
loss:3.547580 +step:5374 train loss:3.548267 +step:5375 train loss:3.576796 +step:5376 train loss:3.525320 +step:5377 train loss:3.561916 +step:5378 train loss:3.596645 +step:5379 train loss:3.593287 +step:5380 train loss:3.592537 +step:5381 train loss:3.616521 +step:5382 train loss:3.627234 +step:5383 train loss:3.577723 +step:5384 train loss:3.481721 +step:5385 train loss:3.570238 +step:5386 train loss:3.572929 +step:5387 train loss:3.535189 +step:5388 train loss:3.566003 +step:5389 train loss:3.606209 +step:5390 train loss:3.592892 +step:5391 train loss:3.557017 +step:5392 train loss:3.613709 +step:5393 train loss:3.625211 +step:5394 train loss:3.599621 +step:5395 train loss:3.575286 +step:5396 train loss:3.633057 +step:5397 train loss:3.589011 +step:5398 train loss:3.587290 +step:5399 train loss:3.552520 +step:5400 train loss:3.551993 +step:5401 train loss:3.554604 +step:5402 train loss:3.583902 +step:5403 train loss:3.592009 +step:5404 train loss:3.592649 +step:5405 train loss:3.545503 +step:5406 train loss:3.521281 +step:5407 train loss:3.560379 +step:5408 train loss:3.554337 +step:5409 train loss:3.627403 +step:5410 train loss:3.585770 +step:5411 train loss:3.554136 +step:5412 train loss:3.567124 +step:5413 train loss:3.573458 +step:5414 train loss:3.587149 +step:5415 train loss:3.600710 +step:5416 train loss:3.585793 +step:5417 train loss:3.546323 +step:5418 train loss:3.604310 +step:5419 train loss:3.582205 +step:5420 train loss:3.544316 +step:5421 train loss:3.511415 +step:5422 train loss:3.549384 +step:5423 train loss:3.559092 +step:5424 train loss:3.544495 +step:5425 train loss:3.558956 +step:5426 train loss:3.601776 +step:5427 train loss:3.567019 +step:5428 train loss:3.610531 +step:5429 train loss:3.533912 +step:5430 train loss:3.567507 +step:5431 train loss:3.584106 +step:5432 train loss:3.578260 +step:5433 train loss:3.562477 +step:5434 train loss:3.588492 +step:5435 train loss:3.527709 +step:5436 train loss:3.564779 +step:5437 train loss:3.548160 +step:5438 train loss:3.599521 +step:5439 train loss:3.538500 +step:5440 train loss:3.547853 +step:5441 train loss:3.603467 +step:5442 train loss:3.555104 +step:5443 train loss:3.515251 +step:5444 train loss:3.584858 +step:5445 train loss:3.613476 +step:5446 train loss:3.631497 +step:5447 train loss:3.554421 +step:5448 train loss:3.576209 +step:5449 train loss:3.580798 +step:5450 train loss:3.669724 +step:5451 train loss:3.597256 +step:5452 train loss:3.619284 +step:5453 train loss:3.616673 +step:5454 train loss:3.593138 +step:5455 train loss:3.578290 +step:5456 train loss:3.580595 +step:5457 train loss:3.546903 +step:5458 train loss:3.562418 +step:5459 train loss:3.592424 +step:5460 train loss:3.605032 +step:5461 train loss:3.586544 +step:5462 train loss:3.596595 +step:5463 train loss:3.588685 +step:5464 train loss:3.560277 +step:5465 train loss:3.569907 +step:5466 train loss:3.571566 +step:5467 train loss:3.595702 +step:5468 train loss:3.620515 +step:5469 train loss:3.574917 +step:5470 train loss:3.592333 +step:5471 train loss:3.575369 +step:5472 train loss:3.562224 +step:5473 train loss:3.592265 +step:5474 train loss:3.566372 +step:5475 train loss:3.587980 +step:5476 train loss:3.600886 +step:5477 train loss:3.597503 +step:5478 train loss:3.600586 +step:5479 train loss:3.653854 +step:5480 train loss:3.614249 +step:5481 train loss:3.611665 +step:5482 train loss:3.572503 +step:5483 train loss:3.539145 +step:5484 train loss:3.576104 +step:5485 train loss:3.565334 +step:5486 train loss:3.567441 +step:5487 train loss:3.582365 
+step:5488 train loss:3.584707 +step:5489 train loss:3.540422 +step:5490 train loss:3.607389 +step:5491 train loss:3.601711 +step:5492 train loss:3.585175 +step:5493 train loss:3.648613 +step:5494 train loss:3.583080 +step:5495 train loss:3.566051 +step:5496 train loss:3.559155 +step:5497 train loss:3.617635 +step:5498 train loss:3.627742 +step:5499 train loss:3.576268 +step:5500 validation loss:3.515103 total_sharp:1.0996e-02 L1_sharp:3.7867e-02 L2_sharp:1.9264e-03 L3_sharp:1.1549e-03 L4_sharp:1.2254e-03 L5_sharp:1.4019e-03 L6_sharp:1.3246e-03 L7_sharp:1.6474e-03 L8_sharp:2.6106e-03 L9_sharp:2.2807e-03 L10_sharp:1.5401e-03 L11_sharp:1.4712e-03 L12_sharp:2.3260e-03 total_fnorm:2.2235e+00 total_l1_linf:1.9742e+04 total_spectral:2.2235e+00 L1_fnorm:4.9926e-01 L2_fnorm:5.1080e-01 L3_fnorm:4.9843e-01 L4_fnorm:4.9917e-01 L5_fnorm:4.8340e-01 L6_fnorm:5.0515e-01 L7_fnorm:5.0703e-01 L8_fnorm:5.0644e-01 L9_fnorm:5.1842e-01 L10_fnorm:5.4062e-01 L11_fnorm:5.4450e-01 L12_fnorm:5.3203e-01 L1_l1linf:6.2862e-01 L2_l1linf:5.7130e-01 L3_l1linf:6.0720e-01 L4_l1linf:5.7324e-01 L5_l1linf:5.0195e-01 L6_l1linf:5.7098e-01 L7_l1linf:5.3211e-01 L8_l1linf:5.5777e-01 L9_l1linf:5.3585e-01 L10_l1linf:5.6469e-01 L11_l1linf:5.5746e-01 L12_l1linf:5.6442e-01 L1_spectral:8.5038e-02 L2_spectral:8.2082e-02 L3_spectral:7.8613e-02 L4_spectral:8.5260e-02 L5_spectral:7.0682e-02 L6_spectral:8.3635e-02 L7_spectral:7.4594e-02 L8_spectral:7.2905e-02 L9_spectral:5.8569e-02 L10_spectral:4.9316e-02 L11_spectral:5.2790e-02 L12_spectral:6.8346e-02 v_norm:2.2235e+00 cos_v_-g_hvp:7.8759e-02 g_hvp_norm:3.7120e-01 cos_v_-g_t:9.4559e-02 g_t_norm:3.1120e-01 hv_norm:6.8109e-01 cos_v_hv:3.5897e-02 hg_norm:1.3405e+01 cos_g_hg:3.3222e-01 v_par:1.0471e-02 v_perp:2.2235e+00 L1_cos_v_neg_g:1.6535e-01 L1_v_norm:4.9926e-01 L2_cos_v_neg_g:8.1677e-02 L2_v_norm:5.1080e-01 L3_cos_v_neg_g:5.9741e-02 L3_v_norm:4.9843e-01 L4_cos_v_neg_g:7.0068e-02 L4_v_norm:4.9917e-01 L5_cos_v_neg_g:4.8059e-02 L5_v_norm:4.8340e-01 L6_cos_v_neg_g:6.6434e-02 L6_v_norm:5.0515e-01 L7_cos_v_neg_g:7.8788e-02 L7_v_norm:5.0703e-01 L8_cos_v_neg_g:7.5108e-02 L8_v_norm:5.0644e-01 L9_cos_v_neg_g:7.3468e-02 L9_v_norm:5.1842e-01 L10_cos_v_neg_g:8.9036e-02 L10_v_norm:5.4062e-01 L11_cos_v_neg_g:9.3345e-02 L11_v_norm:5.4450e-01 L12_cos_v_neg_g:1.4181e-01 L12_v_norm:5.3203e-01 +step:5500 train loss:3.585586 +step:5501 train loss:3.600716 +step:5502 train loss:3.560579 +step:5503 train loss:3.662070 +step:5504 train loss:3.591555 +step:5505 train loss:3.554869 +step:5506 train loss:3.585156 +step:5507 train loss:3.578326 +step:5508 train loss:3.567317 +step:5509 train loss:3.592582 +step:5510 train loss:3.597853 +step:5511 train loss:3.523290 +step:5512 train loss:3.564682 +step:5513 train loss:3.670929 +step:5514 train loss:3.548053 +step:5515 train loss:3.591527 +step:5516 train loss:3.644899 +step:5517 train loss:3.572470 +step:5518 train loss:3.579011 +step:5519 train loss:3.593876 +step:5520 train loss:3.569037 +step:5521 train loss:3.593137 +step:5522 train loss:3.587038 +step:5523 train loss:3.580575 +step:5524 train loss:3.630537 +step:5525 train loss:3.560560 +step:5526 train loss:3.609720 +step:5527 train loss:3.588699 +step:5528 train loss:3.572666 +step:5529 train loss:3.566165 +step:5530 train loss:3.653055 +step:5531 train loss:3.748571 +step:5532 train loss:3.562899 +step:5533 train loss:3.572451 +step:5534 train loss:3.605590 +step:5535 train loss:3.578145 +step:5536 train loss:3.580179 +step:5537 train loss:3.584396 +step:5538 train loss:3.580922 +step:5539 train loss:3.541425 
+step:5540 train loss:3.594940 +step:5541 train loss:3.594663 +step:5542 train loss:3.594725 +step:5543 train loss:3.590330 +step:5544 train loss:3.576441 +step:5545 train loss:3.656685 +step:5546 train loss:3.561198 +step:5547 train loss:3.592019 +step:5548 train loss:3.573262 +step:5549 train loss:3.502853 +step:5550 train loss:3.554488 +step:5551 train loss:3.563155 +step:5552 train loss:3.573094 +step:5553 train loss:3.586081 +step:5554 train loss:3.603690 +step:5555 train loss:3.586458 +step:5556 train loss:3.565845 +step:5557 train loss:3.554805 +step:5558 train loss:3.573587 +step:5559 train loss:3.585537 +step:5560 train loss:3.535183 +step:5561 train loss:3.571336 +step:5562 train loss:3.578117 +step:5563 train loss:3.606276 +step:5564 train loss:3.625786 +step:5565 train loss:3.531556 +step:5566 train loss:3.565232 +step:5567 train loss:3.544899 +step:5568 train loss:3.576331 +step:5569 train loss:3.581508 +step:5570 train loss:3.600674 +step:5571 train loss:3.596886 +step:5572 train loss:3.573892 +step:5573 train loss:3.597648 +step:5574 train loss:3.593989 +step:5575 train loss:3.547774 +step:5576 train loss:3.552114 +step:5577 train loss:3.600069 +step:5578 train loss:3.566497 +step:5579 train loss:3.577035 +step:5580 train loss:3.599655 +step:5581 train loss:3.548780 +step:5582 train loss:3.542669 +step:5583 train loss:3.559649 +step:5584 train loss:3.591306 +step:5585 train loss:3.540120 +step:5586 train loss:3.587241 +step:5587 train loss:3.581788 +step:5588 train loss:3.560756 +step:5589 train loss:3.560162 +step:5590 train loss:3.546262 +step:5591 train loss:3.571074 +step:5592 train loss:3.626050 +step:5593 train loss:3.631559 +step:5594 train loss:3.583896 +step:5595 train loss:3.567486 +step:5596 train loss:3.625501 +step:5597 train loss:3.597845 +step:5598 train loss:3.611526 +step:5599 train loss:3.704817 +step:5600 train loss:3.600694 +step:5601 train loss:3.649400 +step:5602 train loss:3.569199 +step:5603 train loss:3.591390 +step:5604 train loss:3.516413 +step:5605 train loss:3.515562 +step:5606 train loss:3.527969 +step:5607 train loss:3.541589 +step:5608 train loss:3.609383 +step:5609 train loss:3.607578 +step:5610 train loss:3.634130 +step:5611 train loss:3.674052 +step:5612 train loss:3.602417 +step:5613 train loss:3.571737 +step:5614 train loss:3.584854 +step:5615 train loss:3.642782 +step:5616 train loss:3.569218 +step:5617 train loss:3.531291 +step:5618 train loss:3.544935 +step:5619 train loss:3.587088 +step:5620 train loss:3.584064 +step:5621 train loss:3.601603 +step:5622 train loss:3.559160 +step:5623 train loss:3.617142 +step:5624 train loss:3.549698 +step:5625 train loss:3.582385 +step:5626 train loss:3.575836 +step:5627 train loss:3.598995 +step:5628 train loss:3.613471 +step:5629 train loss:3.591342 +step:5630 train loss:3.581921 +step:5631 train loss:3.596898 +step:5632 train loss:3.619497 +step:5633 train loss:3.582621 +step:5634 train loss:3.564458 +step:5635 train loss:3.586839 +step:5636 train loss:3.599304 +step:5637 train loss:3.559297 +step:5638 train loss:3.551988 +step:5639 train loss:3.530437 +step:5640 train loss:3.565271 +step:5641 train loss:3.619226 +step:5642 train loss:3.567126 +step:5643 train loss:3.580867 +step:5644 train loss:3.616244 +step:5645 train loss:3.570666 +step:5646 train loss:3.569473 +step:5647 train loss:3.526133 +step:5648 train loss:3.594419 +step:5649 train loss:3.596250 +step:5650 train loss:3.553503 +step:5651 train loss:3.649324 +step:5652 train loss:3.532479 +step:5653 train loss:3.529155 +step:5654 train 
loss:3.584852 +step:5655 train loss:3.576190 +step:5656 train loss:3.601757 +step:5657 train loss:3.574284 +step:5658 train loss:3.596581 +step:5659 train loss:3.610413 +step:5660 train loss:3.549862 +step:5661 train loss:3.564239 +step:5662 train loss:3.668712 +step:5663 train loss:3.576890 +step:5664 train loss:3.562854 +step:5665 train loss:3.572912 +step:5666 train loss:3.557526 +step:5667 train loss:3.553389 +step:5668 train loss:3.552814 +step:5669 train loss:3.585385 +step:5670 train loss:3.574976 +step:5671 train loss:3.541401 +step:5672 train loss:3.569943 +step:5673 train loss:3.541910 +step:5674 train loss:3.565391 +step:5675 train loss:3.556743 +step:5676 train loss:3.548985 +step:5677 train loss:3.560031 +step:5678 train loss:3.579370 +step:5679 train loss:3.558091 +step:5680 train loss:3.541914 +step:5681 train loss:3.583988 +step:5682 train loss:3.593662 +step:5683 train loss:3.557208 +step:5684 train loss:3.617661 +step:5685 train loss:3.653057 +step:5686 train loss:3.534395 +step:5687 train loss:3.556166 +step:5688 train loss:3.553568 +step:5689 train loss:3.610596 +step:5690 train loss:3.515135 +step:5691 train loss:3.523413 +step:5692 train loss:3.551945 +step:5693 train loss:3.503149 +step:5694 train loss:3.579782 +step:5695 train loss:3.567179 +step:5696 train loss:3.506525 +step:5697 train loss:3.575016 +step:5698 train loss:3.570817 +step:5699 train loss:3.612662 +step:5700 train loss:3.550227 +step:5701 train loss:3.594699 +step:5702 train loss:3.557490 +step:5703 train loss:3.577494 +step:5704 train loss:3.552105 +step:5705 train loss:3.583548 +step:5706 train loss:3.582780 +step:5707 train loss:3.593008 +step:5708 train loss:3.558493 +step:5709 train loss:3.637029 +step:5710 train loss:3.627540 +step:5711 train loss:3.553635 +step:5712 train loss:3.585479 +step:5713 train loss:3.559382 +step:5714 train loss:3.552413 +step:5715 train loss:3.576209 +step:5716 train loss:3.584480 +step:5717 train loss:3.606493 +step:5718 train loss:3.553625 +step:5719 train loss:3.561884 +step:5720 train loss:3.547538 +step:5721 train loss:3.532051 +step:5722 train loss:3.614223 +step:5723 train loss:3.542481 +step:5724 train loss:3.522392 +step:5725 train loss:3.604609 +step:5726 train loss:3.589864 +step:5727 train loss:3.543067 +step:5728 train loss:3.590454 +step:5729 train loss:3.595549 +step:5730 train loss:3.572213 +step:5731 train loss:3.522258 +step:5732 train loss:3.557185 +step:5733 train loss:3.564565 +step:5734 train loss:3.549044 +step:5735 train loss:3.666730 +step:5736 train loss:3.529179 +step:5737 train loss:3.565926 +step:5738 train loss:3.582672 +step:5739 train loss:3.567096 +step:5740 train loss:3.649086 +step:5741 train loss:3.565743 +step:5742 train loss:3.534733 +step:5743 train loss:3.544824 +step:5744 train loss:3.558617 +step:5745 train loss:3.555213 +step:5746 train loss:3.614635 +step:5747 train loss:3.521268 +step:5748 train loss:3.557841 +step:5749 train loss:3.526490 +step:5750 validation loss:3.510717 +step:5750 train loss:3.542665 +step:5751 train loss:3.572300 +step:5752 train loss:3.600334 +step:5753 train loss:3.546318 +step:5754 train loss:3.597528 +step:5755 train loss:3.557274 +step:5756 train loss:3.627017 +step:5757 train loss:3.505801 +step:5758 train loss:3.557798 +step:5759 train loss:3.567892 +step:5760 train loss:3.557769 +step:5761 train loss:3.643138 +step:5762 train loss:3.616473 +step:5763 train loss:3.619470 +step:5764 train loss:3.579060 +step:5765 train loss:3.510184 +step:5766 train loss:3.579198 +step:5767 train loss:3.535424 
+step:5768 train loss:3.607504 +step:5769 train loss:3.604960 +step:5770 train loss:3.586048 +step:5771 train loss:3.595700 +step:5772 train loss:3.552889 +step:5773 train loss:3.464572 +step:5774 train loss:3.517526 +step:5775 train loss:3.519201 +step:5776 train loss:3.577143 +step:5777 train loss:3.611666 +step:5778 train loss:3.678719 +step:5779 train loss:3.635195 +step:5780 train loss:3.621796 +step:5781 train loss:3.630226 +step:5782 train loss:3.586334 +step:5783 train loss:3.561397 +step:5784 train loss:3.526377 +step:5785 train loss:3.574016 +step:5786 train loss:3.570871 +step:5787 train loss:3.535999 +step:5788 train loss:3.581115 +step:5789 train loss:3.571149 +step:5790 train loss:3.529973 +step:5791 train loss:3.579221 +step:5792 train loss:3.606510 +step:5793 train loss:3.562417 +step:5794 train loss:3.582453 +step:5795 train loss:3.569838 +step:5796 train loss:3.574156 +step:5797 train loss:3.577687 +step:5798 train loss:3.609434 +step:5799 train loss:3.547627 +step:5800 train loss:3.578780 +step:5801 train loss:3.549302 +step:5802 train loss:3.573776 +step:5803 train loss:3.553218 +step:5804 train loss:3.563222 +step:5805 train loss:3.534487 +step:5806 train loss:3.590726 +step:5807 train loss:3.543006 +step:5808 train loss:3.561287 +step:5809 train loss:3.501760 +step:5810 train loss:3.621203 +step:5811 train loss:3.577344 +step:5812 train loss:3.614652 +step:5813 train loss:3.577712 +step:5814 train loss:3.720481 +step:5815 train loss:3.653897 +step:5816 train loss:3.579318 +step:5817 train loss:3.612565 +step:5818 train loss:3.514768 +step:5819 train loss:3.561388 +step:5820 train loss:3.598296 +step:5821 train loss:3.526928 +step:5822 train loss:3.597331 +step:5823 train loss:3.568279 +step:5824 train loss:3.558636 +step:5825 train loss:3.537714 +step:5826 train loss:3.599379 +step:5827 train loss:3.539957 +step:5828 train loss:3.566296 +step:5829 train loss:3.562562 +step:5830 train loss:3.562331 +step:5831 train loss:3.585575 +step:5832 train loss:3.467856 +step:5833 train loss:3.608961 +step:5834 train loss:3.580517 +step:5835 train loss:3.533965 +step:5836 train loss:3.586547 +step:5837 train loss:3.560902 +step:5838 train loss:3.549100 +step:5839 train loss:3.548504 +step:5840 train loss:3.610723 +step:5841 train loss:3.604798 +step:5842 train loss:3.577250 +step:5843 train loss:3.534865 +step:5844 train loss:3.536659 +step:5845 train loss:3.570218 +step:5846 train loss:3.536539 +step:5847 train loss:3.508503 +step:5848 train loss:3.559318 +step:5849 train loss:3.509924 +step:5850 train loss:3.594523 +step:5851 train loss:3.622503 +step:5852 train loss:3.599430 +step:5853 train loss:3.596139 +step:5854 train loss:3.578532 +step:5855 train loss:3.545834 +step:5856 train loss:3.576162 +step:5857 train loss:3.608671 +step:5858 train loss:3.556895 +step:5859 train loss:3.582035 +step:5860 train loss:3.565615 +step:5861 train loss:3.564798 +step:5862 train loss:3.532170 +step:5863 train loss:3.539614 +step:5864 train loss:3.587837 +step:5865 train loss:3.531688 +step:5866 train loss:3.530790 +step:5867 train loss:3.536627 +step:5868 train loss:3.527113 +step:5869 train loss:3.558294 +step:5870 train loss:3.559783 +step:5871 train loss:3.576469 +step:5872 train loss:3.550945 +step:5873 train loss:3.518942 +step:5874 train loss:3.557307 +step:5875 train loss:3.549827 +step:5876 train loss:3.571305 +step:5877 train loss:3.544276 +step:5878 train loss:3.575157 +step:5879 train loss:3.575027 +step:5880 train loss:3.579643 +step:5881 train loss:3.600834 +step:5882 train 
loss:3.522125 +step:5883 train loss:3.563313 +step:5884 train loss:3.547161 +step:5885 train loss:3.553573 +step:5886 train loss:3.555953 +step:5887 train loss:3.536422 +step:5888 train loss:3.576464 +step:5889 train loss:3.550339 +step:5890 train loss:3.504943 +step:5891 train loss:3.499891 +step:5892 train loss:3.560377 +step:5893 train loss:3.543906 +step:5894 train loss:3.515841 +step:5895 train loss:3.571951 +step:5896 train loss:3.565322 +step:5897 train loss:3.513689 +step:5898 train loss:3.564328 +step:5899 train loss:3.540924 +step:5900 train loss:3.569730 +step:5901 train loss:3.524503 +step:5902 train loss:3.532995 +step:5903 train loss:3.627657 +step:5904 train loss:3.564024 +step:5905 train loss:3.587956 +step:5906 train loss:3.544110 +step:5907 train loss:3.574397 +step:5908 train loss:3.538135 +step:5909 train loss:3.571041 +step:5910 train loss:3.525439 +step:5911 train loss:3.645542 +step:5912 train loss:3.663651 +step:5913 train loss:3.571239 +step:5914 train loss:3.547188 +step:5915 train loss:3.544849 +step:5916 train loss:3.564229 +step:5917 train loss:3.543937 +step:5918 train loss:3.503832 +step:5919 train loss:3.573655 +step:5920 train loss:3.535494 +step:5921 train loss:3.522878 +step:5922 train loss:3.538315 +step:5923 train loss:3.583285 +step:5924 train loss:3.587656 +step:5925 train loss:3.623355 +step:5926 train loss:3.548712 +step:5927 train loss:3.553770 +step:5928 train loss:3.577068 +step:5929 train loss:3.564700 +step:5930 train loss:3.571328 +step:5931 train loss:3.585618 +step:5932 train loss:3.576502 +step:5933 train loss:3.623148 +step:5934 train loss:3.538261 +step:5935 train loss:3.528548 +step:5936 train loss:3.532234 +step:5937 train loss:3.511333 +step:5938 train loss:3.548636 +step:5939 train loss:3.576145 +step:5940 train loss:3.606766 +step:5941 train loss:3.553149 +step:5942 train loss:3.604422 +step:5943 train loss:3.562160 +step:5944 train loss:3.535067 +step:5945 train loss:3.546160 +step:5946 train loss:3.539113 +step:5947 train loss:3.531132 +step:5948 train loss:3.615476 +step:5949 train loss:3.550719 +step:5950 train loss:3.558077 +step:5951 train loss:3.607381 +step:5952 train loss:3.469019 +step:5953 train loss:3.613255 +step:5954 train loss:3.529246 +step:5955 train loss:3.510946 +step:5956 train loss:3.583155 +step:5957 train loss:3.519882 +step:5958 train loss:3.587009 +step:5959 train loss:3.510022 +step:5960 train loss:3.530123 +step:5961 train loss:3.535647 +step:5962 train loss:3.530754 +step:5963 train loss:3.610736 +step:5964 train loss:3.550233 +step:5965 train loss:3.573388 +step:5966 train loss:3.551696 +step:5967 train loss:3.546687 +step:5968 train loss:3.558292 +step:5969 train loss:3.557884 +step:5970 train loss:3.574179 +step:5971 train loss:3.555996 +step:5972 train loss:3.558885 +step:5973 train loss:3.570447 +step:5974 train loss:3.543087 +step:5975 train loss:3.554394 +step:5976 train loss:3.571738 +step:5977 train loss:3.527689 +step:5978 train loss:3.607554 +step:5979 train loss:3.653785 +step:5980 train loss:3.598309 +step:5981 train loss:3.563541 +step:5982 train loss:3.586719 +step:5983 train loss:3.555967 +step:5984 train loss:3.541560 +step:5985 train loss:3.580189 +step:5986 train loss:3.561419 +step:5987 train loss:3.576540 +step:5988 train loss:3.518836 +step:5989 train loss:3.560541 +step:5990 train loss:3.549003 +step:5991 train loss:3.603363 +step:5992 train loss:3.573062 +step:5993 train loss:3.587640 +step:5994 train loss:3.606555 +step:5995 train loss:3.565483 +step:5996 train loss:3.544734 
+step:5997 train loss:3.506254 +step:5998 train loss:3.580584 +step:5999 train loss:3.557779 +step:6000 validation loss:3.507333 total_sharp:6.0076e-03 L1_sharp:5.3010e-03 L2_sharp:5.7573e-04 L3_sharp:1.3996e-03 L4_sharp:1.3244e-03 L5_sharp:7.8986e-04 L6_sharp:9.4900e-04 L7_sharp:1.3316e-03 L8_sharp:1.9969e-03 L9_sharp:1.9316e-03 L10_sharp:1.0961e-03 L11_sharp:1.0160e-03 L12_sharp:3.4157e-03 total_fnorm:2.2173e+00 total_l1_linf:1.9684e+04 total_spectral:2.2173e+00 L1_fnorm:5.0134e-01 L2_fnorm:5.0041e-01 L3_fnorm:5.0067e-01 L4_fnorm:4.9945e-01 L5_fnorm:4.9071e-01 L6_fnorm:5.0206e-01 L7_fnorm:5.0753e-01 L8_fnorm:5.0776e-01 L9_fnorm:5.1311e-01 L10_fnorm:5.3336e-01 L11_fnorm:5.3794e-01 L12_fnorm:5.2664e-01 L1_l1linf:6.4435e-01 L2_l1linf:5.9083e-01 L3_l1linf:6.4913e-01 L4_l1linf:5.9343e-01 L5_l1linf:5.2374e-01 L6_l1linf:5.8241e-01 L7_l1linf:6.0113e-01 L8_l1linf:5.8381e-01 L9_l1linf:5.3393e-01 L10_l1linf:5.4967e-01 L11_l1linf:5.7028e-01 L12_l1linf:5.9502e-01 L1_spectral:8.1848e-02 L2_spectral:7.9990e-02 L3_spectral:8.5615e-02 L4_spectral:8.7660e-02 L5_spectral:7.0919e-02 L6_spectral:8.3711e-02 L7_spectral:8.0628e-02 L8_spectral:7.3444e-02 L9_spectral:6.3523e-02 L10_spectral:4.9699e-02 L11_spectral:5.3797e-02 L12_spectral:7.4620e-02 v_norm:2.2173e+00 cos_v_-g_hvp:6.6297e-02 g_hvp_norm:3.6991e-01 cos_v_-g_t:7.9171e-02 g_t_norm:3.1223e-01 hv_norm:4.6362e-01 cos_v_hv:2.8732e-02 hg_norm:5.0337e+00 cos_g_hg:6.2627e-01 v_par:9.1868e-03 v_perp:2.2173e+00 L1_cos_v_neg_g:1.2764e-01 L1_v_norm:5.0134e-01 L2_cos_v_neg_g:5.8230e-02 L2_v_norm:5.0041e-01 L3_cos_v_neg_g:4.8893e-02 L3_v_norm:5.0067e-01 L4_cos_v_neg_g:5.8214e-02 L4_v_norm:4.9945e-01 L5_cos_v_neg_g:3.8074e-02 L5_v_norm:4.9071e-01 L6_cos_v_neg_g:5.3493e-02 L6_v_norm:5.0206e-01 L7_cos_v_neg_g:6.3977e-02 L7_v_norm:5.0753e-01 L8_cos_v_neg_g:6.4728e-02 L8_v_norm:5.0776e-01 L9_cos_v_neg_g:6.2413e-02 L9_v_norm:5.1311e-01 L10_cos_v_neg_g:7.2969e-02 L10_v_norm:5.3336e-01 L11_cos_v_neg_g:8.4975e-02 L11_v_norm:5.3794e-01 L12_cos_v_neg_g:1.3636e-01 L12_v_norm:5.2664e-01 +step:6000 train loss:3.538389 +step:6001 train loss:3.545313 +step:6002 train loss:3.570114 +step:6003 train loss:3.490779 +step:6004 train loss:3.451734 +step:6005 train loss:3.486489 +step:6006 train loss:3.579401 +step:6007 train loss:3.531757 +step:6008 train loss:3.566596 +step:6009 train loss:3.609031 +step:6010 train loss:3.569088 +step:6011 train loss:3.552474 +step:6012 train loss:3.546314 +step:6013 train loss:3.566495 +step:6014 train loss:3.516175 +step:6015 train loss:3.491868 +step:6016 train loss:3.526515 +step:6017 train loss:3.534341 +step:6018 train loss:3.574557 +step:6019 train loss:3.529657 +step:6020 train loss:3.585900 +step:6021 train loss:3.568974 +step:6022 train loss:3.565974 +step:6023 train loss:3.538007 +step:6024 train loss:3.616918 +step:6025 train loss:3.525348 +step:6026 train loss:3.595161 +step:6027 train loss:3.539345 +step:6028 train loss:3.589759 +step:6029 train loss:3.662013 +step:6030 train loss:3.538323 +step:6031 train loss:3.516952 +step:6032 train loss:3.594195 +step:6033 train loss:3.561362 +step:6034 train loss:3.559939 +step:6035 train loss:3.611531 +step:6036 train loss:3.560983 +step:6037 train loss:3.561173 +step:6038 train loss:3.537262 +step:6039 train loss:3.603879 +step:6040 train loss:3.527803 +step:6041 train loss:3.595947 +step:6042 train loss:3.522759 +step:6043 train loss:3.625162 +step:6044 train loss:3.620841 +step:6045 train loss:3.568325 +step:6046 train loss:3.633594 +step:6047 train loss:3.782292 +step:6048 train loss:3.584538 
+step:6049 train loss:3.587770 +step:6050 train loss:3.562419 +step:6051 train loss:3.545071 +step:6052 train loss:3.617651 +step:6053 train loss:3.563736 +step:6054 train loss:3.599272 +step:6055 train loss:3.711344 +step:6056 train loss:3.706694 +step:6057 train loss:3.509429 +step:6058 train loss:3.563740 +step:6059 train loss:3.604188 +step:6060 train loss:3.590904 +step:6061 train loss:3.589171 +step:6062 train loss:3.568550 +step:6063 train loss:3.562184 +step:6064 train loss:3.517543 +step:6065 train loss:3.586483 +step:6066 train loss:3.562430 +step:6067 train loss:3.579889 +step:6068 train loss:3.494863 +step:6069 train loss:3.604233 +step:6070 train loss:3.542456 +step:6071 train loss:3.584318 +step:6072 train loss:3.620230 +step:6073 train loss:3.583601 +step:6074 train loss:3.560486 +step:6075 train loss:3.649359 +step:6076 train loss:3.581994 +step:6077 train loss:3.498461 +step:6078 train loss:3.575515 +step:6079 train loss:3.593217 +step:6080 train loss:3.606556 +step:6081 train loss:3.595368 +step:6082 train loss:3.547505 +step:6083 train loss:3.584164 +step:6084 train loss:3.522800 +step:6085 train loss:3.547540 +step:6086 train loss:3.576311 +step:6087 train loss:3.547252 +step:6088 train loss:3.515721 +step:6089 train loss:3.491286 +step:6090 train loss:3.560397 +step:6091 train loss:3.551222 +step:6092 train loss:3.525842 +step:6093 train loss:3.542785 +step:6094 train loss:3.587421 +step:6095 train loss:3.581000 +step:6096 train loss:3.572306 +step:6097 train loss:3.538603 +step:6098 train loss:3.560310 +step:6099 train loss:3.534934 +step:6100 train loss:3.538024 +step:6101 train loss:3.619480 +step:6102 train loss:3.575768 +step:6103 train loss:3.573695 +step:6104 train loss:3.537809 +step:6105 train loss:3.600199 +step:6106 train loss:3.570566 +step:6107 train loss:3.566921 +step:6108 train loss:3.606479 +step:6109 train loss:3.594591 +step:6110 train loss:3.596768 +step:6111 train loss:3.659404 +step:6112 train loss:3.785327 +step:6113 train loss:3.535552 +step:6114 train loss:3.600830 +step:6115 train loss:3.548007 +step:6116 train loss:3.566163 +step:6117 train loss:3.535917 +step:6118 train loss:3.550669 +step:6119 train loss:3.571514 +step:6120 train loss:3.543874 +step:6121 train loss:3.582825 +step:6122 train loss:3.539750 +step:6123 train loss:3.562550 +step:6124 train loss:3.567733 +step:6125 train loss:3.549135 +step:6126 train loss:3.558956 +step:6127 train loss:3.584052 +step:6128 train loss:3.540926 +step:6129 train loss:3.716434 +step:6130 train loss:3.759633 +step:6131 train loss:3.538397 +step:6132 train loss:3.582409 +step:6133 train loss:3.613865 +step:6134 train loss:3.608222 +step:6135 train loss:3.631258 +step:6136 train loss:3.632222 +step:6137 train loss:3.654877 +step:6138 train loss:3.554754 +step:6139 train loss:3.542929 +step:6140 train loss:3.555406 +step:6141 train loss:3.602925 +step:6142 train loss:3.563574 +step:6143 train loss:3.673062 +step:6144 train loss:3.654699 +step:6145 train loss:3.528505 +step:6146 train loss:3.504457 +step:6147 train loss:3.572518 +step:6148 train loss:3.501290 +step:6149 train loss:3.576042 +step:6150 train loss:3.570588 +step:6151 train loss:3.556067 +step:6152 train loss:3.533872 +step:6153 train loss:3.546876 +step:6154 train loss:3.591591 +step:6155 train loss:3.605834 +step:6156 train loss:3.561503 +step:6157 train loss:3.588768 +step:6158 train loss:3.602639 +step:6159 train loss:3.559415 +step:6160 train loss:3.564006 +step:6161 train loss:3.581021 +step:6162 train loss:3.536098 +step:6163 train 
loss:3.546866 +step:6164 train loss:3.636976 +step:6165 train loss:3.613652 +step:6166 train loss:3.592264 +step:6167 train loss:3.525831 +step:6168 train loss:3.557750 +step:6169 train loss:3.566849 +step:6170 train loss:3.577813 +step:6171 train loss:3.534169 +step:6172 train loss:3.572782 +step:6173 train loss:3.546597 +step:6174 train loss:3.549805 +step:6175 train loss:3.575307 +step:6176 train loss:3.582139 +step:6177 train loss:3.540638 +step:6178 train loss:3.549161 +step:6179 train loss:3.560466 +step:6180 train loss:3.579576 +step:6181 train loss:3.543512 +step:6182 train loss:3.560206 +step:6183 train loss:3.526330 +step:6184 train loss:3.516917 +step:6185 train loss:3.569324 +step:6186 train loss:3.537623 +step:6187 train loss:3.555360 +step:6188 train loss:3.565350 +step:6189 train loss:3.536400 +step:6190 train loss:3.564824 +step:6191 train loss:3.536291 +step:6192 train loss:3.583248 +step:6193 train loss:3.592225 +step:6194 train loss:3.525114 +step:6195 train loss:3.479853 +step:6196 train loss:3.572044 +step:6197 train loss:3.535578 +step:6198 train loss:3.591233 +step:6199 train loss:3.577613 +step:6200 train loss:3.556058 +step:6201 train loss:3.530916 +step:6202 train loss:3.566343 +step:6203 train loss:3.562357 +step:6204 train loss:3.513563 +step:6205 train loss:3.510112 +step:6206 train loss:3.562407 +step:6207 train loss:3.551360 +step:6208 train loss:3.556986 +step:6209 train loss:3.511579 +step:6210 train loss:3.547796 +step:6211 train loss:3.499561 +step:6212 train loss:3.540176 +step:6213 train loss:3.531343 +step:6214 train loss:3.703379 +step:6215 train loss:3.539316 +step:6216 train loss:3.549424 +step:6217 train loss:3.533881 +step:6218 train loss:3.511938 +step:6219 train loss:3.493752 +step:6220 train loss:3.535110 +step:6221 train loss:3.571658 +step:6222 train loss:3.554451 +step:6223 train loss:3.552228 +step:6224 train loss:3.532373 +step:6225 train loss:3.555057 +step:6226 train loss:3.554762 +step:6227 train loss:3.516726 +step:6228 train loss:3.528838 +step:6229 train loss:3.541485 +step:6230 train loss:3.581453 +step:6231 train loss:3.541861 +step:6232 train loss:3.550682 +step:6233 train loss:3.589211 +step:6234 train loss:3.570375 +step:6235 train loss:3.513552 +step:6236 train loss:3.516827 +step:6237 train loss:3.514534 +step:6238 train loss:3.549632 +step:6239 train loss:3.568555 +step:6240 train loss:3.541449 +step:6241 train loss:3.510043 +step:6242 train loss:3.578867 +step:6243 train loss:3.527380 +step:6244 train loss:3.570516 +step:6245 train loss:3.468062 +step:6246 train loss:3.491117 +step:6247 train loss:3.568357 +step:6248 train loss:3.530809 +step:6249 train loss:3.529016 +step:6250 validation loss:3.496027 +step:6250 train loss:3.563157 +step:6251 train loss:3.615161 +step:6252 train loss:3.719412 +step:6253 train loss:3.524333 +step:6254 train loss:3.516677 +step:6255 train loss:3.578501 +step:6256 train loss:3.588430 +step:6257 train loss:3.580010 +step:6258 train loss:3.566252 +step:6259 train loss:3.558483 +step:6260 train loss:3.603933 +step:6261 train loss:3.546302 +step:6262 train loss:3.621423 +step:6263 train loss:3.515985 +step:6264 train loss:3.680371 +step:6265 train loss:3.670360 +step:6266 train loss:3.562620 +step:6267 train loss:3.517919 +step:6268 train loss:3.579042 +step:6269 train loss:3.582994 +step:6270 train loss:3.592497 +step:6271 train loss:3.565913 +step:6272 train loss:3.531729 +step:6273 train loss:3.475765 +step:6274 train loss:3.539205 +step:6275 train loss:3.490052 +step:6276 train loss:3.526676 
+step:6277 train loss:3.489178 +step:6278 train loss:3.546138 +step:6279 train loss:3.529719 +step:6280 train loss:3.523885 +step:6281 train loss:3.534101 +step:6282 train loss:3.661350 +step:6283 train loss:3.767036 +step:6284 train loss:3.490761 +step:6285 train loss:3.502570 +step:6286 train loss:3.540444 +step:6287 train loss:3.524242 +step:6288 train loss:3.536803 +step:6289 train loss:3.530967 +step:6290 train loss:3.566998 +step:6291 train loss:3.535861 +step:6292 train loss:3.578684 +step:6293 train loss:3.555892 +step:6294 train loss:3.543142 +step:6295 train loss:3.550755 +step:6296 train loss:3.534669 +step:6297 train loss:3.532528 +step:6298 train loss:3.489387 +step:6299 train loss:3.566625 +step:6300 train loss:3.518860 +step:6301 train loss:3.553843 +step:6302 train loss:3.567902 +step:6303 train loss:3.560004 +step:6304 train loss:3.531514 +step:6305 train loss:3.507465 +step:6306 train loss:3.574607 +step:6307 train loss:3.625080 +step:6308 train loss:3.563099 +step:6309 train loss:3.632658 +step:6310 train loss:3.605555 +step:6311 train loss:3.523505 +step:6312 train loss:3.616338 +step:6313 train loss:3.568546 +step:6314 train loss:3.511036 +step:6315 train loss:3.505428 +step:6316 train loss:3.562905 +step:6317 train loss:3.506538 +step:6318 train loss:3.517172 +step:6319 train loss:3.557979 +step:6320 train loss:3.514506 +step:6321 train loss:3.559835 +step:6322 train loss:3.590395 +step:6323 train loss:3.585676 +step:6324 train loss:3.534529 +step:6325 train loss:3.563246 +step:6326 train loss:3.566254 +step:6327 train loss:3.561330 +step:6328 train loss:3.542171 +step:6329 train loss:3.566983 +step:6330 train loss:3.602620 +step:6331 train loss:3.599978 +step:6332 train loss:3.558368 +step:6333 train loss:3.571932 +step:6334 train loss:3.528360 +step:6335 train loss:3.533323 +step:6336 train loss:3.569392 +step:6337 train loss:3.563044 +step:6338 train loss:3.558807 +step:6339 train loss:3.573081 +step:6340 train loss:3.587809 +step:6341 train loss:3.542126 +step:6342 train loss:3.625588 +step:6343 train loss:3.580399 +step:6344 train loss:3.506477 +step:6345 train loss:3.537528 +step:6346 train loss:3.521956 +step:6347 train loss:3.570428 +step:6348 train loss:3.569343 +step:6349 train loss:3.546831 +step:6350 train loss:3.612795 +step:6351 train loss:3.531978 +step:6352 train loss:3.530912 +step:6353 train loss:3.519402 +step:6354 train loss:3.571194 +step:6355 train loss:3.471123 +step:6356 train loss:3.476449 +step:6357 train loss:3.505443 +step:6358 train loss:3.560357 +step:6359 train loss:3.566921 +step:6360 train loss:3.656308 +step:6361 train loss:3.526062 +step:6362 train loss:3.602730 +step:6363 train loss:3.559933 +step:6364 train loss:3.533064 +step:6365 train loss:3.596473 +step:6366 train loss:3.559851 +step:6367 train loss:3.530019 +step:6368 train loss:3.556073 +step:6369 train loss:3.534522 +step:6370 train loss:3.531173 +step:6371 train loss:3.527865 +step:6372 train loss:3.514739 +step:6373 train loss:3.643674 +step:6374 train loss:3.577362 +step:6375 train loss:3.545045 +step:6376 train loss:3.603187 +step:6377 train loss:3.572472 +step:6378 train loss:3.575167 +step:6379 train loss:3.563270 +step:6380 train loss:3.555099 +step:6381 train loss:3.562285 +step:6382 train loss:3.560862 +step:6383 train loss:3.500539 +step:6384 train loss:3.487218 +step:6385 train loss:3.511157 +step:6386 train loss:3.594010 +step:6387 train loss:3.561308 +step:6388 train loss:3.584531 +step:6389 train loss:3.557379 +step:6390 train loss:3.524746 +step:6391 train 
loss:3.521636 +step:6392 train loss:3.572227 +step:6393 train loss:3.611880 +step:6394 train loss:3.795277 +step:6395 train loss:3.565104 +step:6396 train loss:3.568213 +step:6397 train loss:3.599718 +step:6398 train loss:3.538326 +step:6399 train loss:3.582927 +step:6400 train loss:3.525324 +step:6401 train loss:3.530695 +step:6402 train loss:3.614471 +step:6403 train loss:3.553440 +step:6404 train loss:3.551402 +step:6405 train loss:3.568792 +step:6406 train loss:3.633663 +step:6407 train loss:3.614048 +step:6408 train loss:3.530882 +step:6409 train loss:3.567207 +step:6410 train loss:3.571957 +step:6411 train loss:3.589729 +step:6412 train loss:3.558657 +step:6413 train loss:3.577315 +step:6414 train loss:3.556477 +step:6415 train loss:3.542924 +step:6416 train loss:3.600883 +step:6417 train loss:3.606539 +step:6418 train loss:3.598816 +step:6419 train loss:3.544093 +step:6420 train loss:3.537340 +step:6421 train loss:3.539811 +step:6422 train loss:3.565020 +step:6423 train loss:3.630808 +step:6424 train loss:3.585962 +step:6425 train loss:3.587609 +step:6426 train loss:3.568753 +step:6427 train loss:3.580186 +step:6428 train loss:3.592174 +step:6429 train loss:3.613711 +step:6430 train loss:3.572337 +step:6431 train loss:3.570756 +step:6432 train loss:3.541415 +step:6433 train loss:3.546235 +step:6434 train loss:3.536765 +step:6435 train loss:3.550430 +step:6436 train loss:3.556034 +step:6437 train loss:3.519889 +step:6438 train loss:3.540082 +step:6439 train loss:3.536670 +step:6440 train loss:3.553767 +step:6441 train loss:3.521817 +step:6442 train loss:3.577566 +step:6443 train loss:3.560737 +step:6444 train loss:3.537001 +step:6445 train loss:3.574034 +step:6446 train loss:3.755082 +step:6447 train loss:3.556334 +step:6448 train loss:3.535994 +step:6449 train loss:3.608720 +step:6450 train loss:3.565647 +step:6451 train loss:3.531867 +step:6452 train loss:3.530433 +step:6453 train loss:3.595986 +step:6454 train loss:3.594677 +step:6455 train loss:3.531554 +step:6456 train loss:3.576301 +step:6457 train loss:3.546994 +step:6458 train loss:3.528921 +step:6459 train loss:3.532198 +step:6460 train loss:3.568725 +step:6461 train loss:3.596254 +step:6462 train loss:3.539468 +step:6463 train loss:3.551899 +step:6464 train loss:3.594282 +step:6465 train loss:3.575416 +step:6466 train loss:3.550598 +step:6467 train loss:3.623075 +step:6468 train loss:3.566854 +step:6469 train loss:3.541972 +step:6470 train loss:3.560882 +step:6471 train loss:3.611172 +step:6472 train loss:3.570037 +step:6473 train loss:3.560140 +step:6474 train loss:3.504812 +step:6475 train loss:3.556790 +step:6476 train loss:3.570490 +step:6477 train loss:3.574145 +step:6478 train loss:3.526893 +step:6479 train loss:3.546220 +step:6480 train loss:3.578118 +step:6481 train loss:3.534619 +step:6482 train loss:3.534151 +step:6483 train loss:3.498316 +step:6484 train loss:3.515841 +step:6485 train loss:3.594199 +step:6486 train loss:3.521423 +step:6487 train loss:3.515012 +step:6488 train loss:3.618783 +step:6489 train loss:3.476358 +step:6490 train loss:3.526713 +step:6491 train loss:3.529054 +step:6492 train loss:3.593099 +step:6493 train loss:3.624359 +step:6494 train loss:3.550502 +step:6495 train loss:3.521521 +step:6496 train loss:3.561108 +step:6497 train loss:3.542498 +step:6498 train loss:3.574584 +step:6499 train loss:3.589488 +step:6500 validation loss:3.488799 total_sharp:6.5350e-03 L1_sharp:4.7172e-03 L2_sharp:4.1207e-04 L3_sharp:9.1003e-04 L4_sharp:1.1203e-03 L5_sharp:9.8213e-04 L6_sharp:1.0470e-03 
L7_sharp:1.5888e-03 L8_sharp:2.1405e-03 L9_sharp:1.9377e-03 L10_sharp:1.1341e-03 L11_sharp:1.1600e-03 L12_sharp:6.6779e-03 total_fnorm:2.2290e+00 total_l1_linf:1.9836e+04 total_spectral:2.2290e+00 L1_fnorm:5.0798e-01 L2_fnorm:5.0848e-01 L3_fnorm:5.0194e-01 L4_fnorm:5.0176e-01 L5_fnorm:4.9892e-01 L6_fnorm:5.0789e-01 L7_fnorm:5.1446e-01 L8_fnorm:5.1350e-01 L9_fnorm:5.2172e-01 L10_fnorm:5.3808e-01 L11_fnorm:5.4171e-01 L12_fnorm:5.3359e-01 L1_l1linf:5.7983e-01 L2_l1linf:5.8987e-01 L3_l1linf:7.1099e-01 L4_l1linf:6.7097e-01 L5_l1linf:5.4246e-01 L6_l1linf:6.5469e-01 L7_l1linf:5.8372e-01 L8_l1linf:5.7959e-01 L9_l1linf:5.8128e-01 L10_l1linf:5.5853e-01 L11_l1linf:5.6171e-01 L12_l1linf:6.0699e-01 L1_spectral:8.2388e-02 L2_spectral:7.8270e-02 L3_spectral:7.8645e-02 L4_spectral:7.9741e-02 L5_spectral:6.9607e-02 L6_spectral:8.1702e-02 L7_spectral:8.0870e-02 L8_spectral:7.6632e-02 L9_spectral:6.3860e-02 L10_spectral:4.6142e-02 L11_spectral:5.4457e-02 L12_spectral:8.8461e-02 v_norm:2.2290e+00 cos_v_-g_hvp:6.4935e-02 g_hvp_norm:3.9213e-01 cos_v_-g_t:7.6836e-02 g_t_norm:3.3400e-01 hv_norm:5.2705e-01 cos_v_hv:2.7638e-02 hg_norm:5.6506e+00 cos_g_hg:6.9688e-01 v_par:9.2155e-03 v_perp:2.2290e+00 L1_cos_v_neg_g:1.2657e-01 L1_v_norm:5.0798e-01 L2_cos_v_neg_g:6.4100e-02 L2_v_norm:5.0848e-01 L3_cos_v_neg_g:5.2183e-02 L3_v_norm:5.0194e-01 L4_cos_v_neg_g:5.9342e-02 L4_v_norm:5.0176e-01 L5_cos_v_neg_g:4.2723e-02 L5_v_norm:4.9892e-01 L6_cos_v_neg_g:5.3710e-02 L6_v_norm:5.0789e-01 L7_cos_v_neg_g:6.4754e-02 L7_v_norm:5.1446e-01 L8_cos_v_neg_g:6.2876e-02 L8_v_norm:5.1350e-01 L9_cos_v_neg_g:5.9667e-02 L9_v_norm:5.2172e-01 L10_cos_v_neg_g:6.9588e-02 L10_v_norm:5.3808e-01 L11_cos_v_neg_g:8.4253e-02 L11_v_norm:5.4171e-01 L12_cos_v_neg_g:1.2322e-01 L12_v_norm:5.3359e-01 +step:6500 train loss:3.611929 +step:6501 train loss:3.534917 +step:6502 train loss:3.557754 +step:6503 train loss:3.547027 +step:6504 train loss:3.652750 +step:6505 train loss:3.538987 +step:6506 train loss:3.542257 +step:6507 train loss:3.586628 +step:6508 train loss:3.538486 +step:6509 train loss:3.598010 +step:6510 train loss:3.571490 +step:6511 train loss:3.555575 +step:6512 train loss:3.566516 +step:6513 train loss:3.549678 +step:6514 train loss:3.588958 +step:6515 train loss:3.602621 +step:6516 train loss:3.556262 +step:6517 train loss:3.527535 +step:6518 train loss:3.575530 +step:6519 train loss:3.511077 +step:6520 train loss:3.518044 +step:6521 train loss:3.567611 +step:6522 train loss:3.589757 +step:6523 train loss:3.547981 +step:6524 train loss:3.544953 +step:6525 train loss:3.517395 +step:6526 train loss:3.670628 +step:6527 train loss:3.603356 +step:6528 train loss:3.547213 +step:6529 train loss:3.511232 +step:6530 train loss:3.565268 +step:6531 train loss:3.572145 +step:6532 train loss:3.506818 +step:6533 train loss:3.562580 +step:6534 train loss:3.510059 +step:6535 train loss:3.530330 +step:6536 train loss:3.532522 +step:6537 train loss:3.553697 +step:6538 train loss:3.539358 +step:6539 train loss:3.559876 +step:6540 train loss:3.543744 +step:6541 train loss:3.604045 +step:6542 train loss:3.583468 +step:6543 train loss:3.568169 +step:6544 train loss:3.505861 +step:6545 train loss:3.483539 +step:6546 train loss:3.515071 +step:6547 train loss:3.554766 +step:6548 train loss:3.585850 +step:6549 train loss:3.531923 +step:6550 train loss:3.548099 +step:6551 train loss:3.508142 +step:6552 train loss:3.498264 +step:6553 train loss:3.556876 +step:6554 train loss:3.623859 +step:6555 train loss:3.598644 +step:6556 train loss:3.581256 +step:6557 train 
loss:3.599868 +step:6558 train loss:3.630941 +step:6559 train loss:3.563712 +step:6560 train loss:3.558840 +step:6561 train loss:3.524270 +step:6562 train loss:3.547132 +step:6563 train loss:3.617292 +step:6564 train loss:3.569109 +step:6565 train loss:3.599765 +step:6566 train loss:3.624767 +step:6567 train loss:3.623425 +step:6568 train loss:3.581611 +step:6569 train loss:3.579746 +step:6570 train loss:3.548479 +step:6571 train loss:3.563392 +step:6572 train loss:3.568252 +step:6573 train loss:3.561422 +step:6574 train loss:3.525379 +step:6575 train loss:3.541585 +step:6576 train loss:3.540692 +step:6577 train loss:3.593693 +step:6578 train loss:3.549479 +step:6579 train loss:3.601471 +step:6580 train loss:3.557755 +step:6581 train loss:3.516795 +step:6582 train loss:3.519417 +step:6583 train loss:3.544089 +step:6584 train loss:3.581339 +step:6585 train loss:3.620421 +step:6586 train loss:3.602096 +step:6587 train loss:3.579586 +step:6588 train loss:3.540192 +step:6589 train loss:3.514646 +step:6590 train loss:3.530547 +step:6591 train loss:3.499232 +step:6592 train loss:3.524064 +step:6593 train loss:3.499352 +step:6594 train loss:3.515210 +step:6595 train loss:3.542860 +step:6596 train loss:3.503638 +step:6597 train loss:3.537791 +step:6598 train loss:3.539978 +step:6599 train loss:3.489955 +step:6600 train loss:3.437949 +step:6601 train loss:3.491710 +step:6602 train loss:3.504601 +step:6603 train loss:3.477763 +step:6604 train loss:3.475237 +step:6605 train loss:3.491738 +step:6606 train loss:3.534474 +step:6607 train loss:3.477691 +step:6608 train loss:3.504871 +step:6609 train loss:3.507342 +step:6610 train loss:3.467194 +step:6611 train loss:3.524440 +step:6612 train loss:3.494585 +step:6613 train loss:3.514941 +step:6614 train loss:3.570483 +step:6615 train loss:3.552276 +step:6616 train loss:3.577988 +step:6617 train loss:3.459768 +step:6618 train loss:3.501107 +step:6619 train loss:3.557353 +step:6620 train loss:3.526571 +step:6621 train loss:3.526356 +step:6622 train loss:3.530699 +step:6623 train loss:3.507169 +step:6624 train loss:3.537415 +step:6625 train loss:3.541079 +step:6626 train loss:3.541536 +step:6627 train loss:3.542151 +step:6628 train loss:3.578939 +step:6629 train loss:3.577786 +step:6630 train loss:3.499262 +step:6631 train loss:3.433108 +step:6632 train loss:3.593782 +step:6633 train loss:3.493071 +step:6634 train loss:3.546074 +step:6635 train loss:3.492790 +step:6636 train loss:3.496991 +step:6637 train loss:3.547047 +step:6638 train loss:3.569791 +step:6639 train loss:3.534587 +step:6640 train loss:3.533597 +step:6641 train loss:3.478077 +step:6642 train loss:3.518904 +step:6643 train loss:3.494903 +step:6644 train loss:3.536638 +step:6645 train loss:3.587925 +step:6646 train loss:3.456442 +step:6647 train loss:3.535968 +step:6648 train loss:3.509539 +step:6649 train loss:3.516028 +step:6650 train loss:3.547201 +step:6651 train loss:3.580988 +step:6652 train loss:3.533563 +step:6653 train loss:3.532755 +step:6654 train loss:3.478601 +step:6655 train loss:3.535938 +step:6656 train loss:3.490854 +step:6657 train loss:3.547646 +step:6658 train loss:3.498708 +step:6659 train loss:3.478264 +step:6660 train loss:3.523157 +step:6661 train loss:3.483065 +step:6662 train loss:3.532298 +step:6663 train loss:3.521758 +step:6664 train loss:3.511539 +step:6665 train loss:3.517963 +step:6666 train loss:3.490982 +step:6667 train loss:3.521003 +step:6668 train loss:3.508317 +step:6669 train loss:3.526348 +step:6670 train loss:3.556624 +step:6671 train loss:3.503798 
+step:6672 train loss:3.524155 +step:6673 train loss:3.482502 +step:6674 train loss:3.497888 +step:6675 train loss:3.541584 +step:6676 train loss:3.487091 +step:6677 train loss:3.467260 +step:6678 train loss:3.461375 +step:6679 train loss:3.544772 +step:6680 train loss:3.518587 +step:6681 train loss:3.499446 +step:6682 train loss:3.508683 +step:6683 train loss:3.447880 +step:6684 train loss:3.498343 +step:6685 train loss:3.573278 +step:6686 train loss:3.464406 +step:6687 train loss:3.532751 +step:6688 train loss:3.524562 +step:6689 train loss:3.470057 +step:6690 train loss:3.553242 +step:6691 train loss:3.500967 +step:6692 train loss:3.515274 +step:6693 train loss:3.521195 +step:6694 train loss:3.551938 +step:6695 train loss:3.491479 +step:6696 train loss:3.497819 +step:6697 train loss:3.534099 +step:6698 train loss:3.512582 +step:6699 train loss:3.502493 +step:6700 train loss:3.450409 +step:6701 train loss:3.456068 +step:6702 train loss:3.503446 +step:6703 train loss:3.524709 +step:6704 train loss:3.538691 +step:6705 train loss:3.496244 +step:6706 train loss:3.545856 +step:6707 train loss:3.547688 +step:6708 train loss:3.503234 +step:6709 train loss:3.561573 +step:6710 train loss:3.480627 +step:6711 train loss:3.479779 +step:6712 train loss:3.492720 +step:6713 train loss:3.504626 +step:6714 train loss:3.505225 +step:6715 train loss:3.525498 +step:6716 train loss:3.505209 +step:6717 train loss:3.580296 +step:6718 train loss:3.509659 +step:6719 train loss:3.506722 +step:6720 train loss:3.577693 +step:6721 train loss:3.529956 +step:6722 train loss:3.477224 +step:6723 train loss:3.467502 +step:6724 train loss:3.496688 +step:6725 train loss:3.498940 +step:6726 train loss:3.482469 +step:6727 train loss:3.538968 +step:6728 train loss:3.467128 +step:6729 train loss:3.547531 +step:6730 train loss:3.525504 +step:6731 train loss:3.501555 +step:6732 train loss:3.631029 +step:6733 train loss:3.565617 +step:6734 train loss:3.528481 +step:6735 train loss:3.591389 +step:6736 train loss:3.499108 +step:6737 train loss:3.577714 +step:6738 train loss:3.518742 +step:6739 train loss:3.536194 +step:6740 train loss:3.480846 +step:6741 train loss:3.529162 +step:6742 train loss:3.533168 +step:6743 train loss:3.457258 +step:6744 train loss:3.568244 +step:6745 train loss:3.571664 +step:6746 train loss:3.525723 +step:6747 train loss:3.600879 +step:6748 train loss:3.702002 +step:6749 train loss:3.646249 +step:6750 validation loss:3.492083 +step:6750 train loss:3.540659 +step:6751 train loss:3.485904 +step:6752 train loss:3.520135 +step:6753 train loss:3.501634 +step:6754 train loss:3.552643 +step:6755 train loss:3.563573 +step:6756 train loss:3.514014 +step:6757 train loss:3.590893 +step:6758 train loss:3.476920 +step:6759 train loss:3.504375 +step:6760 train loss:3.475793 +step:6761 train loss:3.499690 +step:6762 train loss:3.560234 +step:6763 train loss:3.496490 +step:6764 train loss:3.490582 +step:6765 train loss:3.557997 +step:6766 train loss:3.542066 +step:6767 train loss:3.500502 +step:6768 train loss:3.505825 +step:6769 train loss:3.477399 +step:6770 train loss:3.534086 +step:6771 train loss:3.445036 +step:6772 train loss:3.548563 +step:6773 train loss:3.496134 +step:6774 train loss:3.465963 +step:6775 train loss:3.506984 +step:6776 train loss:3.454172 +step:6777 train loss:3.565299 +step:6778 train loss:3.449330 +step:6779 train loss:3.496312 +step:6780 train loss:3.500709 +step:6781 train loss:3.499541 +step:6782 train loss:3.487469 +step:6783 train loss:3.499991 +step:6784 train loss:3.543198 +step:6785 
train loss:3.500470 +step:6786 train loss:3.507824 +step:6787 train loss:3.521430 +step:6788 train loss:3.522621 +step:6789 train loss:3.526367 +step:6790 train loss:3.521235 +step:6791 train loss:3.576573 +step:6792 train loss:3.520897 +step:6793 train loss:3.569736 +step:6794 train loss:3.529141 +step:6795 train loss:3.461031 +step:6796 train loss:3.529585 +step:6797 train loss:3.517342 +step:6798 train loss:3.530479 +step:6799 train loss:3.536487 +step:6800 train loss:3.529967 +step:6801 train loss:3.494080 +step:6802 train loss:3.531267 +step:6803 train loss:3.569200 +step:6804 train loss:3.517559 +step:6805 train loss:3.511469 +step:6806 train loss:3.542076 +step:6807 train loss:3.551823 +step:6808 train loss:3.500309 +step:6809 train loss:3.475892 +step:6810 train loss:3.514913 +step:6811 train loss:3.502960 +step:6812 train loss:3.453210 +step:6813 train loss:3.480538 +step:6814 train loss:3.451567 +step:6815 train loss:3.471312 +step:6816 train loss:3.528590 +step:6817 train loss:3.508907 +step:6818 train loss:3.516280 +step:6819 train loss:3.572376 +step:6820 train loss:3.550542 +step:6821 train loss:3.645475 +step:6822 train loss:3.496423 +step:6823 train loss:3.538250 +step:6824 train loss:3.516997 +step:6825 train loss:3.549806 +step:6826 train loss:3.460170 +step:6827 train loss:3.570789 +step:6828 train loss:3.500833 +step:6829 train loss:3.475468 +step:6830 train loss:3.488006 +step:6831 train loss:3.444384 +step:6832 train loss:3.508138 +step:6833 train loss:3.453462 +step:6834 train loss:3.525172 +step:6835 train loss:3.438495 +step:6836 train loss:3.518976 +step:6837 train loss:3.499402 +step:6838 train loss:3.524182 +step:6839 train loss:3.520361 +step:6840 train loss:3.529298 +step:6841 train loss:3.522714 +step:6842 train loss:3.460084 +step:6843 train loss:3.491067 +step:6844 train loss:3.500262 +step:6845 train loss:3.550561 +step:6846 train loss:3.496569 +step:6847 train loss:3.482069 +step:6848 train loss:3.596863 +step:6849 train loss:3.492008 +step:6850 train loss:3.550474 +step:6851 train loss:3.517652 +step:6852 train loss:3.497515 +step:6853 train loss:3.501166 +step:6854 train loss:3.502352 +step:6855 train loss:3.492515 +step:6856 train loss:3.519905 +step:6857 train loss:3.529583 +step:6858 train loss:3.453941 +step:6859 train loss:3.433669 +step:6860 train loss:3.482227 +step:6861 train loss:3.534212 +step:6862 train loss:3.485540 +step:6863 train loss:3.493479 +step:6864 train loss:3.546142 +step:6865 train loss:3.539772 +step:6866 train loss:3.523997 +step:6867 train loss:3.481824 +step:6868 train loss:3.547392 +step:6869 train loss:3.521958 +step:6870 train loss:3.550943 +step:6871 train loss:3.474771 +step:6872 train loss:3.476139 +step:6873 train loss:3.525143 +step:6874 train loss:3.463588 +step:6875 train loss:3.508119 +step:6876 train loss:3.480991 +step:6877 train loss:3.514468 +step:6878 train loss:3.479932 +step:6879 train loss:3.481648 +step:6880 train loss:3.457791 +step:6881 train loss:3.457113 +step:6882 train loss:3.453736 +step:6883 train loss:3.489914 +step:6884 train loss:3.486873 +step:6885 train loss:3.493733 +step:6886 train loss:3.418960 +step:6887 train loss:3.476981 +step:6888 train loss:3.528639 +step:6889 train loss:3.497149 +step:6890 train loss:3.513144 +step:6891 train loss:3.515023 +step:6892 train loss:3.526549 +step:6893 train loss:3.474459 +step:6894 train loss:3.499329 +step:6895 train loss:3.504841 +step:6896 train loss:3.480089 +step:6897 train loss:3.485928 +step:6898 train loss:3.523928 +step:6899 train loss:3.486426 
+step:6900 train loss:3.489110 +step:6901 train loss:3.462228 +step:6902 train loss:3.521193 +step:6903 train loss:3.533136 +step:6904 train loss:3.539713 +step:6905 train loss:3.537995 +step:6906 train loss:3.590414 +step:6907 train loss:3.522666 +step:6908 train loss:3.534654 +step:6909 train loss:3.488213 +step:6910 train loss:3.456589 +step:6911 train loss:3.518806 +step:6912 train loss:3.457542 +step:6913 train loss:3.521893 +step:6914 train loss:3.449142 +step:6915 train loss:3.502804 +step:6916 train loss:3.507650 +step:6917 train loss:3.499937 +step:6918 train loss:3.497660 +step:6919 train loss:3.463853 +step:6920 train loss:3.535092 +step:6921 train loss:3.464636 +step:6922 train loss:3.483144 +step:6923 train loss:3.485773 +step:6924 train loss:3.500761 +step:6925 train loss:3.452326 +step:6926 train loss:3.522911 +step:6927 train loss:3.423560 +step:6928 train loss:3.513167 +step:6929 train loss:3.485276 +step:6930 train loss:3.503597 +step:6931 train loss:3.549098 +step:6932 train loss:3.455463 +step:6933 train loss:3.478085 +step:6934 train loss:3.556899 +step:6935 train loss:3.588340 +step:6936 train loss:3.478007 +step:6937 train loss:3.516010 +step:6938 train loss:3.499530 +step:6939 train loss:3.524412 +step:6940 train loss:3.563329 +step:6941 train loss:3.482717 +step:6942 train loss:3.472157 +step:6943 train loss:3.447858 +step:6944 train loss:3.481427 +step:6945 train loss:3.530781 +step:6946 train loss:3.539545 +step:6947 train loss:3.487896 +step:6948 train loss:3.516335 +step:6949 train loss:3.417016 +step:6950 train loss:3.529434 +step:6951 train loss:3.518229 +step:6952 train loss:3.532549 +step:6953 train loss:3.442348 +step:6954 train loss:3.491843 +step:6955 train loss:3.498788 +step:6956 train loss:3.494537 +step:6957 train loss:3.526471 +step:6958 train loss:3.510788 +step:6959 train loss:3.503939 +step:6960 train loss:3.523245 +step:6961 train loss:3.545887 +step:6962 train loss:3.464332 +step:6963 train loss:3.568401 +step:6964 train loss:3.470665 +step:6965 train loss:3.499912 +step:6966 train loss:3.489745 +step:6967 train loss:3.568477 +step:6968 train loss:3.618695 +step:6969 train loss:3.570177 +step:6970 train loss:3.551055 +step:6971 train loss:3.532821 +step:6972 train loss:3.524730 +step:6973 train loss:3.606841 +step:6974 train loss:3.507040 +step:6975 train loss:3.525876 +step:6976 train loss:3.535463 +step:6977 train loss:3.519714 +step:6978 train loss:3.538748 +step:6979 train loss:3.503337 +step:6980 train loss:3.534184 +step:6981 train loss:3.487081 +step:6982 train loss:3.474583 +step:6983 train loss:3.464715 +step:6984 train loss:3.548743 +step:6985 train loss:3.488978 +step:6986 train loss:3.491494 +step:6987 train loss:3.491823 +step:6988 train loss:3.559085 +step:6989 train loss:3.473635 +step:6990 train loss:3.440634 +step:6991 train loss:3.470313 +step:6992 train loss:3.467266 +step:6993 train loss:3.481659 +step:6994 train loss:3.513024 +step:6995 train loss:3.531902 +step:6996 train loss:3.454939 +step:6997 train loss:3.551279 +step:6998 train loss:3.500673 +step:6999 train loss:3.544237 +step:7000 validation loss:3.478995 total_sharp:4.7451e-03 L1_sharp:4.7323e-03 L2_sharp:4.9152e-04 L3_sharp:9.5990e-04 L4_sharp:1.2945e-03 L5_sharp:9.2560e-04 L6_sharp:1.0319e-03 L7_sharp:1.2743e-03 L8_sharp:1.8660e-03 L9_sharp:1.5957e-03 L10_sharp:8.5507e-04 L11_sharp:8.9421e-04 L12_sharp:2.4428e-03 total_fnorm:2.0791e+00 total_l1_linf:1.8069e+04 total_spectral:2.0791e+00 L1_fnorm:3.3328e-01 L2_fnorm:3.6300e-01 L3_fnorm:4.0197e-01 
L4_fnorm:4.2845e-01 L5_fnorm:4.0932e-01 L6_fnorm:4.6519e-01 L7_fnorm:4.9040e-01 L8_fnorm:4.9910e-01 L9_fnorm:5.1226e-01 L10_fnorm:5.3219e-01 L11_fnorm:5.3757e-01 L12_fnorm:5.3021e-01 L1_l1linf:4.6532e-01 L2_l1linf:4.9212e-01 L3_l1linf:5.2396e-01 L4_l1linf:5.5733e-01 L5_l1linf:4.4006e-01 L6_l1linf:5.6190e-01 L7_l1linf:6.0933e-01 L8_l1linf:5.3337e-01 L9_l1linf:5.6009e-01 L10_l1linf:5.4221e-01 L11_l1linf:5.5059e-01 L12_l1linf:6.1266e-01 L1_spectral:6.0141e-02 L2_spectral:7.1697e-02 L3_spectral:6.7963e-02 L4_spectral:7.2933e-02 L5_spectral:6.2474e-02 L6_spectral:7.9665e-02 L7_spectral:7.7038e-02 L8_spectral:6.9852e-02 L9_spectral:5.7918e-02 L10_spectral:5.3797e-02 L11_spectral:5.0366e-02 L12_spectral:6.5647e-02 v_norm:2.0791e+00 cos_v_-g_hvp:6.4431e-02 g_hvp_norm:3.5723e-01 cos_v_-g_t:7.8457e-02 g_t_norm:2.9618e-01 hv_norm:4.0436e-01 cos_v_hv:2.4398e-02 hg_norm:5.4172e+00 cos_g_hg:6.2527e-01 v_par:9.1792e-03 v_perp:2.0790e+00 L1_cos_v_neg_g:1.1329e-01 L1_v_norm:3.3328e-01 L2_cos_v_neg_g:5.7476e-02 L2_v_norm:3.6300e-01 L3_cos_v_neg_g:4.5672e-02 L3_v_norm:4.0197e-01 L4_cos_v_neg_g:5.6604e-02 L4_v_norm:4.2845e-01 L5_cos_v_neg_g:4.1641e-02 L5_v_norm:4.0932e-01 L6_cos_v_neg_g:5.1957e-02 L6_v_norm:4.6519e-01 L7_cos_v_neg_g:6.2222e-02 L7_v_norm:4.9040e-01 L8_cos_v_neg_g:6.2692e-02 L8_v_norm:4.9910e-01 L9_cos_v_neg_g:6.1446e-02 L9_v_norm:5.1226e-01 L10_cos_v_neg_g:7.2641e-02 L10_v_norm:5.3219e-01 L11_cos_v_neg_g:8.6195e-02 L11_v_norm:5.3757e-01 L12_cos_v_neg_g:1.2255e-01 L12_v_norm:5.3021e-01 +step:7000 train loss:3.469887 +step:7001 train loss:3.521708 +step:7002 train loss:3.456302 +step:7003 train loss:3.505131 +step:7004 train loss:3.449911 +step:7005 train loss:3.520432 +step:7006 train loss:3.471377 +step:7007 train loss:3.499428 +step:7008 train loss:3.456671 +step:7009 train loss:3.538143 +step:7010 train loss:3.497132 +step:7011 train loss:3.501435 +step:7012 train loss:3.486930 +step:7013 train loss:3.492610 +step:7014 train loss:3.496790 +step:7015 train loss:3.480892 +step:7016 train loss:3.524790 +step:7017 train loss:3.428222 +step:7018 train loss:3.552707 +step:7019 train loss:3.501622 +step:7020 train loss:3.552877 +step:7021 train loss:3.485455 +step:7022 train loss:3.487370 +step:7023 train loss:3.498654 +step:7024 train loss:3.509294 +step:7025 train loss:3.508501 +step:7026 train loss:3.535049 +step:7027 train loss:3.512739 +step:7028 train loss:3.539733 +step:7029 train loss:3.557281 +step:7030 train loss:3.595915 +step:7031 train loss:3.477448 +step:7032 train loss:3.484992 +step:7033 train loss:3.519939 +step:7034 train loss:3.497016 +step:7035 train loss:3.499301 +step:7036 train loss:3.478085 +step:7037 train loss:3.522134 +step:7038 train loss:3.489160 +step:7039 train loss:3.565516 +step:7040 train loss:3.471845 +step:7041 train loss:3.486042 +step:7042 train loss:3.462502 +step:7043 train loss:3.444535 +step:7044 train loss:3.491359 +step:7045 train loss:3.507352 +step:7046 train loss:3.487957 +step:7047 train loss:3.444402 +step:7048 train loss:3.534476 +step:7049 train loss:3.512634 +step:7050 train loss:3.522424 +step:7051 train loss:3.525620 +step:7052 train loss:3.470635 +step:7053 train loss:3.458099 +step:7054 train loss:3.607069 +step:7055 train loss:3.495357 +step:7056 train loss:3.505516 +step:7057 train loss:3.406456 +step:7058 train loss:3.529274 +step:7059 train loss:3.485651 +step:7060 train loss:3.472087 +step:7061 train loss:3.470998 +step:7062 train loss:3.572864 +step:7063 train loss:3.476374 +step:7064 train loss:3.545872 +step:7065 train loss:3.477697 
+step:7066 train loss:3.462931 +step:7067 train loss:3.468968 +step:7068 train loss:3.501411 +step:7069 train loss:3.515440 +step:7070 train loss:3.501736 +step:7071 train loss:3.563629 +step:7072 train loss:3.495515 +step:7073 train loss:3.500373 +step:7074 train loss:3.467476 +step:7075 train loss:3.565168 +step:7076 train loss:3.442305 +step:7077 train loss:3.538123 +step:7078 train loss:3.456044 +step:7079 train loss:3.503591 +step:7080 train loss:3.508034 +step:7081 train loss:3.484870 +step:7082 train loss:3.466873 +step:7083 train loss:3.502353 +step:7084 train loss:3.495755 +step:7085 train loss:3.467950 +step:7086 train loss:3.462083 +step:7087 train loss:3.495925 +step:7088 train loss:3.479968 +step:7089 train loss:3.509578 +step:7090 train loss:3.493408 +step:7091 train loss:3.535804 +step:7092 train loss:3.476954 +step:7093 train loss:3.484866 +step:7094 train loss:3.498508 +step:7095 train loss:3.539691 +step:7096 train loss:3.470158 +step:7097 train loss:3.518051 +step:7098 train loss:3.503700 +step:7099 train loss:3.529474 +step:7100 train loss:3.503675 +step:7101 train loss:3.517604 +step:7102 train loss:3.503394 +step:7103 train loss:3.513825 +step:7104 train loss:3.527157 +step:7105 train loss:3.524321 +step:7106 train loss:3.469852 +step:7107 train loss:3.457168 +step:7108 train loss:3.508774 +step:7109 train loss:3.602205 +step:7110 train loss:3.515942 +step:7111 train loss:3.512267 +step:7112 train loss:3.557389 +step:7113 train loss:3.566780 +step:7114 train loss:3.636278 +step:7115 train loss:3.537650 +step:7116 train loss:3.523335 +step:7117 train loss:3.462780 +step:7118 train loss:3.520554 +step:7119 train loss:3.475491 +step:7120 train loss:3.535557 +step:7121 train loss:3.533438 +step:7122 train loss:3.621322 +step:7123 train loss:3.528158 +step:7124 train loss:3.577453 +step:7125 train loss:3.509547 +step:7126 train loss:3.454864 +step:7127 train loss:3.442952 +step:7128 train loss:3.505864 +step:7129 train loss:3.480719 +step:7130 train loss:3.530581 +step:7131 train loss:3.512524 +step:7132 train loss:3.529373 +step:7133 train loss:3.483654 +step:7134 train loss:3.507797 +step:7135 train loss:3.517812 +step:7136 train loss:3.482101 +step:7137 train loss:3.473819 +step:7138 train loss:3.491861 +step:7139 train loss:3.483618 +step:7140 train loss:3.564703 +step:7141 train loss:3.494456 +step:7142 train loss:3.555256 +step:7143 train loss:3.456060 +step:7144 train loss:3.505631 +step:7145 train loss:3.555343 +step:7146 train loss:3.522714 +step:7147 train loss:3.544034 +step:7148 train loss:3.487686 +step:7149 train loss:3.515420 +step:7150 train loss:3.432519 +step:7151 train loss:3.478712 +step:7152 train loss:3.435304 +step:7153 train loss:3.511012 +step:7154 train loss:3.483649 +step:7155 train loss:3.496427 +step:7156 train loss:3.461079 +step:7157 train loss:3.542116 +step:7158 train loss:3.458033 +step:7159 train loss:3.490746 +step:7160 train loss:3.499483 +step:7161 train loss:3.478549 +step:7162 train loss:3.511173 +step:7163 train loss:3.483689 +step:7164 train loss:3.469834 +step:7165 train loss:3.494862 +step:7166 train loss:3.451817 +step:7167 train loss:3.542042 +step:7168 train loss:3.488500 +step:7169 train loss:3.570711 +step:7170 train loss:3.485546 +step:7171 train loss:3.526000 +step:7172 train loss:3.531648 +step:7173 train loss:3.549123 +step:7174 train loss:3.508578 +step:7175 train loss:3.554725 +step:7176 train loss:3.509663 +step:7177 train loss:3.458992 +step:7178 train loss:3.506905 +step:7179 train loss:3.558262 +step:7180 train 
loss:3.532782 +step:7181 train loss:3.536346 +step:7182 train loss:3.502933 +step:7183 train loss:3.541716 +step:7184 train loss:3.483654 +step:7185 train loss:3.486321 +step:7186 train loss:3.497138 +step:7187 train loss:3.585119 +step:7188 train loss:3.513113 +step:7189 train loss:3.495860 +step:7190 train loss:3.504910 +step:7191 train loss:3.472967 +step:7192 train loss:3.557350 +step:7193 train loss:3.604658 +step:7194 train loss:3.539881 +step:7195 train loss:3.574272 +step:7196 train loss:3.482405 +step:7197 train loss:3.500741 +step:7198 train loss:3.455923 +step:7199 train loss:3.481554 +step:7200 train loss:3.456503 +step:7201 train loss:3.473381 +step:7202 train loss:3.467366 +step:7203 train loss:3.517941 +step:7204 train loss:3.511304 +step:7205 train loss:3.527527 +step:7206 train loss:3.630750 +step:7207 train loss:3.459295 +step:7208 train loss:3.595646 +step:7209 train loss:3.600021 +step:7210 train loss:3.607309 +step:7211 train loss:3.624068 +step:7212 train loss:3.497357 +step:7213 train loss:3.488278 +step:7214 train loss:3.530369 +step:7215 train loss:3.494704 +step:7216 train loss:3.562589 +step:7217 train loss:3.488256 +step:7218 train loss:3.506613 +step:7219 train loss:3.518022 +step:7220 train loss:3.544034 +step:7221 train loss:3.490734 +step:7222 train loss:3.477592 +step:7223 train loss:3.527964 +step:7224 train loss:3.490353 +step:7225 train loss:3.534995 +step:7226 train loss:3.466331 +step:7227 train loss:3.459299 +step:7228 train loss:3.494507 +step:7229 train loss:3.495811 +step:7230 train loss:3.487799 +step:7231 train loss:3.503723 +step:7232 train loss:3.532545 +step:7233 train loss:3.480395 +step:7234 train loss:3.504802 +step:7235 train loss:3.543578 +step:7236 train loss:3.494383 +step:7237 train loss:3.535759 +step:7238 train loss:3.499792 +step:7239 train loss:3.543688 +step:7240 train loss:3.521395 +step:7241 train loss:3.565370 +step:7242 train loss:3.497693 +step:7243 train loss:3.489460 +step:7244 train loss:3.526699 +step:7245 train loss:3.473743 +step:7246 train loss:3.609601 +step:7247 train loss:3.460761 +step:7248 train loss:3.535686 +step:7249 train loss:3.485747 +step:7250 validation loss:3.475320 +step:7250 train loss:3.548332 +step:7251 train loss:3.542482 +step:7252 train loss:3.587857 +step:7253 train loss:3.477937 +step:7254 train loss:3.500145 +step:7255 train loss:3.545763 +step:7256 train loss:3.472524 +step:7257 train loss:3.557537 +step:7258 train loss:3.558573 +step:7259 train loss:3.507018 +step:7260 train loss:3.651282 +step:7261 train loss:3.514070 +step:7262 train loss:3.471887 +step:7263 train loss:3.521992 +step:7264 train loss:3.545415 +step:7265 train loss:3.558006 +step:7266 train loss:3.504488 +step:7267 train loss:3.520894 +step:7268 train loss:3.465875 +step:7269 train loss:3.505844 +step:7270 train loss:3.523788 +step:7271 train loss:3.475124 +step:7272 train loss:3.479380 +step:7273 train loss:3.506169 +step:7274 train loss:3.503652 +step:7275 train loss:3.505750 +step:7276 train loss:3.528930 +step:7277 train loss:3.530056 +step:7278 train loss:3.498726 +step:7279 train loss:3.537338 +step:7280 train loss:3.560224 +step:7281 train loss:3.527467 +step:7282 train loss:3.504134 +step:7283 train loss:3.466508 +step:7284 train loss:3.490752 +step:7285 train loss:3.522228 +step:7286 train loss:3.472352 +step:7287 train loss:3.508007 +step:7288 train loss:3.504394 +step:7289 train loss:3.471098 +step:7290 train loss:3.487330 +step:7291 train loss:3.550210 +step:7292 train loss:3.585244 +step:7293 train loss:3.587891 
+step:7294 train loss:3.589203 +step:7295 train loss:3.474167 +step:7296 train loss:3.485897 +step:7297 train loss:3.504702 +step:7298 train loss:3.525937 +step:7299 train loss:3.535550 +step:7300 train loss:3.578213 +step:7301 train loss:3.506581 +step:7302 train loss:3.466976 +step:7303 train loss:3.473232 +step:7304 train loss:3.474031 +step:7305 train loss:3.488747 +step:7306 train loss:3.476267 +step:7307 train loss:3.469093 +step:7308 train loss:3.490507 +step:7309 train loss:3.494459 +step:7310 train loss:3.488873 +step:7311 train loss:3.468813 +step:7312 train loss:3.549891 +step:7313 train loss:3.512002 +step:7314 train loss:3.468358 +step:7315 train loss:3.518784 +step:7316 train loss:3.515652 +step:7317 train loss:3.561080 +step:7318 train loss:3.520929 +step:7319 train loss:3.468576 +step:7320 train loss:3.455166 +step:7321 train loss:3.541775 +step:7322 train loss:3.490346 +step:7323 train loss:3.476266 +step:7324 train loss:3.518407 +step:7325 train loss:3.449653 +step:7326 train loss:3.532468 +step:7327 train loss:3.511019 +step:7328 train loss:3.509335 +step:7329 train loss:3.506129 +step:7330 train loss:3.526604 +step:7331 train loss:3.539169 +step:7332 train loss:3.500400 +step:7333 train loss:3.494885 +step:7334 train loss:3.483712 +step:7335 train loss:3.512556 +step:7336 train loss:3.487449 +step:7337 train loss:3.480271 +step:7338 train loss:3.476221 +step:7339 train loss:3.492290 +step:7340 train loss:3.465079 +step:7341 train loss:3.492246 +step:7342 train loss:3.464972 +step:7343 train loss:3.525438 +step:7344 train loss:3.486738 +step:7345 train loss:3.487327 +step:7346 train loss:3.548050 +step:7347 train loss:3.500593 +step:7348 train loss:3.478389 +step:7349 train loss:3.469645 +step:7350 train loss:3.535823 +step:7351 train loss:3.533443 +step:7352 train loss:3.582413 +step:7353 train loss:3.535646 +step:7354 train loss:3.507670 +step:7355 train loss:3.532468 +step:7356 train loss:3.522182 +step:7357 train loss:3.497227 +step:7358 train loss:3.495034 +step:7359 train loss:3.465096 +step:7360 train loss:3.509710 +step:7361 train loss:3.492282 +step:7362 train loss:3.497013 +step:7363 train loss:3.651657 +step:7364 train loss:3.474335 +step:7365 train loss:3.538693 +step:7366 train loss:3.510222 +step:7367 train loss:3.508979 +step:7368 train loss:3.493799 +step:7369 train loss:3.536449 +step:7370 train loss:3.474442 +step:7371 train loss:3.536226 +step:7372 train loss:3.526923 +step:7373 train loss:3.517097 +step:7374 train loss:3.560663 +step:7375 train loss:3.490851 +step:7376 train loss:3.521157 +step:7377 train loss:3.530396 +step:7378 train loss:3.498677 +step:7379 train loss:3.543597 +step:7380 train loss:3.590466 +step:7381 train loss:3.493972 +step:7382 train loss:3.537648 +step:7383 train loss:3.501907 +step:7384 train loss:3.533570 +step:7385 train loss:3.498045 +step:7386 train loss:3.510414 +step:7387 train loss:3.536669 +step:7388 train loss:3.553383 +step:7389 train loss:3.562002 +step:7390 train loss:3.642154 +step:7391 train loss:3.615443 +step:7392 train loss:3.503928 +step:7393 train loss:3.453430 +step:7394 train loss:3.528763 +step:7395 train loss:3.477310 +step:7396 train loss:3.447384 +step:7397 train loss:3.463747 +step:7398 train loss:3.532952 +step:7399 train loss:3.517656 +step:7400 train loss:3.542055 +step:7401 train loss:3.509036 +step:7402 train loss:3.509553 +step:7403 train loss:3.533976 +step:7404 train loss:3.554270 +step:7405 train loss:3.546186 +step:7406 train loss:3.564526 +step:7407 train loss:3.605272 +step:7408 train 
loss:3.559063 +step:7409 train loss:3.513865 +step:7410 train loss:3.530837 +step:7411 train loss:3.549947 +step:7412 train loss:3.551413 +step:7413 train loss:3.591535 +step:7414 train loss:3.506055 +step:7415 train loss:3.599649 +step:7416 train loss:3.533228 +step:7417 train loss:3.526006 +step:7418 train loss:3.497139 +step:7419 train loss:3.473633 +step:7420 train loss:3.516892 +step:7421 train loss:3.499992 +step:7422 train loss:3.516552 +step:7423 train loss:3.564650 +step:7424 train loss:3.577796 +step:7425 train loss:3.487461 +step:7426 train loss:3.524995 +step:7427 train loss:3.528006 +step:7428 train loss:3.535957 +step:7429 train loss:3.500355 +step:7430 train loss:3.536793 +step:7431 train loss:3.484929 +step:7432 train loss:3.544652 +step:7433 train loss:3.498374 +step:7434 train loss:3.545560 +step:7435 train loss:3.574066 +step:7436 train loss:3.521791 +step:7437 train loss:3.509968 +step:7438 train loss:3.558124 +step:7439 train loss:3.498632 +step:7440 train loss:3.590216 +step:7441 train loss:3.548112 +step:7442 train loss:3.548117 +step:7443 train loss:3.518275 +step:7444 train loss:3.554431 +step:7445 train loss:3.495338 +step:7446 train loss:3.512167 +step:7447 train loss:3.501707 +step:7448 train loss:3.551509 +step:7449 train loss:3.530106 +step:7450 train loss:3.514346 +step:7451 train loss:3.558573 +step:7452 train loss:3.482156 +step:7453 train loss:3.476644 +step:7454 train loss:3.480516 +step:7455 train loss:3.555857 +step:7456 train loss:3.528502 +step:7457 train loss:3.500486 +step:7458 train loss:3.509586 +step:7459 train loss:3.516614 +step:7460 train loss:3.619996 +step:7461 train loss:3.572196 +step:7462 train loss:3.523557 +step:7463 train loss:3.533179 +step:7464 train loss:3.540781 +step:7465 train loss:3.551262 +step:7466 train loss:3.557086 +step:7467 train loss:3.600645 +step:7468 train loss:3.545069 +step:7469 train loss:3.560560 +step:7470 train loss:3.543682 +step:7471 train loss:3.490071 +step:7472 train loss:3.450633 +step:7473 train loss:3.480629 +step:7474 train loss:3.530193 +step:7475 train loss:3.552633 +step:7476 train loss:3.523171 +step:7477 train loss:3.588879 +step:7478 train loss:3.509398 +step:7479 train loss:3.526162 +step:7480 train loss:3.560495 +step:7481 train loss:3.479466 +step:7482 train loss:3.521437 +step:7483 train loss:3.588022 +step:7484 train loss:3.489919 +step:7485 train loss:3.529369 +step:7486 train loss:3.459318 +step:7487 train loss:3.423548 +step:7488 train loss:3.545350 +step:7489 train loss:3.571620 +step:7490 train loss:3.540136 +step:7491 train loss:3.513256 +step:7492 train loss:3.543753 +step:7493 train loss:3.482669 +step:7494 train loss:3.554398 +step:7495 train loss:3.534436 +step:7496 train loss:3.536460 +step:7497 train loss:3.597374 +step:7498 train loss:3.575428 +step:7499 train loss:3.548419 +step:7500 validation loss:3.473572 total_sharp:2.0022e-02 L1_sharp:2.0026e-02 L2_sharp:1.4969e-03 L3_sharp:3.2276e-03 L4_sharp:5.2817e-03 L5_sharp:2.6432e-03 L6_sharp:2.4069e-03 L7_sharp:4.0422e-03 L8_sharp:5.0782e-03 L9_sharp:4.4678e-03 L10_sharp:2.3803e-03 L11_sharp:1.8055e-03 L12_sharp:6.7328e-03 total_fnorm:2.2849e+00 total_l1_linf:2.0391e+04 total_spectral:2.2849e+00 L1_fnorm:5.2190e-01 L2_fnorm:5.3310e-01 L3_fnorm:5.2794e-01 L4_fnorm:5.3415e-01 L5_fnorm:5.2067e-01 L6_fnorm:5.2924e-01 L7_fnorm:5.3523e-01 L8_fnorm:5.3189e-01 L9_fnorm:5.3788e-01 L10_fnorm:5.5237e-01 L11_fnorm:5.5230e-01 L12_fnorm:5.4510e-01 L1_l1linf:7.0641e-01 L2_l1linf:6.5459e-01 L3_l1linf:6.6898e-01 L4_l1linf:6.6163e-01 
L5_l1linf:7.1014e-01 L6_l1linf:6.5531e-01 L7_l1linf:6.2086e-01 L8_l1linf:5.9358e-01 L9_l1linf:5.6235e-01 L10_l1linf:5.8706e-01 L11_l1linf:5.9572e-01 L12_l1linf:6.1469e-01 L1_spectral:9.7196e-02 L2_spectral:8.0264e-02 L3_spectral:1.0166e-01 L4_spectral:1.0130e-01 L5_spectral:8.7978e-02 L6_spectral:9.8507e-02 L7_spectral:9.3117e-02 L8_spectral:8.7164e-02 L9_spectral:6.7778e-02 L10_spectral:5.5931e-02 L11_spectral:6.6274e-02 L12_spectral:9.4726e-02 v_norm:2.2849e+00 cos_v_-g_hvp:7.3070e-02 g_hvp_norm:5.0604e-01 cos_v_-g_t:8.6286e-02 g_t_norm:4.2948e-01 hv_norm:1.0210e+00 cos_v_hv:4.4809e-02 hg_norm:1.4800e+01 cos_g_hg:7.3963e-01 v_par:8.4214e-03 v_perp:2.2849e+00 L1_cos_v_neg_g:1.5908e-01 L1_v_norm:5.2190e-01 L2_cos_v_neg_g:5.9701e-02 L2_v_norm:5.3310e-01 L3_cos_v_neg_g:5.4647e-02 L3_v_norm:5.2794e-01 L4_cos_v_neg_g:7.6945e-02 L4_v_norm:5.3415e-01 L5_cos_v_neg_g:5.4971e-02 L5_v_norm:5.2067e-01 L6_cos_v_neg_g:7.0699e-02 L6_v_norm:5.2924e-01 L7_cos_v_neg_g:7.4880e-02 L7_v_norm:5.3523e-01 L8_cos_v_neg_g:7.8331e-02 L8_v_norm:5.3189e-01 L9_cos_v_neg_g:7.3376e-02 L9_v_norm:5.3788e-01 L10_cos_v_neg_g:7.9605e-02 L10_v_norm:5.5237e-01 L11_cos_v_neg_g:8.6046e-02 L11_v_norm:5.5230e-01 L12_cos_v_neg_g:1.3785e-01 L12_v_norm:5.4510e-01 +step:7500 train loss:3.550727 +step:7501 train loss:3.553214 +step:7502 train loss:3.552290 +step:7503 train loss:3.549495 +step:7504 train loss:3.513960 +step:7505 train loss:3.512241 +step:7506 train loss:3.505517 +step:7507 train loss:3.525737 +step:7508 train loss:3.543791 +step:7509 train loss:3.554888 +step:7510 train loss:3.525183 +step:7511 train loss:3.607593 +step:7512 train loss:3.535102 +step:7513 train loss:3.575606 +step:7514 train loss:3.502654 +step:7515 train loss:3.462433 +step:7516 train loss:3.475917 +step:7517 train loss:3.539281 +step:7518 train loss:3.523401 +step:7519 train loss:3.548101 +step:7520 train loss:3.506048 +step:7521 train loss:3.515382 +step:7522 train loss:3.511915 +step:7523 train loss:3.506918 +step:7524 train loss:3.549514 +step:7525 train loss:3.539381 +step:7526 train loss:3.516174 +step:7527 train loss:3.521519 +step:7528 train loss:3.566238 +step:7529 train loss:3.529781 +step:7530 train loss:3.470116 +step:7531 train loss:3.584920 +step:7532 train loss:3.527862 +step:7533 train loss:3.579551 +step:7534 train loss:3.591647 +step:7535 train loss:3.508254 +step:7536 train loss:3.516348 +step:7537 train loss:3.549632 +step:7538 train loss:3.522871 +step:7539 train loss:3.555928 +step:7540 train loss:3.528557 +step:7541 train loss:3.511804 +step:7542 train loss:3.557806 +step:7543 train loss:3.509560 +step:7544 train loss:3.491472 +step:7545 train loss:3.503058 +step:7546 train loss:3.465056 +step:7547 train loss:3.509495 +step:7548 train loss:3.454862 +step:7549 train loss:3.500983 +step:7550 train loss:3.428750 +step:7551 train loss:3.473617 +step:7552 train loss:3.481630 +step:7553 train loss:3.467521 +step:7554 train loss:3.481652 +step:7555 train loss:3.476106 +step:7556 train loss:3.531721 +step:7557 train loss:3.495126 +step:7558 train loss:3.485073 +step:7559 train loss:3.434095 +step:7560 train loss:3.503803 +step:7561 train loss:3.514461 +step:7562 train loss:3.502689 +step:7563 train loss:3.527807 +step:7564 train loss:3.516443 +step:7565 train loss:3.490320 +step:7566 train loss:3.477770 +step:7567 train loss:3.516732 +step:7568 train loss:3.548033 +step:7569 train loss:3.657327 +step:7570 train loss:3.570270 +step:7571 train loss:3.476264 +step:7572 train loss:3.511320 +step:7573 train loss:3.486397 +step:7574 train 
loss:3.498358 +step:7575 train loss:3.502831 +step:7576 train loss:3.517061 +step:7577 train loss:3.527915 +step:7578 train loss:3.495487 +step:7579 train loss:3.477578 +step:7580 train loss:3.483705 +step:7581 train loss:3.503690 +step:7582 train loss:3.473680 +step:7583 train loss:3.490157 +step:7584 train loss:3.511031 +step:7585 train loss:3.468728 +step:7586 train loss:3.535651 +step:7587 train loss:3.448040 +step:7588 train loss:3.471215 +step:7589 train loss:3.476341 +step:7590 train loss:3.496711 +step:7591 train loss:3.519699 +step:7592 train loss:3.577824 +step:7593 train loss:3.532627 +step:7594 train loss:3.460965 +step:7595 train loss:3.446691 +step:7596 train loss:3.470383 +step:7597 train loss:3.491200 +step:7598 train loss:3.505279 +step:7599 train loss:3.461204 +step:7600 train loss:3.466343 +step:7601 train loss:3.459974 +step:7602 train loss:3.517320 +step:7603 train loss:3.466250 +step:7604 train loss:3.516209 +step:7605 train loss:3.513407 +step:7606 train loss:3.493864 +step:7607 train loss:3.544179 +step:7608 train loss:3.484223 +step:7609 train loss:3.469551 +step:7610 train loss:3.479574 +step:7611 train loss:3.510541 +step:7612 train loss:3.459755 +step:7613 train loss:3.529644 +step:7614 train loss:3.524498 +step:7615 train loss:3.504340 +step:7616 train loss:3.487720 +step:7617 train loss:3.421489 +step:7618 train loss:3.465247 +step:7619 train loss:3.446928 +step:7620 train loss:3.471552 +step:7621 train loss:3.439763 +step:7622 train loss:3.539828 +step:7623 train loss:3.499439 +step:7624 train loss:3.528167 +step:7625 train loss:3.482547 +step:7626 train loss:3.474635 +step:7627 train loss:3.512132 +step:7628 train loss:3.476878 +step:7629 train loss:3.502452 +step:7630 train loss:3.456594 +step:7631 train loss:3.540940 +step:7632 train loss:3.505296 +step:7633 train loss:3.460357 +step:7634 train loss:3.440922 +step:7635 train loss:3.515817 +step:7636 train loss:3.474898 +step:7637 train loss:3.469100 +step:7638 train loss:3.542246 +step:7639 train loss:3.534845 +step:7640 train loss:3.589651 +step:7641 train loss:3.519659 +step:7642 train loss:3.541803 +step:7643 train loss:3.442466 +step:7644 train loss:3.504697 +step:7645 train loss:3.473641 +step:7646 train loss:3.524182 +step:7647 train loss:3.493431 +step:7648 train loss:3.559645 +step:7649 train loss:3.520038 +step:7650 train loss:3.444474 +step:7651 train loss:3.496846 +step:7652 train loss:3.508363 +step:7653 train loss:3.514472 +step:7654 train loss:3.478182 +step:7655 train loss:3.535289 +step:7656 train loss:3.506735 +step:7657 train loss:3.480401 +step:7658 train loss:3.494823 +step:7659 train loss:3.545946 +step:7660 train loss:3.525519 +step:7661 train loss:3.440987 +step:7662 train loss:3.508365 +step:7663 train loss:3.449643 +step:7664 train loss:3.481769 +step:7665 train loss:3.467522 +step:7666 train loss:3.484092 +step:7667 train loss:3.446841 +step:7668 train loss:3.490128 +step:7669 train loss:3.479292 +step:7670 train loss:3.475199 +step:7671 train loss:3.485641 +step:7672 train loss:3.511777 +step:7673 train loss:3.489820 +step:7674 train loss:3.524033 +step:7675 train loss:3.490911 +step:7676 train loss:3.485761 +step:7677 train loss:3.499733 +step:7678 train loss:3.487381 +step:7679 train loss:3.496207 +step:7680 train loss:3.519960 +step:7681 train loss:3.535110 +step:7682 train loss:3.510782 +step:7683 train loss:3.473922 +step:7684 train loss:3.494360 +step:7685 train loss:3.507281 +step:7686 train loss:3.458694 +step:7687 train loss:3.504942 +step:7688 train loss:3.515105 
+step:7689 train loss:3.474010 +step:7690 train loss:3.434451 +step:7691 train loss:3.511163 +step:7692 train loss:3.511434 +step:7693 train loss:3.480720 +step:7694 train loss:3.524016 +step:7695 train loss:3.446918 +step:7696 train loss:3.480712 +step:7697 train loss:3.473640 +step:7698 train loss:3.484148 +step:7699 train loss:3.500288 +step:7700 train loss:3.519450 +step:7701 train loss:3.488812 +step:7702 train loss:3.524004 +step:7703 train loss:3.500254 +step:7704 train loss:3.480137 +step:7705 train loss:3.482016 +step:7706 train loss:3.511069 +step:7707 train loss:3.473926 +step:7708 train loss:3.497838 +step:7709 train loss:3.517125 +step:7710 train loss:3.496029 +step:7711 train loss:3.511320 +step:7712 train loss:3.512345 +step:7713 train loss:3.572210 +step:7714 train loss:3.505385 +step:7715 train loss:3.517484 +step:7716 train loss:3.497439 +step:7717 train loss:3.483937 +step:7718 train loss:3.494435 +step:7719 train loss:3.458090 +step:7720 train loss:3.488192 +step:7721 train loss:3.474691 +step:7722 train loss:3.486918 +step:7723 train loss:3.516689 +step:7724 train loss:3.515612 +step:7725 train loss:3.470824 +step:7726 train loss:3.443218 +step:7727 train loss:3.477347 +step:7728 train loss:3.493762 +step:7729 train loss:3.488944 +step:7730 train loss:3.494722 +step:7731 train loss:3.481142 +step:7732 train loss:3.481839 +step:7733 train loss:3.539721 +step:7734 train loss:3.509439 +step:7735 train loss:3.473861 +step:7736 train loss:3.556339 +step:7737 train loss:3.531920 +step:7738 train loss:3.635961 +step:7739 train loss:3.525137 +step:7740 train loss:3.541308 +step:7741 train loss:3.550426 +step:7742 train loss:3.547022 +step:7743 train loss:3.486408 +step:7744 train loss:3.508780 +step:7745 train loss:3.548162 +step:7746 train loss:3.543922 +step:7747 train loss:3.499336 +step:7748 train loss:3.529086 +step:7749 train loss:3.527484 +step:7750 validation loss:3.456220 +step:7750 train loss:3.564581 +step:7751 train loss:3.531044 +step:7752 train loss:3.508654 +step:7753 train loss:3.504206 +step:7754 train loss:3.480307 +step:7755 train loss:3.552485 +step:7756 train loss:3.534157 +step:7757 train loss:3.525236 +step:7758 train loss:3.524653 +step:7759 train loss:3.563363 +step:7760 train loss:3.560416 +step:7761 train loss:3.543639 +step:7762 train loss:3.513496 +step:7763 train loss:3.475158 +step:7764 train loss:3.504437 +step:7765 train loss:3.482388 +step:7766 train loss:3.535610 +step:7767 train loss:3.568074 +step:7768 train loss:3.523724 +step:7769 train loss:3.541276 +step:7770 train loss:3.577562 +step:7771 train loss:3.584821 +step:7772 train loss:3.487368 +step:7773 train loss:3.525858 +step:7774 train loss:3.555780 +step:7775 train loss:3.505044 +step:7776 train loss:3.456897 +step:7777 train loss:3.526999 +step:7778 train loss:3.573229 +step:7779 train loss:3.521184 +step:7780 train loss:3.499405 +step:7781 train loss:3.514285 +step:7782 train loss:3.502605 +step:7783 train loss:3.560648 +step:7784 train loss:3.494238 +step:7785 train loss:3.500159 +step:7786 train loss:3.526825 +step:7787 train loss:3.568743 +step:7788 train loss:3.494355 +step:7789 train loss:3.525188 +step:7790 train loss:3.554581 +step:7791 train loss:3.576295 +step:7792 train loss:3.560345 +step:7793 train loss:3.560704 +step:7794 train loss:3.527480 +step:7795 train loss:3.490070 +step:7796 train loss:3.571819 +step:7797 train loss:3.543023 +step:7798 train loss:3.514446 +step:7799 train loss:3.555943 +step:7800 train loss:3.578420 +step:7801 train loss:3.564830 +step:7802 
train loss:3.546907 +step:7803 train loss:3.525024 +step:7804 train loss:3.559547 +step:7805 train loss:3.523843 +step:7806 train loss:3.534994 +step:7807 train loss:3.533408 +step:7808 train loss:3.496053 +step:7809 train loss:3.487253 +step:7810 train loss:3.495829 +step:7811 train loss:3.512221 +step:7812 train loss:3.531746 +step:7813 train loss:3.524389 +step:7814 train loss:3.608222 +step:7815 train loss:3.517500 +step:7816 train loss:3.533779 +step:7817 train loss:3.465275 +step:7818 train loss:3.466998 +step:7819 train loss:3.527618 +step:7820 train loss:3.456476 +step:7821 train loss:3.520689 +step:7822 train loss:3.580306 +step:7823 train loss:3.526388 +step:7824 train loss:3.479068 +step:7825 train loss:3.533891 +step:7826 train loss:3.511336 +step:7827 train loss:3.518912 +step:7828 train loss:3.584045 +step:7829 train loss:3.533648 +step:7830 train loss:3.489186 +step:7831 train loss:3.507746 +step:7832 train loss:3.557832 +step:7833 train loss:3.507721 +step:7834 train loss:3.524337 +step:7835 train loss:3.587204 +step:7836 train loss:3.503196 +step:7837 train loss:3.429702 +step:7838 train loss:3.540197 +step:7839 train loss:3.545276 +step:7840 train loss:3.460000 +step:7841 train loss:3.532411 +step:7842 train loss:3.499238 +step:7843 train loss:3.536290 +step:7844 train loss:3.539058 +step:7845 train loss:3.524777 +step:7846 train loss:3.572428 +step:7847 train loss:3.494504 +step:7848 train loss:3.455475 +step:7849 train loss:3.590057 +step:7850 train loss:3.518147 +step:7851 train loss:3.569129 +step:7852 train loss:3.558169 +step:7853 train loss:3.511996 +step:7854 train loss:3.519081 +step:7855 train loss:3.544288 +step:7856 train loss:3.551928 +step:7857 train loss:3.478352 +step:7858 train loss:3.543265 +step:7859 train loss:3.519523 +step:7860 train loss:3.543511 +step:7861 train loss:3.518007 +step:7862 train loss:3.532176 +step:7863 train loss:3.566472 +step:7864 train loss:3.537002 +step:7865 train loss:3.573842 +step:7866 train loss:3.471860 +step:7867 train loss:3.477780 +step:7868 train loss:3.481918 +step:7869 train loss:3.548190 +step:7870 train loss:3.464320 +step:7871 train loss:3.528620 +step:7872 train loss:3.521415 +step:7873 train loss:3.515622 +step:7874 train loss:3.471007 +step:7875 train loss:3.524029 +step:7876 train loss:3.473419 +step:7877 train loss:3.550531 +step:7878 train loss:3.571364 +step:7879 train loss:3.531126 +step:7880 train loss:3.546499 +step:7881 train loss:3.527824 +step:7882 train loss:3.517680 +step:7883 train loss:3.522923 +step:7884 train loss:3.517168 +step:7885 train loss:3.591155 +step:7886 train loss:3.503526 +step:7887 train loss:3.539925 +step:7888 train loss:3.530061 +step:7889 train loss:3.518235 +step:7890 train loss:3.506058 +step:7891 train loss:3.546716 +step:7892 train loss:3.631124 +step:7893 train loss:3.518111 +step:7894 train loss:3.558569 +step:7895 train loss:3.590850 +step:7896 train loss:3.516615 +step:7897 train loss:3.528557 +step:7898 train loss:3.543145 +step:7899 train loss:3.507267 +step:7900 train loss:3.469364 +step:7901 train loss:3.523385 +step:7902 train loss:3.553931 +step:7903 train loss:3.550756 +step:7904 train loss:3.530172 +step:7905 train loss:3.537071 +step:7906 train loss:3.535406 +step:7907 train loss:3.459989 +step:7908 train loss:3.547814 +step:7909 train loss:3.520098 +step:7910 train loss:3.477580 +step:7911 train loss:3.486489 +step:7912 train loss:3.541684 +step:7913 train loss:3.481140 +step:7914 train loss:3.461018 +step:7915 train loss:3.521666 +step:7916 train loss:3.485701 
+step:7917 train loss:3.484677 +step:7918 train loss:3.530831 +step:7919 train loss:3.539375 +step:7920 train loss:3.513565 +step:7921 train loss:3.510301 +step:7922 train loss:3.578635 +step:7923 train loss:3.536443 +step:7924 train loss:3.504170 +step:7925 train loss:3.530672 +step:7926 train loss:3.550240 +step:7927 train loss:3.498557 +step:7928 train loss:3.498296 +step:7929 train loss:3.463589 +step:7930 train loss:3.502033 +step:7931 train loss:3.534114 +step:7932 train loss:3.493441 +step:7933 train loss:3.500113 +step:7934 train loss:3.509590 +step:7935 train loss:3.489621 +step:7936 train loss:3.509168 +step:7937 train loss:3.460621 +step:7938 train loss:3.479560 +step:7939 train loss:3.450429 +step:7940 train loss:3.457979 +step:7941 train loss:3.551484 +step:7942 train loss:3.437737 +step:7943 train loss:3.502933 +step:7944 train loss:3.474498 +step:7945 train loss:3.457554 +step:7946 train loss:3.549473 +step:7947 train loss:3.553089 +step:7948 train loss:3.526282 +step:7949 train loss:3.570574 +step:7950 train loss:3.536604 +step:7951 train loss:3.492774 +step:7952 train loss:3.535160 +step:7953 train loss:3.513844 +step:7954 train loss:3.494040 +step:7955 train loss:3.588972 +step:7956 train loss:3.527621 +step:7957 train loss:3.490424 +step:7958 train loss:3.503504 +step:7959 train loss:3.493070 +step:7960 train loss:3.511632 +step:7961 train loss:3.486469 +step:7962 train loss:3.437120 +step:7963 train loss:3.518519 +step:7964 train loss:3.520175 +step:7965 train loss:3.484716 +step:7966 train loss:3.500879 +step:7967 train loss:3.536435 +step:7968 train loss:3.492751 +step:7969 train loss:3.533151 +step:7970 train loss:3.514744 +step:7971 train loss:3.550482 +step:7972 train loss:3.509709 +step:7973 train loss:3.563633 +step:7974 train loss:3.517792 +step:7975 train loss:3.586380 +step:7976 train loss:3.562601 +step:7977 train loss:3.458603 +step:7978 train loss:3.521679 +step:7979 train loss:3.508829 +step:7980 train loss:3.496527 +step:7981 train loss:3.544699 +step:7982 train loss:3.499707 +step:7983 train loss:3.552626 +step:7984 train loss:3.546567 +step:7985 train loss:3.474954 +step:7986 train loss:3.512517 +step:7987 train loss:3.500271 +step:7988 train loss:3.519875 +step:7989 train loss:3.550467 +step:7990 train loss:3.529092 +step:7991 train loss:3.441386 +step:7992 train loss:3.445759 +step:7993 train loss:3.555144 +step:7994 train loss:3.494744 +step:7995 train loss:3.529113 +step:7996 train loss:3.523986 +step:7997 train loss:3.475001 +step:7998 train loss:3.589216 +step:7999 train loss:3.598368 +step:8000 validation loss:3.451751 total_sharp:5.2027e-03 L1_sharp:4.3540e-03 L2_sharp:4.4631e-04 L3_sharp:1.0968e-03 L4_sharp:1.2371e-03 L5_sharp:7.3889e-04 L6_sharp:8.7593e-04 L7_sharp:1.2973e-03 L8_sharp:1.9790e-03 L9_sharp:1.8222e-03 L10_sharp:9.9895e-04 L11_sharp:9.0651e-04 L12_sharp:2.0623e-03 total_fnorm:2.2461e+00 total_l1_linf:1.9959e+04 total_spectral:2.2461e+00 L1_fnorm:4.8098e-01 L2_fnorm:4.9992e-01 L3_fnorm:5.1285e-01 L4_fnorm:5.1638e-01 L5_fnorm:5.0270e-01 L6_fnorm:5.1701e-01 L7_fnorm:5.1916e-01 L8_fnorm:5.1842e-01 L9_fnorm:5.2869e-01 L10_fnorm:5.4927e-01 L11_fnorm:5.5623e-01 L12_fnorm:5.4697e-01 L1_l1linf:5.8829e-01 L2_l1linf:5.7766e-01 L3_l1linf:6.5195e-01 L4_l1linf:6.7322e-01 L5_l1linf:6.0870e-01 L6_l1linf:6.2822e-01 L7_l1linf:5.9588e-01 L8_l1linf:5.6197e-01 L9_l1linf:5.8078e-01 L10_l1linf:5.7791e-01 L11_l1linf:5.7341e-01 L12_l1linf:5.9820e-01 L1_spectral:8.5307e-02 L2_spectral:7.8639e-02 L3_spectral:8.8106e-02 L4_spectral:8.6241e-02 
L5_spectral:8.0682e-02 L6_spectral:8.4307e-02 L7_spectral:7.5712e-02 L8_spectral:7.3542e-02 L9_spectral:6.4578e-02 L10_spectral:5.1154e-02 L11_spectral:5.5633e-02 L12_spectral:7.4348e-02 v_norm:2.2461e+00 cos_v_-g_hvp:5.9138e-02 g_hvp_norm:4.0480e-01 cos_v_-g_t:7.0829e-02 g_t_norm:3.4035e-01 hv_norm:4.8313e-01 cos_v_hv:2.4188e-02 hg_norm:7.3220e+00 cos_g_hg:6.8414e-01 v_par:6.5365e-03 v_perp:2.2460e+00 L1_cos_v_neg_g:1.1424e-01 L1_v_norm:4.8098e-01 L2_cos_v_neg_g:5.6211e-02 L2_v_norm:4.9992e-01 L3_cos_v_neg_g:4.5147e-02 L3_v_norm:5.1285e-01 L4_cos_v_neg_g:5.5239e-02 L4_v_norm:5.1638e-01 L5_cos_v_neg_g:3.6515e-02 L5_v_norm:5.0270e-01 L6_cos_v_neg_g:4.7886e-02 L6_v_norm:5.1701e-01 L7_cos_v_neg_g:5.7452e-02 L7_v_norm:5.1916e-01 L8_cos_v_neg_g:5.4779e-02 L8_v_norm:5.1842e-01 L9_cos_v_neg_g:5.4400e-02 L9_v_norm:5.2869e-01 L10_cos_v_neg_g:6.4740e-02 L10_v_norm:5.4927e-01 L11_cos_v_neg_g:7.2939e-02 L11_v_norm:5.5623e-01 L12_cos_v_neg_g:1.1486e-01 L12_v_norm:5.4697e-01 +step:8000 train loss:3.470186 +step:8001 train loss:3.544721 +step:8002 train loss:3.458920 +step:8003 train loss:3.485217 +step:8004 train loss:3.523364 +step:8005 train loss:3.642560 +step:8006 train loss:3.548382 +step:8007 train loss:3.516791 +step:8008 train loss:3.497494 +step:8009 train loss:3.500680 +step:8010 train loss:3.557734 +step:8011 train loss:3.537195 +step:8012 train loss:3.465625 +step:8013 train loss:3.538305 +step:8014 train loss:3.490629 +step:8015 train loss:3.507551 +step:8016 train loss:3.489505 +step:8017 train loss:3.473171 +step:8018 train loss:3.537292 +step:8019 train loss:3.491158 +step:8020 train loss:3.492831 +step:8021 train loss:3.498350 +step:8022 train loss:3.543017 +step:8023 train loss:3.622919 +step:8024 train loss:3.516427 +step:8025 train loss:3.546769 +step:8026 train loss:3.492668 +step:8027 train loss:3.515069 +step:8028 train loss:3.433245 +step:8029 train loss:3.550928 +step:8030 train loss:3.520352 +step:8031 train loss:3.534567 +step:8032 train loss:3.518845 +step:8033 train loss:3.521373 +step:8034 train loss:3.462046 +step:8035 train loss:3.443655 +step:8036 train loss:3.505281 +step:8037 train loss:3.429185 +step:8038 train loss:3.531840 +step:8039 train loss:3.557934 +step:8040 train loss:3.491982 +step:8041 train loss:3.462467 +step:8042 train loss:3.553112 +step:8043 train loss:3.561128 +step:8044 train loss:3.535748 +step:8045 train loss:3.539742 +step:8046 train loss:3.503940 +step:8047 train loss:3.584329 +step:8048 train loss:3.514951 +step:8049 train loss:3.536623 +step:8050 train loss:3.543505 +step:8051 train loss:3.484321 +step:8052 train loss:3.511304 +step:8053 train loss:3.569391 +step:8054 train loss:3.496082 +step:8055 train loss:3.505234 +step:8056 train loss:3.487519 +step:8057 train loss:3.516947 +step:8058 train loss:3.496226 +step:8059 train loss:3.493726 +step:8060 train loss:3.480490 +step:8061 train loss:3.491297 +step:8062 train loss:3.498101 +step:8063 train loss:3.497973 +step:8064 train loss:3.479011 +step:8065 train loss:3.495245 +step:8066 train loss:3.478629 +step:8067 train loss:3.507077 +step:8068 train loss:3.508929 +step:8069 train loss:3.528485 +step:8070 train loss:3.556807 +step:8071 train loss:3.515968 +step:8072 train loss:3.540106 +step:8073 train loss:3.488522 +step:8074 train loss:3.555961 +step:8075 train loss:3.531864 +step:8076 train loss:3.557265 +step:8077 train loss:3.486259 +step:8078 train loss:3.461866 +step:8079 train loss:3.498554 +step:8080 train loss:3.520377 +step:8081 train loss:3.455433 +step:8082 train loss:3.505068 
+step:8083 train loss:3.442900 +step:8084 train loss:3.486034 +step:8085 train loss:3.466022 +step:8086 train loss:3.555175 +step:8087 train loss:3.459815 +step:8088 train loss:3.520494 +step:8089 train loss:3.589279 +step:8090 train loss:3.522138 +step:8091 train loss:3.611067 +step:8092 train loss:3.544631 +step:8093 train loss:3.496103 +step:8094 train loss:3.533824 +step:8095 train loss:3.528152 +step:8096 train loss:3.569413 +step:8097 train loss:3.496143 +step:8098 train loss:3.478388 +step:8099 train loss:3.512943 +step:8100 train loss:3.471883 +step:8101 train loss:3.440711 +step:8102 train loss:3.507470 +step:8103 train loss:3.446424 +step:8104 train loss:3.445757 +step:8105 train loss:3.551726 +step:8106 train loss:3.550069 +step:8107 train loss:3.547178 +step:8108 train loss:3.542670 +step:8109 train loss:3.508535 +step:8110 train loss:3.478249 +step:8111 train loss:3.513032 +step:8112 train loss:3.522905 +step:8113 train loss:3.505694 +step:8114 train loss:3.468195 +step:8115 train loss:3.551366 +step:8116 train loss:3.438128 +step:8117 train loss:3.497627 +step:8118 train loss:3.478950 +step:8119 train loss:3.566765 +step:8120 train loss:3.454576 +step:8121 train loss:3.424504 +step:8122 train loss:3.462954 +step:8123 train loss:3.451207 +step:8124 train loss:3.522764 +step:8125 train loss:3.482137 +step:8126 train loss:3.530765 +step:8127 train loss:3.470824 +step:8128 train loss:3.517906 +step:8129 train loss:3.436949 +step:8130 train loss:3.493844 +step:8131 train loss:3.480467 +step:8132 train loss:3.523254 +step:8133 train loss:3.491832 +step:8134 train loss:3.496918 +step:8135 train loss:3.507318 +step:8136 train loss:3.414116 +step:8137 train loss:3.377074 +step:8138 train loss:3.439264 +step:8139 train loss:3.472798 +step:8140 train loss:3.459177 +step:8141 train loss:3.509198 +step:8142 train loss:3.455999 +step:8143 train loss:3.477525 +step:8144 train loss:3.509333 +step:8145 train loss:3.455579 +step:8146 train loss:3.540922 +step:8147 train loss:3.502355 +step:8148 train loss:3.509686 +step:8149 train loss:3.458121 +step:8150 train loss:3.504453 +step:8151 train loss:3.469987 +step:8152 train loss:3.454904 +step:8153 train loss:3.437579 +step:8154 train loss:3.527219 +step:8155 train loss:3.476810 +step:8156 train loss:3.521616 +step:8157 train loss:3.421922 +step:8158 train loss:3.433979 +step:8159 train loss:3.462488 +step:8160 train loss:3.439156 +step:8161 train loss:3.490369 +step:8162 train loss:3.516489 +step:8163 train loss:3.402235 +step:8164 train loss:3.423692 +step:8165 train loss:3.504459 +step:8166 train loss:3.456931 +step:8167 train loss:3.447884 +step:8168 train loss:3.438651 +step:8169 train loss:3.394120 +step:8170 train loss:3.493592 +step:8171 train loss:3.425091 +step:8172 train loss:3.498344 +step:8173 train loss:3.428294 +step:8174 train loss:3.519142 +step:8175 train loss:3.457549 +step:8176 train loss:3.516922 +step:8177 train loss:3.404923 +step:8178 train loss:3.444656 +step:8179 train loss:3.450035 +step:8180 train loss:3.468763 +step:8181 train loss:3.463292 +step:8182 train loss:3.430816 +step:8183 train loss:3.461795 +step:8184 train loss:3.373291 +step:8185 train loss:3.488177 +step:8186 train loss:3.497571 +step:8187 train loss:3.520470 +step:8188 train loss:3.484841 +step:8189 train loss:3.453059 +step:8190 train loss:3.450893 +step:8191 train loss:3.423014 +step:8192 train loss:3.481840 +step:8193 train loss:3.450235 +step:8194 train loss:3.479984 +step:8195 train loss:3.441965 +step:8196 train loss:3.491272 +step:8197 train 
loss:3.430501 +step:8198 train loss:3.615350 +step:8199 train loss:3.616446 +step:8200 train loss:3.494449 +step:8201 train loss:3.466793 +step:8202 train loss:3.540350 +step:8203 train loss:3.438416 +step:8204 train loss:3.490440 +step:8205 train loss:3.458013 +step:8206 train loss:3.479620 +step:8207 train loss:3.477159 +step:8208 train loss:3.439004 +step:8209 train loss:3.477658 +step:8210 train loss:3.378816 +step:8211 train loss:3.466402 +step:8212 train loss:3.427894 +step:8213 train loss:3.492356 +step:8214 train loss:3.463291 +step:8215 train loss:3.458073 +step:8216 train loss:3.480766 +step:8217 train loss:3.386372 +step:8218 train loss:3.462389 +step:8219 train loss:3.432478 +step:8220 train loss:3.479528 +step:8221 train loss:3.422358 +step:8222 train loss:3.485660 +step:8223 train loss:3.487746 +step:8224 train loss:3.554857 +step:8225 train loss:3.450584 +step:8226 train loss:3.533063 +step:8227 train loss:3.480950 +step:8228 train loss:3.436756 +step:8229 train loss:3.424849 +step:8230 train loss:3.471659 +step:8231 train loss:3.552832 +step:8232 train loss:3.482023 +step:8233 train loss:3.473307 +step:8234 train loss:3.449190 +step:8235 train loss:3.484166 +step:8236 train loss:3.503721 +step:8237 train loss:3.484862 +step:8238 train loss:3.434624 +step:8239 train loss:3.509961 +step:8240 train loss:3.399320 +step:8241 train loss:3.548101 +step:8242 train loss:3.481661 +step:8243 train loss:3.575786 +step:8244 train loss:3.457859 +step:8245 train loss:3.493024 +step:8246 train loss:3.442303 +step:8247 train loss:3.518198 +step:8248 train loss:3.463652 +step:8249 train loss:3.456910 +step:8250 validation loss:3.439936 +step:8250 train loss:3.443061 +step:8251 train loss:3.433451 +step:8252 train loss:3.497934 +step:8253 train loss:3.397971 +step:8254 train loss:3.416455 +step:8255 train loss:3.442482 +step:8256 train loss:3.477847 +step:8257 train loss:3.440478 +step:8258 train loss:3.446756 +step:8259 train loss:3.485178 +step:8260 train loss:3.465300 +step:8261 train loss:3.514686 +step:8262 train loss:3.491647 +step:8263 train loss:3.510835 +step:8264 train loss:3.478651 +step:8265 train loss:3.526867 +step:8266 train loss:3.473453 +step:8267 train loss:3.491019 +step:8268 train loss:3.469769 +step:8269 train loss:3.604733 +step:8270 train loss:3.417296 +step:8271 train loss:3.505652 +step:8272 train loss:3.439824 +step:8273 train loss:3.454588 +step:8274 train loss:3.510109 +step:8275 train loss:3.481318 +step:8276 train loss:3.568581 +step:8277 train loss:3.452900 +step:8278 train loss:3.464771 +step:8279 train loss:3.408144 +step:8280 train loss:3.494184 +step:8281 train loss:3.436295 +step:8282 train loss:3.517497 +step:8283 train loss:3.474843 +step:8284 train loss:3.464020 +step:8285 train loss:3.563070 +step:8286 train loss:3.495983 +step:8287 train loss:3.479656 +step:8288 train loss:3.441877 +step:8289 train loss:3.505568 +step:8290 train loss:3.455199 +step:8291 train loss:3.524107 +step:8292 train loss:3.496406 +step:8293 train loss:3.544338 +step:8294 train loss:3.588837 +step:8295 train loss:3.508978 +step:8296 train loss:3.471184 +step:8297 train loss:3.451577 +step:8298 train loss:3.507303 +step:8299 train loss:3.419718 +step:8300 train loss:3.444888 +step:8301 train loss:3.491737 +step:8302 train loss:3.518436 +step:8303 train loss:3.400343 +step:8304 train loss:3.496573 +step:8305 train loss:3.513380 +step:8306 train loss:3.475056 +step:8307 train loss:3.451407 +step:8308 train loss:3.465001 +step:8309 train loss:3.456981 +step:8310 train loss:3.495203 
+step:8311 train loss:3.441735 +step:8312 train loss:3.481994 +step:8313 train loss:3.475065 +step:8314 train loss:3.497110 +step:8315 train loss:3.473056 +step:8316 train loss:3.526159 +step:8317 train loss:3.450901 +step:8318 train loss:3.507165 +step:8319 train loss:3.449683 +step:8320 train loss:3.395433 +step:8321 train loss:3.542667 +step:8322 train loss:3.406503 +step:8323 train loss:3.517554 +step:8324 train loss:3.489922 +step:8325 train loss:3.422990 +step:8326 train loss:3.471842 +step:8327 train loss:3.415285 +step:8328 train loss:3.579651 +step:8329 train loss:3.439893 +step:8330 train loss:3.452871 +step:8331 train loss:3.534282 +step:8332 train loss:3.484689 +step:8333 train loss:3.507369 +step:8334 train loss:3.458687 +step:8335 train loss:3.512302 +step:8336 train loss:3.448466 +step:8337 train loss:3.451450 +step:8338 train loss:3.471796 +step:8339 train loss:3.453290 +step:8340 train loss:3.468392 +step:8341 train loss:3.432089 +step:8342 train loss:3.475033 +step:8343 train loss:3.484198 +step:8344 train loss:3.446087 +step:8345 train loss:3.459728 +step:8346 train loss:3.446842 +step:8347 train loss:3.508703 +step:8348 train loss:3.455619 +step:8349 train loss:3.498813 +step:8350 train loss:3.450718 +step:8351 train loss:3.441589 +step:8352 train loss:3.441684 +step:8353 train loss:3.460439 +step:8354 train loss:3.486507 +step:8355 train loss:3.469893 +step:8356 train loss:3.459588 +step:8357 train loss:3.462316 +step:8358 train loss:3.460954 +step:8359 train loss:3.496122 +step:8360 train loss:3.479191 +step:8361 train loss:3.390558 +step:8362 train loss:3.464088 +step:8363 train loss:3.468207 +step:8364 train loss:3.503100 +step:8365 train loss:3.497287 +step:8366 train loss:3.429760 +step:8367 train loss:3.426254 +step:8368 train loss:3.455909 +step:8369 train loss:3.490016 +step:8370 train loss:3.400303 +step:8371 train loss:3.439155 +step:8372 train loss:3.441539 +step:8373 train loss:3.437296 +step:8374 train loss:3.465206 +step:8375 train loss:3.451050 +step:8376 train loss:3.439549 +step:8377 train loss:3.435844 +step:8378 train loss:3.409468 +step:8379 train loss:3.462818 +step:8380 train loss:3.438041 +step:8381 train loss:3.466528 +step:8382 train loss:3.451519 +step:8383 train loss:3.488968 +step:8384 train loss:3.499045 +step:8385 train loss:3.472221 +step:8386 train loss:3.512832 +step:8387 train loss:3.411625 +step:8388 train loss:3.445944 +step:8389 train loss:3.399825 +step:8390 train loss:3.482850 +step:8391 train loss:3.462813 +step:8392 train loss:3.420989 +step:8393 train loss:3.513981 +step:8394 train loss:3.490886 +step:8395 train loss:3.438396 +step:8396 train loss:3.623908 +step:8397 train loss:3.434138 +step:8398 train loss:3.506534 +step:8399 train loss:3.442730 +step:8400 train loss:3.450243 +step:8401 train loss:3.468229 +step:8402 train loss:3.428878 +step:8403 train loss:3.493725 +step:8404 train loss:3.425713 +step:8405 train loss:3.457295 +step:8406 train loss:3.451006 +step:8407 train loss:3.509465 +step:8408 train loss:3.424000 +step:8409 train loss:3.374839 +step:8410 train loss:3.459622 +step:8411 train loss:3.498636 +step:8412 train loss:3.493306 +step:8413 train loss:3.447546 +step:8414 train loss:3.422382 +step:8415 train loss:3.451415 +step:8416 train loss:3.434943 +step:8417 train loss:3.452332 +step:8418 train loss:3.501228 +step:8419 train loss:3.422437 +step:8420 train loss:3.468327 +step:8421 train loss:3.444476 +step:8422 train loss:3.489057 +step:8423 train loss:3.450152 +step:8424 train loss:3.444821 +step:8425 train 
loss:3.487266 +step:8426 train loss:3.426338 +step:8427 train loss:3.497934 +step:8428 train loss:3.396420 +step:8429 train loss:3.428949 +step:8430 train loss:3.463348 +step:8431 train loss:3.435032 +step:8432 train loss:3.472313 +step:8433 train loss:3.434664 +step:8434 train loss:3.456580 +step:8435 train loss:3.445968 +step:8436 train loss:3.464347 +step:8437 train loss:3.475928 +step:8438 train loss:3.410198 +step:8439 train loss:3.484107 +step:8440 train loss:3.498400 +step:8441 train loss:3.520907 +step:8442 train loss:3.465230 +step:8443 train loss:3.516125 +step:8444 train loss:3.459492 +step:8445 train loss:3.404898 +step:8446 train loss:3.450800 +step:8447 train loss:3.505225 +step:8448 train loss:3.393331 +step:8449 train loss:3.450980 +step:8450 train loss:3.399623 +step:8451 train loss:3.458895 +step:8452 train loss:3.452235 +step:8453 train loss:3.436386 +step:8454 train loss:3.503326 +step:8455 train loss:3.409804 +step:8456 train loss:3.473413 +step:8457 train loss:3.442962 +step:8458 train loss:3.428476 +step:8459 train loss:3.512206 +step:8460 train loss:3.445493 +step:8461 train loss:3.473403 +step:8462 train loss:3.459828 +step:8463 train loss:3.425013 +step:8464 train loss:3.447197 +step:8465 train loss:3.470381 +step:8466 train loss:3.545459 +step:8467 train loss:3.428095 +step:8468 train loss:3.423499 +step:8469 train loss:3.448885 +step:8470 train loss:3.469154 +step:8471 train loss:3.511177 +step:8472 train loss:3.411738 +step:8473 train loss:3.486416 +step:8474 train loss:3.467332 +step:8475 train loss:3.446741 +step:8476 train loss:3.474995 +step:8477 train loss:3.452286 +step:8478 train loss:3.475195 +step:8479 train loss:3.466942 +step:8480 train loss:3.427832 +step:8481 train loss:3.493741 +step:8482 train loss:3.446035 +step:8483 train loss:3.540330 +step:8484 train loss:3.453874 +step:8485 train loss:3.399668 +step:8486 train loss:3.466849 +step:8487 train loss:3.406323 +step:8488 train loss:3.433909 +step:8489 train loss:3.542059 +step:8490 train loss:3.461046 +step:8491 train loss:3.439260 +step:8492 train loss:3.491302 +step:8493 train loss:3.409631 +step:8494 train loss:3.428591 +step:8495 train loss:3.388910 +step:8496 train loss:3.559366 +step:8497 train loss:3.628473 +step:8498 train loss:3.675423 +step:8499 train loss:3.616780 +step:8500 validation loss:3.429813 total_sharp:9.8130e-03 L1_sharp:1.6918e-02 L2_sharp:2.6362e-03 L3_sharp:2.4620e-03 L4_sharp:1.3928e-03 L5_sharp:7.5720e-04 L6_sharp:1.3452e-03 L7_sharp:2.1381e-03 L8_sharp:3.0972e-03 L9_sharp:2.6814e-03 L10_sharp:1.6040e-03 L11_sharp:1.6191e-03 L12_sharp:4.8955e-03 total_fnorm:1.7811e+00 total_l1_linf:1.5923e+04 total_spectral:1.7811e+00 L1_fnorm:4.1927e-01 L2_fnorm:4.2259e-01 L3_fnorm:4.1888e-01 L4_fnorm:4.1630e-01 L5_fnorm:4.1185e-01 L6_fnorm:4.2063e-01 L7_fnorm:4.1719e-01 L8_fnorm:4.1510e-01 L9_fnorm:4.1968e-01 L10_fnorm:4.3760e-01 L11_fnorm:4.3826e-01 L12_fnorm:4.3163e-01 L1_l1linf:5.6642e-01 L2_l1linf:4.9514e-01 L3_l1linf:5.8580e-01 L4_l1linf:5.1366e-01 L5_l1linf:4.8867e-01 L6_l1linf:5.3281e-01 L7_l1linf:5.2938e-01 L8_l1linf:4.9479e-01 L9_l1linf:4.8448e-01 L10_l1linf:4.5166e-01 L11_l1linf:4.5968e-01 L12_l1linf:4.9026e-01 L1_spectral:8.2922e-02 L2_spectral:6.4606e-02 L3_spectral:8.1810e-02 L4_spectral:8.2300e-02 L5_spectral:7.2381e-02 L6_spectral:8.2674e-02 L7_spectral:7.8911e-02 L8_spectral:7.5768e-02 L9_spectral:6.1299e-02 L10_spectral:4.7409e-02 L11_spectral:5.9252e-02 L12_spectral:7.1013e-02 v_norm:1.7811e+00 cos_v_-g_hvp:5.6241e-02 g_hvp_norm:5.2453e-01 cos_v_-g_t:6.6747e-02 
g_t_norm:4.4277e-01 hv_norm:5.2477e-01 cos_v_hv:3.3306e-02 hg_norm:1.7707e+01 cos_g_hg:7.3329e-01 v_par:3.8862e-03 v_perp:1.7811e+00 L1_cos_v_neg_g:1.1718e-01 L1_v_norm:4.1927e-01 L2_cos_v_neg_g:4.6341e-02 L2_v_norm:4.2259e-01 L3_cos_v_neg_g:4.7317e-02 L3_v_norm:4.1888e-01 L4_cos_v_neg_g:5.1720e-02 L4_v_norm:4.1630e-01 L5_cos_v_neg_g:3.2040e-02 L5_v_norm:4.1185e-01 L6_cos_v_neg_g:4.1844e-02 L6_v_norm:4.2063e-01 L7_cos_v_neg_g:5.1300e-02 L7_v_norm:4.1719e-01 L8_cos_v_neg_g:4.9440e-02 L8_v_norm:4.1510e-01 L9_cos_v_neg_g:5.1808e-02 L9_v_norm:4.1968e-01 L10_cos_v_neg_g:5.8339e-02 L10_v_norm:4.3760e-01 L11_cos_v_neg_g:6.9579e-02 L11_v_norm:4.3826e-01 L12_cos_v_neg_g:1.2665e-01 L12_v_norm:4.3163e-01 +step:8500 train loss:3.464358 +step:8501 train loss:3.492690 +step:8502 train loss:3.488536 +step:8503 train loss:3.494578 +step:8504 train loss:3.426755 +step:8505 train loss:3.474794 +step:8506 train loss:3.416730 +step:8507 train loss:3.463435 +step:8508 train loss:3.447759 +step:8509 train loss:3.451116 +step:8510 train loss:3.473389 +step:8511 train loss:3.514903 +step:8512 train loss:3.483835 +step:8513 train loss:3.474007 +step:8514 train loss:3.439116 +step:8515 train loss:3.469867 +step:8516 train loss:3.429220 +step:8517 train loss:3.480647 +step:8518 train loss:3.480220 +step:8519 train loss:3.475005 +step:8520 train loss:3.487607 +step:8521 train loss:3.457830 +step:8522 train loss:3.484560 +step:8523 train loss:3.475209 +step:8524 train loss:3.381842 +step:8525 train loss:3.429843 +step:8526 train loss:3.486215 +step:8527 train loss:3.484430 +step:8528 train loss:3.475782 +step:8529 train loss:3.506827 +step:8530 train loss:3.444824 +step:8531 train loss:3.527033 +step:8532 train loss:3.475568 +step:8533 train loss:3.477527 +step:8534 train loss:3.431827 +step:8535 train loss:3.485811 +step:8536 train loss:3.473310 +step:8537 train loss:3.485557 +step:8538 train loss:3.499123 +step:8539 train loss:3.495632 +step:8540 train loss:3.449117 +step:8541 train loss:3.540152 +step:8542 train loss:3.598445 +step:8543 train loss:3.548137 +step:8544 train loss:3.492246 +step:8545 train loss:3.447524 +step:8546 train loss:3.503276 +step:8547 train loss:3.419361 +step:8548 train loss:3.510808 +step:8549 train loss:3.386766 +step:8550 train loss:3.463860 +step:8551 train loss:3.459597 +step:8552 train loss:3.471451 +step:8553 train loss:3.478688 +step:8554 train loss:3.422611 +step:8555 train loss:3.456845 +step:8556 train loss:3.465481 +step:8557 train loss:3.495217 +step:8558 train loss:3.493135 +step:8559 train loss:3.459250 +step:8560 train loss:3.495100 +step:8561 train loss:3.500199 +step:8562 train loss:3.437481 +step:8563 train loss:3.488165 +step:8564 train loss:3.460858 +step:8565 train loss:3.459872 +step:8566 train loss:3.496963 +step:8567 train loss:3.442531 +step:8568 train loss:3.445727 +step:8569 train loss:3.471334 +step:8570 train loss:3.404323 +step:8571 train loss:3.447828 +step:8572 train loss:3.461288 +step:8573 train loss:3.529875 +step:8574 train loss:3.445720 +step:8575 train loss:3.482944 +step:8576 train loss:3.454757 +step:8577 train loss:3.438325 +step:8578 train loss:3.493304 +step:8579 train loss:3.501773 +step:8580 train loss:3.475986 +step:8581 train loss:3.503670 +step:8582 train loss:3.462085 +step:8583 train loss:3.426759 +step:8584 train loss:3.513397 +step:8585 train loss:3.417047 +step:8586 train loss:3.437555 +step:8587 train loss:3.475346 +step:8588 train loss:3.416869 +step:8589 train loss:3.472862 +step:8590 train loss:3.460140 +step:8591 train loss:3.443100 
+step:8592 train loss:3.452210 +step:8593 train loss:3.436571 +step:8594 train loss:3.470367 +step:8595 train loss:3.433597 +step:8596 train loss:3.457335 +step:8597 train loss:3.488091 +step:8598 train loss:3.455822 +step:8599 train loss:3.516034 +step:8600 train loss:3.471120 +step:8601 train loss:3.413309 +step:8602 train loss:3.503716 +step:8603 train loss:3.429752 +step:8604 train loss:3.522541 +step:8605 train loss:3.451213 +step:8606 train loss:3.422268 +step:8607 train loss:3.440578 +step:8608 train loss:3.399828 +step:8609 train loss:3.375632 +step:8610 train loss:3.499878 +step:8611 train loss:3.430135 +step:8612 train loss:3.459455 +step:8613 train loss:3.462266 +step:8614 train loss:3.409275 +step:8615 train loss:3.444637 +step:8616 train loss:3.489920 +step:8617 train loss:3.538407 +step:8618 train loss:3.498410 +step:8619 train loss:3.462628 +step:8620 train loss:3.498976 +step:8621 train loss:3.447508 +step:8622 train loss:3.472089 +step:8623 train loss:3.472924 +step:8624 train loss:3.460299 +step:8625 train loss:3.497097 +step:8626 train loss:3.514803 +step:8627 train loss:3.462016 +step:8628 train loss:3.486762 +step:8629 train loss:3.432796 +step:8630 train loss:3.471950 +step:8631 train loss:3.526605 +step:8632 train loss:3.492656 +step:8633 train loss:3.441566 +step:8634 train loss:3.427114 +step:8635 train loss:3.448954 +step:8636 train loss:3.475561 +step:8637 train loss:3.442152 +step:8638 train loss:3.502430 +step:8639 train loss:3.477846 +step:8640 train loss:3.434569 +step:8641 train loss:3.426713 +step:8642 train loss:3.445832 +step:8643 train loss:3.479549 +step:8644 train loss:3.508075 +step:8645 train loss:3.434088 +step:8646 train loss:3.483934 +step:8647 train loss:3.518563 +step:8648 train loss:3.453454 +step:8649 train loss:3.468877 +step:8650 train loss:3.429704 +step:8651 train loss:3.437561 +step:8652 train loss:3.420912 +step:8653 train loss:3.429302 +step:8654 train loss:3.540182 +step:8655 train loss:3.475953 +step:8656 train loss:3.475889 +step:8657 train loss:3.482015 +step:8658 train loss:3.470898 +step:8659 train loss:3.442070 +step:8660 train loss:3.539195 +step:8661 train loss:3.430574 +step:8662 train loss:3.515878 +step:8663 train loss:3.436241 +step:8664 train loss:3.473160 +step:8665 train loss:3.513446 +step:8666 train loss:3.471397 +step:8667 train loss:3.467849 +step:8668 train loss:3.400559 +step:8669 train loss:3.437339 +step:8670 train loss:3.448357 +step:8671 train loss:3.469983 +step:8672 train loss:3.466361 +step:8673 train loss:3.492273 +step:8674 train loss:3.441348 +step:8675 train loss:3.447291 +step:8676 train loss:3.469772 +step:8677 train loss:3.457453 +step:8678 train loss:3.470145 +step:8679 train loss:3.476330 +step:8680 train loss:3.447973 +step:8681 train loss:3.479065 +step:8682 train loss:3.472420 +step:8683 train loss:3.464069 +step:8684 train loss:3.460152 +step:8685 train loss:3.409907 +step:8686 train loss:3.433233 +step:8687 train loss:3.478353 +step:8688 train loss:3.484764 +step:8689 train loss:3.475442 +step:8690 train loss:3.494650 +step:8691 train loss:3.425165 +step:8692 train loss:3.422495 +step:8693 train loss:3.465017 +step:8694 train loss:3.474864 +step:8695 train loss:3.443978 +step:8696 train loss:3.435800 +step:8697 train loss:3.397174 +step:8698 train loss:3.413478 +step:8699 train loss:3.390571 +step:8700 train loss:3.381815 +step:8701 train loss:3.431247 +step:8702 train loss:3.475270 +step:8703 train loss:3.483753 +step:8704 train loss:3.574565 +step:8705 train loss:3.424963 +step:8706 train 
loss:3.517101 +step:8707 train loss:3.448568 +step:8708 train loss:3.476573 +step:8709 train loss:3.437958 +step:8710 train loss:3.434140 +step:8711 train loss:3.473775 +step:8712 train loss:3.379686 +step:8713 train loss:3.411687 +step:8714 train loss:3.406816 +step:8715 train loss:3.400388 +step:8716 train loss:3.427720 +step:8717 train loss:3.368371 +step:8718 train loss:3.482847 +step:8719 train loss:3.394511 +step:8720 train loss:3.427778 +step:8721 train loss:3.498040 +step:8722 train loss:3.437015 +step:8723 train loss:3.441835 +step:8724 train loss:3.436042 +step:8725 train loss:3.388719 +step:8726 train loss:3.480812 +step:8727 train loss:3.422122 +step:8728 train loss:3.418375 +step:8729 train loss:3.437954 +step:8730 train loss:3.358015 +step:8731 train loss:3.444714 +step:8732 train loss:3.500469 +step:8733 train loss:3.500067 +step:8734 train loss:3.440671 +step:8735 train loss:3.440598 +step:8736 train loss:3.451123 +step:8737 train loss:3.456670 +step:8738 train loss:3.410753 +step:8739 train loss:3.368961 +step:8740 train loss:3.452973 +step:8741 train loss:3.357802 +step:8742 train loss:3.478226 +step:8743 train loss:3.445983 +step:8744 train loss:3.493880 +step:8745 train loss:3.423846 +step:8746 train loss:3.446266 +step:8747 train loss:3.457509 +step:8748 train loss:3.413459 +step:8749 train loss:3.377409 +step:8750 validation loss:3.409314 +step:8750 train loss:3.517468 +step:8751 train loss:3.399216 +step:8752 train loss:3.452458 +step:8753 train loss:3.492024 +step:8754 train loss:3.443959 +step:8755 train loss:3.496230 +step:8756 train loss:3.556479 +step:8757 train loss:3.421503 +step:8758 train loss:3.427472 +step:8759 train loss:3.451002 +step:8760 train loss:3.452766 +step:8761 train loss:3.433606 +step:8762 train loss:3.433488 +step:8763 train loss:3.446404 +step:8764 train loss:3.405527 +step:8765 train loss:3.383126 +step:8766 train loss:3.448560 +step:8767 train loss:3.422263 +step:8768 train loss:3.484422 +step:8769 train loss:3.418120 +step:8770 train loss:3.388893 +step:8771 train loss:3.438625 +step:8772 train loss:3.522067 +step:8773 train loss:3.403656 +step:8774 train loss:3.444161 +step:8775 train loss:3.422350 +step:8776 train loss:3.460907 +step:8777 train loss:3.516054 +step:8778 train loss:3.399277 +step:8779 train loss:3.466627 +step:8780 train loss:3.377140 +step:8781 train loss:3.446911 +step:8782 train loss:3.452278 +step:8783 train loss:3.364062 +step:8784 train loss:3.509912 +step:8785 train loss:3.421788 +step:8786 train loss:3.457665 +step:8787 train loss:3.439154 +step:8788 train loss:3.407815 +step:8789 train loss:3.490922 +step:8790 train loss:3.380718 +step:8791 train loss:3.460932 +step:8792 train loss:3.436152 +step:8793 train loss:3.432748 +step:8794 train loss:3.431475 +step:8795 train loss:3.436527 +step:8796 train loss:3.421206 +step:8797 train loss:3.406420 +step:8798 train loss:3.451837 +step:8799 train loss:3.464226 +step:8800 train loss:3.448071 +step:8801 train loss:3.426421 +step:8802 train loss:3.345268 +step:8803 train loss:3.520489 +step:8804 train loss:3.436082 +step:8805 train loss:3.405245 +step:8806 train loss:3.455277 +step:8807 train loss:3.455539 +step:8808 train loss:3.445652 +step:8809 train loss:3.416227 +step:8810 train loss:3.394260 +step:8811 train loss:3.502940 +step:8812 train loss:3.393827 +step:8813 train loss:3.425074 +step:8814 train loss:3.487216 +step:8815 train loss:3.409097 +step:8816 train loss:3.431842 +step:8817 train loss:3.437168 +step:8818 train loss:3.407161 +step:8819 train loss:3.455524 
+step:8820 train loss:3.427590 +step:8821 train loss:3.435472 +step:8822 train loss:3.471511 +step:8823 train loss:3.370020 +step:8824 train loss:3.516986 +step:8825 train loss:3.418534 +step:8826 train loss:3.403080 +step:8827 train loss:3.429473 +step:8828 train loss:3.366561 +step:8829 train loss:3.405293 +step:8830 train loss:3.415869 +step:8831 train loss:3.367692 +step:8832 train loss:3.443492 +step:8833 train loss:3.448257 +step:8834 train loss:3.455578 +step:8835 train loss:3.445986 +step:8836 train loss:3.455736 +step:8837 train loss:3.408951 +step:8838 train loss:3.445351 +step:8839 train loss:3.356879 +step:8840 train loss:3.414443 +step:8841 train loss:3.432698 +step:8842 train loss:3.421542 +step:8843 train loss:3.490433 +step:8844 train loss:3.399860 +step:8845 train loss:3.418015 +step:8846 train loss:3.440704 +step:8847 train loss:3.398612 +step:8848 train loss:3.388535 +step:8849 train loss:3.423578 +step:8850 train loss:3.493788 +step:8851 train loss:3.411116 +step:8852 train loss:3.556081 +step:8853 train loss:3.420358 +step:8854 train loss:3.457646 +step:8855 train loss:3.445140 +step:8856 train loss:3.405809 +step:8857 train loss:3.401040 +step:8858 train loss:3.395853 +step:8859 train loss:3.392088 +step:8860 train loss:3.442755 +step:8861 train loss:3.476414 +step:8862 train loss:3.378039 +step:8863 train loss:3.453196 +step:8864 train loss:3.452812 +step:8865 train loss:3.381182 +step:8866 train loss:3.464067 +step:8867 train loss:3.381343 +step:8868 train loss:3.423516 +step:8869 train loss:3.467505 +step:8870 train loss:3.403017 +step:8871 train loss:3.503920 +step:8872 train loss:3.381086 +step:8873 train loss:3.382406 +step:8874 train loss:3.450988 +step:8875 train loss:3.433722 +step:8876 train loss:3.341013 +step:8877 train loss:3.432531 +step:8878 train loss:3.361218 +step:8879 train loss:3.390642 +step:8880 train loss:3.436227 +step:8881 train loss:3.379143 +step:8882 train loss:3.436583 +step:8883 train loss:3.407176 +step:8884 train loss:3.446643 +step:8885 train loss:3.419200 +step:8886 train loss:3.452577 +step:8887 train loss:3.416033 +step:8888 train loss:3.412056 +step:8889 train loss:3.414503 +step:8890 train loss:3.423134 +step:8891 train loss:3.434841 +step:8892 train loss:3.361777 +step:8893 train loss:3.429769 +step:8894 train loss:3.381004 +step:8895 train loss:3.417696 +step:8896 train loss:3.418154 +step:8897 train loss:3.420656 +step:8898 train loss:3.408600 +step:8899 train loss:3.496546 +step:8900 train loss:3.382636 +step:8901 train loss:3.466186 +step:8902 train loss:3.411046 +step:8903 train loss:3.467518 +step:8904 train loss:3.377549 +step:8905 train loss:3.435031 +step:8906 train loss:3.367656 +step:8907 train loss:3.471393 +step:8908 train loss:3.382298 +step:8909 train loss:3.528548 +step:8910 train loss:3.380183 +step:8911 train loss:3.438148 +step:8912 train loss:3.400318 +step:8913 train loss:3.424940 +step:8914 train loss:3.351832 +step:8915 train loss:3.429990 +step:8916 train loss:3.376932 +step:8917 train loss:3.429351 +step:8918 train loss:3.465569 +step:8919 train loss:3.394174 +step:8920 train loss:3.380857 +step:8921 train loss:3.461396 +step:8922 train loss:3.389253 +step:8923 train loss:3.464849 +step:8924 train loss:3.395589 +step:8925 train loss:3.408995 +step:8926 train loss:3.377238 +step:8927 train loss:3.389411 +step:8928 train loss:3.382504 +step:8929 train loss:3.439129 +step:8930 train loss:3.363545 +step:8931 train loss:3.426584 +step:8932 train loss:3.422000 +step:8933 train loss:3.442086 +step:8934 train 
loss:3.461429 +step:8935 train loss:3.361600 +step:8936 train loss:3.446726 +step:8937 train loss:3.372828 +step:8938 train loss:3.441792 +step:8939 train loss:3.392751 +step:8940 train loss:3.483643 +step:8941 train loss:3.325877 +step:8942 train loss:3.363404 +step:8943 train loss:3.362082 +step:8944 train loss:3.376723 +step:8945 train loss:3.369275 +step:8946 train loss:3.449561 +step:8947 train loss:3.429017 +step:8948 train loss:3.508581 +step:8949 train loss:3.416812 +step:8950 train loss:3.449850 +step:8951 train loss:3.391090 +step:8952 train loss:3.474374 +step:8953 train loss:3.376523 +step:8954 train loss:3.443491 +step:8955 train loss:3.462040 +step:8956 train loss:3.428131 +step:8957 train loss:3.485219 +step:8958 train loss:3.420921 +step:8959 train loss:3.405447 +step:8960 train loss:3.380052 +step:8961 train loss:3.436768 +step:8962 train loss:3.440706 +step:8963 train loss:3.356167 +step:8964 train loss:3.499895 +step:8965 train loss:3.395663 +step:8966 train loss:3.410983 +step:8967 train loss:3.350188 +step:8968 train loss:3.418592 +step:8969 train loss:3.320253 +step:8970 train loss:3.415109 +step:8971 train loss:3.421642 +step:8972 train loss:3.414277 +step:8973 train loss:3.418224 +step:8974 train loss:3.401019 +step:8975 train loss:3.437205 +step:8976 train loss:3.420475 +step:8977 train loss:3.378467 +step:8978 train loss:3.425988 +step:8979 train loss:3.371872 +step:8980 train loss:3.475120 +step:8981 train loss:3.382706 +step:8982 train loss:3.457820 +step:8983 train loss:3.387480 +step:8984 train loss:3.405978 +step:8985 train loss:3.438186 +step:8986 train loss:3.455223 +step:8987 train loss:3.411605 +step:8988 train loss:3.432591 +step:8989 train loss:3.343284 +step:8990 train loss:3.430610 +step:8991 train loss:3.381078 +step:8992 train loss:3.488636 +step:8993 train loss:3.425732 +step:8994 train loss:3.598026 +step:8995 train loss:3.405388 +step:8996 train loss:3.400722 +step:8997 train loss:3.405334 +step:8998 train loss:3.475358 +step:8999 train loss:3.367082 +step:9000 validation loss:3.396796 total_sharp:6.7242e-03 L1_sharp:4.3380e-03 L2_sharp:9.5976e-04 L3_sharp:1.3838e-03 L4_sharp:1.2186e-03 L5_sharp:7.6924e-04 L6_sharp:1.5425e-03 L7_sharp:2.0465e-03 L8_sharp:3.4651e-03 L9_sharp:2.7794e-03 L10_sharp:1.2433e-03 L11_sharp:1.0560e-03 L12_sharp:2.3659e-03 total_fnorm:1.1642e+00 total_l1_linf:1.0402e+04 total_spectral:1.1642e+00 L1_fnorm:2.7286e-01 L2_fnorm:2.7364e-01 L3_fnorm:2.7174e-01 L4_fnorm:2.7035e-01 L5_fnorm:2.6516e-01 L6_fnorm:2.7178e-01 L7_fnorm:2.7187e-01 L8_fnorm:2.7052e-01 L9_fnorm:2.7632e-01 L10_fnorm:2.8247e-01 L11_fnorm:2.8593e-01 L12_fnorm:2.8375e-01 L1_l1linf:3.4259e-01 L2_l1linf:3.1788e-01 L3_l1linf:3.5603e-01 L4_l1linf:3.2661e-01 L5_l1linf:3.2643e-01 L6_l1linf:3.4027e-01 L7_l1linf:3.2317e-01 L8_l1linf:3.1957e-01 L9_l1linf:3.3005e-01 L10_l1linf:3.1532e-01 L11_l1linf:3.0534e-01 L12_l1linf:3.2622e-01 L1_spectral:4.7934e-02 L2_spectral:4.2013e-02 L3_spectral:5.0558e-02 L4_spectral:5.0860e-02 L5_spectral:4.4547e-02 L6_spectral:5.1090e-02 L7_spectral:4.7174e-02 L8_spectral:4.5371e-02 L9_spectral:3.8362e-02 L10_spectral:2.8229e-02 L11_spectral:2.8707e-02 L12_spectral:3.6765e-02 v_norm:1.1642e+00 cos_v_-g_hvp:6.0640e-02 g_hvp_norm:3.6934e-01 cos_v_-g_t:7.6659e-02 g_t_norm:2.9541e-01 hv_norm:3.2926e-01 cos_v_hv:2.3776e-02 hg_norm:6.1748e+00 cos_g_hg:6.0534e-01 v_par:3.7054e-03 v_perp:1.1642e+00 L1_cos_v_neg_g:1.0855e-01 L1_v_norm:2.7286e-01 L2_cos_v_neg_g:5.7279e-02 L2_v_norm:2.7364e-01 L3_cos_v_neg_g:4.8503e-02 L3_v_norm:2.7174e-01 
L4_cos_v_neg_g:5.5634e-02 L4_v_norm:2.7035e-01 L5_cos_v_neg_g:3.6050e-02 L5_v_norm:2.6516e-01 L6_cos_v_neg_g:4.9051e-02 L6_v_norm:2.7178e-01 L7_cos_v_neg_g:5.4333e-02 L7_v_norm:2.7187e-01 L8_cos_v_neg_g:5.5642e-02 L8_v_norm:2.7052e-01 L9_cos_v_neg_g:5.6197e-02 L9_v_norm:2.7632e-01 L10_cos_v_neg_g:6.6158e-02 L10_v_norm:2.8247e-01 L11_cos_v_neg_g:7.9619e-02 L11_v_norm:2.8593e-01 L12_cos_v_neg_g:1.2422e-01 L12_v_norm:2.8375e-01 +step:9000 train loss:3.403840 +step:9001 train loss:3.380732 +step:9002 train loss:3.439316 +step:9003 train loss:3.390734 +step:9004 train loss:3.414481 +step:9005 train loss:3.368168 +step:9006 train loss:3.464559 +step:9007 train loss:3.393266 +step:9008 train loss:3.458872 +step:9009 train loss:3.399031 +step:9010 train loss:3.417867 +step:9011 train loss:3.378258 +step:9012 train loss:3.435071 +step:9013 train loss:3.386112 +step:9014 train loss:3.472584 +step:9015 train loss:3.405946 +step:9016 train loss:3.450378 +step:9017 train loss:3.435764 +step:9018 train loss:3.527904 +step:9019 train loss:3.392554 +step:9020 train loss:3.432760 +step:9021 train loss:3.386120 +step:9022 train loss:3.413516 +step:9023 train loss:3.343442 +step:9024 train loss:3.425272 +step:9025 train loss:3.377956 +step:9026 train loss:3.394333 +step:9027 train loss:3.433581 +step:9028 train loss:3.464303 +step:9029 train loss:3.393075 +step:9030 train loss:3.459275 +step:9031 train loss:3.428998 +step:9032 train loss:3.472063 +step:9033 train loss:3.399918 +step:9034 train loss:3.397633 +step:9035 train loss:3.339430 +step:9036 train loss:3.446103 +step:9037 train loss:3.469712 +step:9038 train loss:3.450252 +step:9039 train loss:3.392404 +step:9040 train loss:3.415079 +step:9041 train loss:3.431078 +step:9042 train loss:3.462926 +step:9043 train loss:3.454293 +step:9044 train loss:3.432517 +step:9045 train loss:3.411223 +step:9046 train loss:3.403310 +step:9047 train loss:3.373451 +step:9048 train loss:3.413452 +step:9049 train loss:3.380479 +step:9050 train loss:3.438173 +step:9051 train loss:3.403593 +step:9052 train loss:3.403283 +step:9053 train loss:3.398053 +step:9054 train loss:3.434541 +step:9055 train loss:3.416962 +step:9056 train loss:3.374673 +step:9057 train loss:3.477692 +step:9058 train loss:3.331657 +step:9059 train loss:3.415330 +step:9060 train loss:3.355934 +step:9061 train loss:3.379471 +step:9062 train loss:3.348251 +step:9063 train loss:3.450205 +step:9064 train loss:3.404055 +step:9065 train loss:3.407711 +step:9066 train loss:3.400387 +step:9067 train loss:3.431717 +step:9068 train loss:3.433941 +step:9069 train loss:3.464351 +step:9070 train loss:3.413462 +step:9071 train loss:3.447970 +step:9072 train loss:3.402731 +step:9073 train loss:3.469712 +step:9074 train loss:3.389181 +step:9075 train loss:3.476564 +step:9076 train loss:3.402168 +step:9077 train loss:3.410704 +step:9078 train loss:3.437151 +step:9079 train loss:3.499788 +step:9080 train loss:3.484895 +step:9081 train loss:3.517792 +step:9082 train loss:3.415334 +step:9083 train loss:3.488801 +step:9084 train loss:3.457016 +step:9085 train loss:3.451615 +step:9086 train loss:3.516752 +step:9087 train loss:3.363827 +step:9088 train loss:3.531248 +step:9089 train loss:3.409865 +step:9090 train loss:3.476995 +step:9091 train loss:3.462910 +step:9092 train loss:3.458352 +step:9093 train loss:3.404198 +step:9094 train loss:3.439510 +step:9095 train loss:3.425581 +step:9096 train loss:3.438489 +step:9097 train loss:3.476928 +step:9098 train loss:3.444571 +step:9099 train loss:3.462344 +step:9100 train 
loss:3.404596 +step:9101 train loss:3.456023 +step:9102 train loss:3.494410 +step:9103 train loss:3.412529 +step:9104 train loss:3.414230 +step:9105 train loss:3.433108 +step:9106 train loss:3.473156 +step:9107 train loss:3.433293 +step:9108 train loss:3.482504 +step:9109 train loss:3.392697 +step:9110 train loss:3.476471 +step:9111 train loss:3.412335 +step:9112 train loss:3.420577 +step:9113 train loss:3.418849 +step:9114 train loss:3.466622 +step:9115 train loss:3.448522 +step:9116 train loss:3.432477 +step:9117 train loss:3.494820 +step:9118 train loss:3.470665 +step:9119 train loss:3.429227 +step:9120 train loss:3.416093 +step:9121 train loss:3.477350 +step:9122 train loss:3.416044 +step:9123 train loss:3.421495 +step:9124 train loss:3.446659 +step:9125 train loss:3.435962 +step:9126 train loss:3.403123 +step:9127 train loss:3.421000 +step:9128 train loss:3.387605 +step:9129 train loss:3.463905 +step:9130 train loss:3.441098 +step:9131 train loss:3.432607 +step:9132 train loss:3.440870 +step:9133 train loss:3.458802 +step:9134 train loss:3.415396 +step:9135 train loss:3.553212 +step:9136 train loss:3.431504 +step:9137 train loss:3.437329 +step:9138 train loss:3.466821 +step:9139 train loss:3.397373 +step:9140 train loss:3.457618 +step:9141 train loss:3.380638 +step:9142 train loss:3.434544 +step:9143 train loss:3.446363 +step:9144 train loss:3.448392 +step:9145 train loss:3.401030 +step:9146 train loss:3.513966 +step:9147 train loss:3.455897 +step:9148 train loss:3.466240 +step:9149 train loss:3.474229 +step:9150 train loss:3.400064 +step:9151 train loss:3.430426 +step:9152 train loss:3.400152 +step:9153 train loss:3.502519 +step:9154 train loss:3.464217 +step:9155 train loss:3.443908 +step:9156 train loss:3.484585 +step:9157 train loss:3.456888 +step:9158 train loss:3.572470 +step:9159 train loss:3.379088 +step:9160 train loss:3.466658 +step:9161 train loss:3.426220 +step:9162 train loss:3.458387 +step:9163 train loss:3.380776 +step:9164 train loss:3.432922 +step:9165 train loss:3.475220 +step:9166 train loss:3.445486 +step:9167 train loss:3.489016 +step:9168 train loss:3.426920 +step:9169 train loss:3.394425 +step:9170 train loss:3.484568 +step:9171 train loss:3.427336 +step:9172 train loss:3.497200 +step:9173 train loss:3.446061 +step:9174 train loss:3.465906 +step:9175 train loss:3.420954 +step:9176 train loss:3.460810 +step:9177 train loss:3.463824 +step:9178 train loss:3.409876 +step:9179 train loss:3.421101 +step:9180 train loss:3.469103 +step:9181 train loss:3.434481 +step:9182 train loss:3.447854 +step:9183 train loss:3.412683 +step:9184 train loss:3.485501 +step:9185 train loss:3.412984 +step:9186 train loss:3.411034 +step:9187 train loss:3.424581 +step:9188 train loss:3.372788 +step:9189 train loss:3.408990 +step:9190 train loss:3.401131 +step:9191 train loss:3.421779 +step:9192 train loss:3.383729 +step:9193 train loss:3.438451 +step:9194 train loss:3.410697 +step:9195 train loss:3.402830 +step:9196 train loss:3.505672 +step:9197 train loss:3.502368 +step:9198 train loss:3.428230 +step:9199 train loss:3.433986 +step:9200 train loss:3.448574 +step:9201 train loss:3.404948 +step:9202 train loss:3.401592 +step:9203 train loss:3.407421 +step:9204 train loss:3.436000 +step:9205 train loss:3.397605 +step:9206 train loss:3.470700 +step:9207 train loss:3.409714 +step:9208 train loss:3.473785 +step:9209 train loss:3.460523 +step:9210 train loss:3.424164 +step:9211 train loss:3.469952 +step:9212 train loss:3.442673 +step:9213 train loss:3.424226 +step:9214 train loss:3.438773 
+step:9215 train loss:3.391287 +step:9216 train loss:3.389949 +step:9217 train loss:3.417280 +step:9218 train loss:3.408553 +step:9219 train loss:3.398394 +step:9220 train loss:3.460577 +step:9221 train loss:3.458615 +step:9222 train loss:3.432148 +step:9223 train loss:3.468761 +step:9224 train loss:3.357857 +step:9225 train loss:3.387003 +step:9226 train loss:3.378339 +step:9227 train loss:3.392915 +step:9228 train loss:3.448819 +step:9229 train loss:3.460290 +step:9230 train loss:3.454877 +step:9231 train loss:3.414568 +step:9232 train loss:3.454197 +step:9233 train loss:3.416502 +step:9234 train loss:3.457474 +step:9235 train loss:3.410803 +step:9236 train loss:3.435231 +step:9237 train loss:3.452200 +step:9238 train loss:3.472958 +step:9239 train loss:3.424417 +step:9240 train loss:3.470199 +step:9241 train loss:3.427314 +step:9242 train loss:3.477031 +step:9243 train loss:3.424224 +step:9244 train loss:3.407547 +step:9245 train loss:3.460813 +step:9246 train loss:3.443931 +step:9247 train loss:3.438193 +step:9248 train loss:3.357242 +step:9249 train loss:3.457792 +step:9250 validation loss:3.376177 +step:9250 train loss:3.485317 +step:9251 train loss:3.428543 +step:9252 train loss:3.463976 +step:9253 train loss:3.414535 +step:9254 train loss:3.508115 +step:9255 train loss:3.441498 +step:9256 train loss:3.523464 +step:9257 train loss:3.514740 +step:9258 train loss:3.367156 +step:9259 train loss:3.405996 +step:9260 train loss:3.506167 +step:9261 train loss:3.413814 +step:9262 train loss:3.334413 +step:9263 train loss:3.329753 +step:9264 train loss:3.427089 +step:9265 train loss:3.368508 +step:9266 train loss:3.424370 +step:9267 train loss:3.463825 +step:9268 train loss:3.460234 +step:9269 train loss:3.446854 +step:9270 train loss:3.501247 +step:9271 train loss:3.405990 +step:9272 train loss:3.473795 +step:9273 train loss:3.437863 +step:9274 train loss:3.390194 +step:9275 train loss:3.411327 +step:9276 train loss:3.432509 +step:9277 train loss:3.454921 +step:9278 train loss:3.378649 +step:9279 train loss:3.472037 +step:9280 train loss:3.463129 +step:9281 train loss:3.417796 +step:9282 train loss:3.445915 +step:9283 train loss:3.466044 +step:9284 train loss:3.454535 +step:9285 train loss:3.419811 +step:9286 train loss:3.458115 +step:9287 train loss:3.382713 +step:9288 train loss:3.444074 +step:9289 train loss:3.449051 +step:9290 train loss:3.380436 +step:9291 train loss:3.405065 +step:9292 train loss:3.444888 +step:9293 train loss:3.468527 +step:9294 train loss:3.377217 +step:9295 train loss:3.451080 +step:9296 train loss:3.436181 +step:9297 train loss:3.406416 +step:9298 train loss:3.374308 +step:9299 train loss:3.370843 +step:9300 train loss:3.415837 +step:9301 train loss:3.393190 +step:9302 train loss:3.413858 +step:9303 train loss:3.402678 +step:9304 train loss:3.428030 +step:9305 train loss:3.420369 +step:9306 train loss:3.439058 +step:9307 train loss:3.501431 +step:9308 train loss:3.417917 +step:9309 train loss:3.382659 +step:9310 train loss:3.436048 +step:9311 train loss:3.434609 +step:9312 train loss:3.416146 +step:9313 train loss:3.443715 +step:9314 train loss:3.461904 +step:9315 train loss:3.439100 +step:9316 train loss:3.428547 +step:9317 train loss:3.432534 +step:9318 train loss:3.418919 +step:9319 train loss:3.433004 +step:9320 train loss:3.455804 +step:9321 train loss:3.492645 +step:9322 train loss:3.374643 +step:9323 train loss:3.410254 +step:9324 train loss:3.410066 +step:9325 train loss:3.371407 +step:9326 train loss:3.494466 +step:9327 train loss:3.409307 +step:9328 
train loss:3.382860 +step:9329 train loss:3.418929 +step:9330 train loss:3.482939 +step:9331 train loss:3.423767 +step:9332 train loss:3.434454 +step:9333 train loss:3.422734 +step:9334 train loss:3.409436 +step:9335 train loss:3.412675 +step:9336 train loss:3.416964 +step:9337 train loss:3.418807 +step:9338 train loss:3.480870 +step:9339 train loss:3.432858 +step:9340 train loss:3.402358 +step:9341 train loss:3.576059 +step:9342 train loss:3.385885 +step:9343 train loss:3.357728 +step:9344 train loss:3.470127 +step:9345 train loss:3.402704 +step:9346 train loss:3.418222 +step:9347 train loss:3.444805 +step:9348 train loss:3.427044 +step:9349 train loss:3.408540 +step:9350 train loss:3.481702 +step:9351 train loss:3.458496 +step:9352 train loss:3.426578 +step:9353 train loss:3.460965 +step:9354 train loss:3.448761 +step:9355 train loss:3.401803 +step:9356 train loss:3.419616 +step:9357 train loss:3.411652 +step:9358 train loss:3.474609 +step:9359 train loss:3.409595 +step:9360 train loss:3.443604 +step:9361 train loss:3.402041 +step:9362 train loss:3.406282 +step:9363 train loss:3.421257 +step:9364 train loss:3.375837 +step:9365 train loss:3.397514 +step:9366 train loss:3.483243 +step:9367 train loss:3.388829 +step:9368 train loss:3.441361 +step:9369 train loss:3.425127 +step:9370 train loss:3.457497 +step:9371 train loss:3.423801 +step:9372 train loss:3.390339 +step:9373 train loss:3.436996 +step:9374 train loss:3.377522 +step:9375 train loss:3.410781 +step:9376 train loss:3.393183 +step:9377 train loss:3.495645 +step:9378 train loss:3.409789 +step:9379 train loss:3.387784 +step:9380 train loss:3.415115 +step:9381 train loss:3.377399 +step:9382 train loss:3.382638 +step:9383 train loss:3.470840 +step:9384 train loss:3.389695 +step:9385 train loss:3.495999 +step:9386 train loss:3.362544 +step:9387 train loss:3.396715 +step:9388 train loss:3.426090 +step:9389 train loss:3.390996 +step:9390 train loss:3.388898 +step:9391 train loss:3.407827 +step:9392 train loss:3.435984 +step:9393 train loss:3.451476 +step:9394 train loss:3.506871 +step:9395 train loss:3.520931 +step:9396 train loss:3.435556 +step:9397 train loss:3.396072 +step:9398 train loss:3.449294 +step:9399 train loss:3.438918 +step:9400 train loss:3.381528 +step:9401 train loss:3.403972 +step:9402 train loss:3.393667 +step:9403 train loss:3.382811 +step:9404 train loss:3.414632 +step:9405 train loss:3.439688 +step:9406 train loss:3.382172 +step:9407 train loss:3.432499 +step:9408 train loss:3.441061 +step:9409 train loss:3.435805 +step:9410 train loss:3.381382 +step:9411 train loss:3.433270 +step:9412 train loss:3.424160 +step:9413 train loss:3.489375 +step:9414 train loss:3.550716 +step:9415 train loss:3.406725 +step:9416 train loss:3.422807 +step:9417 train loss:3.387423 +step:9418 train loss:3.288522 +step:9419 train loss:3.425579 +step:9420 train loss:3.467959 +step:9421 train loss:3.433246 +step:9422 train loss:3.483691 +step:9423 train loss:3.479869 +step:9424 train loss:3.449563 +step:9425 train loss:3.394978 +step:9426 train loss:3.421172 +step:9427 train loss:3.456375 +step:9428 train loss:3.431179 +step:9429 train loss:3.408659 +step:9430 train loss:3.420215 +step:9431 train loss:3.447629 +step:9432 train loss:3.380921 +step:9433 train loss:3.444703 +step:9434 train loss:3.370419 +step:9435 train loss:3.446037 +step:9436 train loss:3.417520 +step:9437 train loss:3.359195 +step:9438 train loss:3.480254 +step:9439 train loss:3.435730 +step:9440 train loss:3.414884 +step:9441 train loss:3.434206 +step:9442 train loss:3.454386 
+step:9443 train loss:3.381596 +step:9444 train loss:3.367302 +step:9445 train loss:3.465046 +step:9446 train loss:3.419540 +step:9447 train loss:3.410405 +step:9448 train loss:3.409023 +step:9449 train loss:3.403407 +step:9450 train loss:3.414671 +step:9451 train loss:3.391609 +step:9452 train loss:3.404027 +step:9453 train loss:3.404489 +step:9454 train loss:3.343181 +step:9455 train loss:3.360706 +step:9456 train loss:3.430309 +step:9457 train loss:3.422078 +step:9458 train loss:3.397532 +step:9459 train loss:3.380894 +step:9460 train loss:3.418754 +step:9461 train loss:3.388459 +step:9462 train loss:3.451582 +step:9463 train loss:3.445710 +step:9464 train loss:3.343686 +step:9465 train loss:3.438371 +step:9466 train loss:3.380014 +step:9467 train loss:3.426275 +step:9468 train loss:3.445831 +step:9469 train loss:3.394228 +step:9470 train loss:3.347634 +step:9471 train loss:3.454569 +step:9472 train loss:3.362742 +step:9473 train loss:3.393234 +step:9474 train loss:3.390961 +step:9475 train loss:3.390162 +step:9476 train loss:3.342178 +step:9477 train loss:3.359532 +step:9478 train loss:3.376529 +step:9479 train loss:3.377331 +step:9480 train loss:3.391898 +step:9481 train loss:3.383868 +step:9482 train loss:3.535480 +step:9483 train loss:3.403660 +step:9484 train loss:3.507887 +step:9485 train loss:3.417362 +step:9486 train loss:3.369040 +step:9487 train loss:3.365296 +step:9488 train loss:3.416186 +step:9489 train loss:3.358760 +step:9490 train loss:3.396579 +step:9491 train loss:3.475371 +step:9492 train loss:3.425542 +step:9493 train loss:3.458312 +step:9494 train loss:3.339669 +step:9495 train loss:3.351712 +step:9496 train loss:3.355615 +step:9497 train loss:3.403289 +step:9498 train loss:3.340359 +step:9499 train loss:3.444658 +step:9500 validation loss:3.365556 total_sharp:6.7361e-03 L1_sharp:3.5507e-03 L2_sharp:3.4638e-04 L3_sharp:1.1841e-03 L4_sharp:1.1667e-03 L5_sharp:8.2320e-04 L6_sharp:1.4581e-03 L7_sharp:1.7694e-03 L8_sharp:3.1990e-03 L9_sharp:2.7121e-03 L10_sharp:1.3620e-03 L11_sharp:1.4573e-03 L12_sharp:3.2198e-03 total_fnorm:5.9714e-01 total_l1_linf:5.3466e+03 total_spectral:5.9714e-01 L1_fnorm:1.4101e-01 L2_fnorm:1.4072e-01 L3_fnorm:1.4087e-01 L4_fnorm:1.3978e-01 L5_fnorm:1.3750e-01 L6_fnorm:1.3970e-01 L7_fnorm:1.4022e-01 L8_fnorm:1.4013e-01 L9_fnorm:1.4260e-01 L10_fnorm:1.4565e-01 L11_fnorm:1.4687e-01 L12_fnorm:1.4610e-01 L1_l1linf:1.8089e-01 L2_l1linf:1.5502e-01 L3_l1linf:1.7474e-01 L4_l1linf:1.7889e-01 L5_l1linf:1.5873e-01 L6_l1linf:1.8532e-01 L7_l1linf:1.7176e-01 L8_l1linf:1.7850e-01 L9_l1linf:1.5799e-01 L10_l1linf:1.5381e-01 L11_l1linf:1.6430e-01 L12_l1linf:1.6723e-01 L1_spectral:2.5124e-02 L2_spectral:2.0953e-02 L3_spectral:2.4053e-02 L4_spectral:2.4379e-02 L5_spectral:2.3165e-02 L6_spectral:2.5993e-02 L7_spectral:2.4745e-02 L8_spectral:2.5115e-02 L9_spectral:2.0874e-02 L10_spectral:1.5017e-02 L11_spectral:1.5068e-02 L12_spectral:2.0626e-02 v_norm:5.9714e-01 cos_v_-g_hvp:6.2305e-02 g_hvp_norm:3.3946e-01 cos_v_-g_t:9.0007e-02 g_t_norm:2.3684e-01 hv_norm:1.6577e-01 cos_v_hv:2.4264e-02 hg_norm:9.0361e+00 cos_g_hg:5.7087e-01 v_par:2.2308e-03 v_perp:5.9714e-01 L1_cos_v_neg_g:1.0908e-01 L1_v_norm:1.4101e-01 L2_cos_v_neg_g:4.7598e-02 L2_v_norm:1.4072e-01 L3_cos_v_neg_g:4.7953e-02 L3_v_norm:1.4087e-01 L4_cos_v_neg_g:5.3957e-02 L4_v_norm:1.3978e-01 L5_cos_v_neg_g:3.4122e-02 L5_v_norm:1.3750e-01 L6_cos_v_neg_g:4.6750e-02 L6_v_norm:1.3970e-01 L7_cos_v_neg_g:5.5652e-02 L7_v_norm:1.4022e-01 L8_cos_v_neg_g:5.4959e-02 L8_v_norm:1.4013e-01 L9_cos_v_neg_g:5.7209e-02 
L9_v_norm:1.4260e-01 L10_cos_v_neg_g:6.7334e-02 L10_v_norm:1.4565e-01 L11_cos_v_neg_g:7.9051e-02 L11_v_norm:1.4687e-01 L12_cos_v_neg_g:1.2332e-01 L12_v_norm:1.4610e-01 +step:9500 train loss:3.397503 +step:9501 train loss:3.453794 +step:9502 train loss:3.404042 +step:9503 train loss:3.469704 +step:9504 train loss:3.367387 +step:9505 train loss:3.367882 +step:9506 train loss:3.432041 +step:9507 train loss:3.416258 +step:9508 train loss:3.398643 +step:9509 train loss:3.446550 +step:9510 train loss:3.480135 +step:9511 train loss:3.354572 +step:9512 train loss:3.432696 +step:9513 train loss:3.412769 +step:9514 train loss:3.474643 +step:9515 train loss:3.374963 +step:9516 train loss:3.283036 +step:9517 train loss:3.353959 +step:9518 train loss:3.379235 +step:9519 train loss:3.384438 +step:9520 train loss:3.304023 +step:9521 train loss:3.397327 +step:9522 train loss:3.422675 +step:9523 train loss:3.358090 +step:9524 train loss:3.420838 +step:9525 train loss:3.407924 +step:9526 train loss:3.361354 +step:9527 train loss:3.351829 +step:9528 train loss:3.440604 +step:9529 train loss:3.339991 +step:9530 train loss:3.397909 +step:9531 train loss:3.431509 +step:9532 train loss:3.416823 +step:9533 train loss:3.401980 +step:9534 train loss:3.431214 +step:9535 train loss:3.365416 +step:9536 train loss:3.380621 +step:9537 train loss:3.468243 +step:9538 train loss:3.458743 +step:9539 train loss:3.371001 +step:9540 train loss:3.539445 +step:9541 train loss:3.367700 +step:9542 train loss:3.364516 +step:9543 train loss:3.361102 +step:9544 train loss:3.368774 +step:9545 train loss:3.333465 +step:9546 train loss:3.366752 +step:9547 train loss:3.488636 +step:9548 train loss:3.414312 +step:9549 train loss:3.407253 +step:9550 train loss:3.436586 +step:9551 train loss:3.344839 +step:9552 train loss:3.397023 +step:9553 train loss:3.427392 +step:9554 train loss:3.397802 +step:9555 train loss:3.342883 +step:9556 train loss:3.421636 +step:9557 train loss:3.393752 +step:9558 train loss:3.404630 +step:9559 train loss:3.400175 +step:9560 train loss:3.507364 +step:9561 train loss:3.386233 +step:9562 train loss:3.460920 +step:9563 train loss:3.599850 +step:9564 train loss:3.400572 +step:9565 train loss:3.387853 +step:9566 train loss:3.427035 +step:9567 train loss:3.367061 +step:9568 train loss:3.433235 +step:9569 train loss:3.421271 +step:9570 train loss:3.443330 +step:9571 train loss:3.420149 +step:9572 train loss:3.359964 +step:9573 train loss:3.517726 +step:9574 train loss:3.357418 +step:9575 train loss:3.432366 +step:9576 train loss:3.380623 +step:9577 train loss:3.375116 +step:9578 train loss:3.415343 +step:9579 train loss:3.406248 +step:9580 train loss:3.398659 +step:9581 train loss:3.435237 +step:9582 train loss:3.355601 +step:9583 train loss:3.483906 +step:9584 train loss:3.378078 +step:9585 train loss:3.403251 +step:9586 train loss:3.445719 +step:9587 train loss:3.425386 +step:9588 train loss:3.435467 +step:9589 train loss:3.412549 +step:9590 train loss:3.390016 +step:9591 train loss:3.357727 +step:9592 train loss:3.358821 +step:9593 train loss:3.405527 +step:9594 train loss:3.335525 +step:9595 train loss:3.389545 +step:9596 train loss:3.404859 +step:9597 train loss:3.389120 +step:9598 train loss:3.330658 +step:9599 train loss:3.340443 +step:9600 train loss:3.409526 +step:9601 train loss:3.378252 +step:9602 train loss:3.328129 +step:9603 train loss:3.444146 +step:9604 train loss:3.387896 +step:9605 train loss:3.388864 +step:9606 train loss:3.516462 +step:9607 train loss:3.401464 +step:9608 train loss:3.366624 
+step:9609 train loss:3.474421 +step:9610 train loss:3.394886 +step:9611 train loss:3.343168 +step:9612 train loss:3.423084 +step:9613 train loss:3.412663 +step:9614 train loss:3.413544 +step:9615 train loss:3.350272 +step:9616 train loss:3.430320 +step:9617 train loss:3.385655 +step:9618 train loss:3.358031 +step:9619 train loss:3.339584 +step:9620 train loss:3.387499 +step:9621 train loss:3.382021 +step:9622 train loss:3.446105 +step:9623 train loss:3.377868 +step:9624 train loss:3.454961 +step:9625 train loss:3.350647 +step:9626 train loss:3.356285 +step:9627 train loss:3.407586 +step:9628 train loss:3.430733 +step:9629 train loss:3.484211 +step:9630 train loss:3.419358 +step:9631 train loss:3.427070 +step:9632 train loss:3.403212 +step:9633 train loss:3.375493 +step:9634 train loss:3.454097 +step:9635 train loss:3.419907 +step:9636 train loss:3.430711 +step:9637 train loss:3.397534 +step:9638 train loss:3.384868 +step:9639 train loss:3.405447 +step:9640 train loss:3.413237 +step:9641 train loss:3.356920 +step:9642 train loss:3.422351 +step:9643 train loss:3.379799 +step:9644 train loss:3.433193 +step:9645 train loss:3.422637 +step:9646 train loss:3.433643 +step:9647 train loss:3.399864 +step:9648 train loss:3.409821 +step:9649 train loss:3.424384 +step:9650 train loss:3.394554 +step:9651 train loss:3.404391 +step:9652 train loss:3.409221 +step:9653 train loss:3.429010 +step:9654 train loss:3.434320 +step:9655 train loss:3.340275 +step:9656 train loss:3.368007 +step:9657 train loss:3.367057 +step:9658 train loss:3.416725 +step:9659 train loss:3.418403 +step:9660 train loss:3.305463 +step:9661 train loss:3.517173 +step:9662 train loss:3.347710 +step:9663 train loss:3.409914 +step:9664 train loss:3.453425 +step:9665 train loss:3.344207 +step:9666 train loss:3.510434 +step:9667 train loss:3.411397 +step:9668 train loss:3.509459 +step:9669 train loss:3.388696 +step:9670 train loss:3.418154 +step:9671 train loss:3.379810 +step:9672 train loss:3.475648 +step:9673 train loss:3.392924 +step:9674 train loss:3.478056 +step:9675 train loss:3.424154 +step:9676 train loss:3.421201 +step:9677 train loss:3.448059 +step:9678 train loss:3.376539 +step:9679 train loss:3.414257 +step:9680 train loss:3.381572 +step:9681 train loss:3.397631 +step:9682 train loss:3.384173 +step:9683 train loss:3.419816 +step:9684 train loss:3.372158 +step:9685 train loss:3.445243 +step:9686 train loss:3.414655 +step:9687 train loss:3.394507 +step:9688 train loss:3.392210 +step:9689 train loss:3.360496 +step:9690 train loss:3.409392 +step:9691 train loss:3.401029 +step:9692 train loss:3.428405 +step:9693 train loss:3.398546 +step:9694 train loss:3.420311 +step:9695 train loss:3.466114 +step:9696 train loss:3.439872 +step:9697 train loss:3.445836 +step:9698 train loss:3.392040 +step:9699 train loss:3.409793 +step:9700 train loss:3.377481 +step:9701 train loss:3.378670 +step:9702 train loss:3.382968 +step:9703 train loss:3.376024 +step:9704 train loss:3.437732 +step:9705 train loss:3.432318 +step:9706 train loss:3.379640 +step:9707 train loss:3.384213 +step:9708 train loss:3.462719 +step:9709 train loss:3.417193 +step:9710 train loss:3.422683 +step:9711 train loss:3.407444 +step:9712 train loss:3.561474 +step:9713 train loss:3.420619 +step:9714 train loss:3.403610 +step:9715 train loss:3.368531 +step:9716 train loss:3.387224 +step:9717 train loss:3.379698 +step:9718 train loss:3.440982 +step:9719 train loss:3.402267 +step:9720 train loss:3.460796 +step:9721 train loss:3.420890 +step:9722 train loss:3.375242 +step:9723 train 
loss:3.365508 +step:9724 train loss:3.429110 +step:9725 train loss:3.431123 +step:9726 train loss:3.441122 +step:9727 train loss:3.396612 +step:9728 train loss:3.425088 +step:9729 train loss:3.382056 +step:9730 train loss:3.404630 +step:9731 train loss:3.401608 +step:9732 train loss:3.366611 +step:9733 train loss:3.456289 +step:9734 train loss:3.386033 +step:9735 train loss:3.443703 +step:9736 train loss:3.457585 +step:9737 train loss:3.369846 +step:9738 train loss:3.450414 +step:9739 train loss:3.399178 +step:9740 train loss:3.386475 +step:9741 train loss:3.455774 +step:9742 train loss:3.353148 +step:9743 train loss:3.403130 +step:9744 train loss:3.404619 +step:9745 train loss:3.376515 +step:9746 train loss:3.380358 +step:9747 train loss:3.375708 +step:9748 train loss:3.427666 +step:9749 train loss:3.354353 +step:9750 validation loss:3.352724 +step:9750 train loss:3.381325 +step:9751 train loss:3.455149 +step:9752 train loss:3.404194 +step:9753 train loss:3.380032 +step:9754 train loss:3.410090 +step:9755 train loss:3.339622 +step:9756 train loss:3.403416 +step:9757 train loss:3.363688 +step:9758 train loss:3.467345 +step:9759 train loss:3.402537 +step:9760 train loss:3.419524 +step:9761 train loss:3.412712 +step:9762 train loss:3.433441 +step:9763 train loss:3.421182 +step:9764 train loss:3.394165 +step:9765 train loss:3.418886 +step:9766 train loss:3.405439 +step:9767 train loss:3.361019 +step:9768 train loss:3.454038 +step:9769 train loss:3.373897 +step:9770 train loss:3.344502 +step:9771 train loss:3.418949 +step:9772 train loss:3.364557 +step:9773 train loss:3.409858 +step:9774 train loss:3.472477 +step:9775 train loss:3.412171 +step:9776 train loss:3.491222 +step:9777 train loss:3.348449 +step:9778 train loss:3.405440 +step:9779 train loss:3.406159 +step:9780 train loss:3.421344 +step:9781 train loss:3.416551 +step:9782 train loss:3.381305 +step:9783 train loss:3.433266 +step:9784 train loss:3.342540 +step:9785 train loss:3.373102 +step:9786 train loss:3.407571 +step:9787 train loss:3.417212 +step:9788 train loss:3.410412 +step:9789 train loss:3.419539 +step:9790 train loss:3.377161 +step:9791 train loss:3.424006 +step:9792 train loss:3.366311 +step:9793 train loss:3.410555 +step:9794 train loss:3.459481 +step:9795 train loss:3.439994 +step:9796 train loss:3.424972 +step:9797 train loss:3.333910 +step:9798 train loss:3.374138 +step:9799 train loss:3.462453 +step:9800 train loss:3.474825 +step:9801 train loss:3.400474 +step:9802 train loss:3.351099 +step:9803 train loss:3.372665 +step:9804 train loss:3.396986 +step:9805 train loss:3.365096 +step:9806 train loss:3.402080 +step:9807 train loss:3.398913 +step:9808 train loss:3.297873 +step:9809 train loss:3.372327 +step:9810 train loss:3.375701 +step:9811 train loss:3.426142 +step:9812 train loss:3.425144 +step:9813 train loss:3.413042 +step:9814 train loss:3.418330 +step:9815 train loss:3.397372 +step:9816 train loss:3.368526 +step:9817 train loss:3.359692 +step:9818 train loss:3.357065 +step:9819 train loss:3.436379 +step:9820 train loss:3.447408 +step:9821 train loss:3.353932 +step:9822 train loss:3.362664 +step:9823 train loss:3.418805 +step:9824 train loss:3.347285 +step:9825 train loss:3.429029 +step:9826 train loss:3.415466 +step:9827 train loss:3.390744 +step:9828 train loss:3.436223 +step:9829 train loss:3.422601 +step:9830 train loss:3.377693 +step:9831 train loss:3.341604 +step:9832 train loss:3.443261 +step:9833 train loss:3.358566 +step:9834 train loss:3.430373 +step:9835 train loss:3.407200 +step:9836 train loss:3.392020 
+step:9837 train loss:3.376045 +step:9838 train loss:3.325747 +step:9839 train loss:3.386681 +step:9840 train loss:3.353169 +step:9841 train loss:3.346466 +step:9842 train loss:3.376035 +step:9843 train loss:3.405468 +step:9844 train loss:3.444215 +step:9845 train loss:3.344583 +step:9846 train loss:3.441966 +step:9847 train loss:3.412225 +step:9848 train loss:3.365876 +step:9849 train loss:3.399308 +step:9850 train loss:3.418983 +step:9851 train loss:3.422653 +step:9852 train loss:3.389398 +step:9853 train loss:3.421412 +step:9854 train loss:3.414085 +step:9855 train loss:3.383014 +step:9856 train loss:3.483914 +step:9857 train loss:3.463371 +step:9858 train loss:3.482155 +step:9859 train loss:3.376022 +step:9860 train loss:3.407790 +step:9861 train loss:3.482559 +step:9862 train loss:3.357271 +step:9863 train loss:3.334765 +step:9864 train loss:3.399679 +step:9865 train loss:3.379814 +step:9866 train loss:3.374130 +step:9867 train loss:3.420038 +step:9868 train loss:3.444338 +step:9869 train loss:3.433009 +step:9870 train loss:3.446735 +step:9871 train loss:3.411389 +step:9872 train loss:3.396673 +step:9873 train loss:3.420216 +step:9874 train loss:3.416377 +step:9875 train loss:3.463533 +step:9876 train loss:3.428411 +step:9877 train loss:3.419036 +step:9878 train loss:3.487370 +step:9879 train loss:3.422164 +step:9880 train loss:3.330631 +step:9881 train loss:3.394535 +step:9882 train loss:3.297273 +step:9883 train loss:3.433728 +step:9884 train loss:3.383854 +step:9885 train loss:3.418144 +step:9886 train loss:3.382377 +step:9887 train loss:3.405032 +step:9888 train loss:3.535661 +step:9889 train loss:3.382286 +step:9890 train loss:3.388224 +step:9891 train loss:3.426246 +step:9892 train loss:3.420020 +step:9893 train loss:3.380924 +step:9894 train loss:3.495780 +step:9895 train loss:3.410718 +step:9896 train loss:3.367613 +step:9897 train loss:3.364476 +step:9898 train loss:3.353904 +step:9899 train loss:3.357002 +step:9900 train loss:3.332225 +step:9901 train loss:3.359767 +step:9902 train loss:3.421880 +step:9903 train loss:3.400943 +step:9904 train loss:3.331595 +step:9905 train loss:3.433350 +step:9906 train loss:3.428990 +step:9907 train loss:3.321546 +step:9908 train loss:3.410205 +step:9909 train loss:3.379487 +step:9910 train loss:3.365734 +step:9911 train loss:3.378672 +step:9912 train loss:3.403032 +step:9913 train loss:3.372658 +step:9914 train loss:3.358782 +step:9915 train loss:3.388053 +step:9916 train loss:3.420429 +step:9917 train loss:3.333550 +step:9918 train loss:3.372714 +step:9919 train loss:3.398015 +step:9920 train loss:3.377226 +step:9921 train loss:3.416315 +step:9922 train loss:3.401096 +step:9923 train loss:3.440789 +step:9924 train loss:3.412018 +step:9925 train loss:3.426096 +step:9926 train loss:3.386119 +step:9927 train loss:3.410116 +step:9928 train loss:3.400128 +step:9929 train loss:3.428877 +step:9930 train loss:3.420776 +step:9931 train loss:3.410664 +step:9932 train loss:3.414159 +step:9933 train loss:3.390052 +step:9934 train loss:3.421258 +step:9935 train loss:3.434524 +step:9936 train loss:3.380849 +step:9937 train loss:3.386826 +step:9938 train loss:3.410789 +step:9939 train loss:3.507973 +step:9940 train loss:3.359782 +step:9941 train loss:3.396746 +step:9942 train loss:3.362790 +step:9943 train loss:3.427707 +step:9944 train loss:3.474872 +step:9945 train loss:3.465599 +step:9946 train loss:3.458895 +step:9947 train loss:3.370911 +step:9948 train loss:3.368095 +step:9949 train loss:3.412077 +step:9950 train loss:3.380108 +step:9951 train 
loss:3.384836 +step:9952 train loss:3.406534 +step:9953 train loss:3.440795 +step:9954 train loss:3.377646 +step:9955 train loss:3.404742 +step:9956 train loss:3.401713 +step:9957 train loss:3.376434 +step:9958 train loss:3.367515 +step:9959 train loss:3.338693 +step:9960 train loss:3.411444 +step:9961 train loss:3.450823 +step:9962 train loss:3.312748 +step:9963 train loss:3.395901 +step:9964 train loss:3.386386 +step:9965 train loss:3.367707 +step:9966 train loss:3.407094 +step:9967 train loss:3.394685 +step:9968 train loss:3.418985 +step:9969 train loss:3.391646 +step:9970 train loss:3.370337 +step:9971 train loss:3.405332 +step:9972 train loss:3.385575 +step:9973 train loss:3.366962 +step:9974 train loss:3.427722 +step:9975 train loss:3.441895 +step:9976 train loss:3.377637 +step:9977 train loss:3.368035 +step:9978 train loss:3.406434 +step:9979 train loss:3.410979 +step:9980 train loss:3.394671 +step:9981 train loss:3.419467 +step:9982 train loss:3.384080 +step:9983 train loss:3.435839 +step:9984 train loss:3.363858 +step:9985 train loss:3.379888 +step:9986 train loss:3.405906 +step:9987 train loss:3.428254 +step:9988 train loss:3.389890 +step:9989 train loss:3.403126 +step:9990 train loss:3.444554 +step:9991 train loss:3.555154 +step:9992 train loss:3.411117 +step:9993 train loss:3.377869 +step:9994 train loss:3.354326 +step:9995 train loss:3.414570 +step:9996 train loss:3.342729 +step:9997 train loss:3.374986 +step:9998 train loss:3.356479 +step:9999 train loss:3.400607 +step:10000 validation loss:3.344615 total_sharp:1.7555e-02 L1_sharp:8.8401e-03 L2_sharp:2.5425e-03 L3_sharp:6.3135e-03 L4_sharp:5.3709e-03 L5_sharp:1.9621e-03 L6_sharp:3.5547e-03 L7_sharp:4.7911e-03 L8_sharp:6.8846e-03 L9_sharp:5.6462e-03 L10_sharp:2.2842e-03 L11_sharp:2.0567e-03 L12_sharp:8.0915e-03 total_fnorm:1.2664e-03 total_l1_linf:1.1361e+01 total_spectral:1.2664e-03 L1_fnorm:3.0799e-04 L2_fnorm:3.0372e-04 L3_fnorm:3.0794e-04 L4_fnorm:3.0943e-04 L5_fnorm:3.0221e-04 L6_fnorm:3.0888e-04 L7_fnorm:3.0892e-04 L8_fnorm:3.0812e-04 L9_fnorm:3.0899e-04 L10_fnorm:3.0862e-04 L11_fnorm:3.1049e-04 L12_fnorm:3.1218e-04 L1_l1linf:4.3647e-04 L2_l1linf:3.7964e-04 L3_l1linf:4.3928e-04 L4_l1linf:4.9759e-04 L5_l1linf:4.5743e-04 L6_l1linf:5.8806e-04 L7_l1linf:5.3065e-04 L8_l1linf:4.6743e-04 L9_l1linf:3.7515e-04 L10_l1linf:3.4825e-04 L11_l1linf:3.3004e-04 L12_l1linf:3.9645e-04 L1_spectral:6.2777e-05 L2_spectral:5.1118e-05 L3_spectral:6.6804e-05 L4_spectral:7.5420e-05 L5_spectral:6.8054e-05 L6_spectral:8.3001e-05 L7_spectral:7.5278e-05 L8_spectral:7.1241e-05 L9_spectral:5.2188e-05 L10_spectral:3.6020e-05 L11_spectral:3.4082e-05 L12_spectral:5.6299e-05 v_norm:1.2664e-03 cos_v_-g_hvp:6.9231e-02 g_hvp_norm:2.9346e-01 cos_v_-g_t:1.3649e-01 g_t_norm:1.5019e-01 hv_norm:6.5331e-04 cos_v_hv:3.4029e-02 hg_norm:8.0618e+00 cos_g_hg:4.0138e-01 v_par:5.1519e-06 v_perp:1.2664e-03 L1_cos_v_neg_g:1.0920e-01 L1_v_norm:3.0799e-04 L2_cos_v_neg_g:6.5749e-02 L2_v_norm:3.0372e-04 L3_cos_v_neg_g:5.0262e-02 L3_v_norm:3.0794e-04 L4_cos_v_neg_g:5.9764e-02 L4_v_norm:3.0943e-04 L5_cos_v_neg_g:3.7605e-02 L5_v_norm:3.0222e-04 L6_cos_v_neg_g:5.2250e-02 L6_v_norm:3.0888e-04 L7_cos_v_neg_g:6.0228e-02 L7_v_norm:3.0892e-04 L8_cos_v_neg_g:6.1586e-02 L8_v_norm:3.0812e-04 L9_cos_v_neg_g:6.3468e-02 L9_v_norm:3.0899e-04 L10_cos_v_neg_g:7.6917e-02 L10_v_norm:3.0862e-04 L11_cos_v_neg_g:8.8076e-02 L11_v_norm:3.1049e-04 L12_cos_v_neg_g:1.2576e-01 L12_v_norm:3.1218e-04 diff --git 
a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/config.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/config.json new file mode 100644 index 0000000000000000000000000000000000000000..8462f9b48dc4d5ae6455c11b490465c4bf273912 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure_qk_nonorm_no_clip/layer_wise_new_code_rand", + "model": "d12", + "batch_size": 4, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 10000.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "shuffle_files": true, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "adam", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 45, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500 + }, + "run_uuid": "b308fe66-961b-4633-be8c-c20f705665a1", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..c0e4b7c2a74e33959ea9a6e9a3323633d30138fc --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.939970850944519, + "total_l1_linf_norm": 16928.31640625, + "total_spectral_norm": 1.939970850944519, + "embed_lm_head_update_fnorm": 1.3129730224609375, + "embed_lm_head_max_l1_linf_norm": 0.33611249923706055, + "embed_lm_head_max_spectral_norm": 0.3096851408481598, + "layer_1_update_fnorm": 0.363661527633667, + "layer_1_max_l1_linf_norm": 0.5385386347770691, + "layer_1_max_spectral_norm": 0.08423361927270889, + "layer_2_update_fnorm": 0.37358078360557556, + "layer_2_max_l1_linf_norm": 0.589921236038208, + "layer_2_max_spectral_norm": 0.09968167543411255, + "layer_3_update_fnorm": 0.3834537863731384, + "layer_3_max_l1_linf_norm": 0.6292232275009155, + "layer_3_max_spectral_norm": 0.09702213108539581, + "layer_4_update_fnorm": 0.37724006175994873, + "layer_4_max_l1_linf_norm": 0.6815167665481567, + "layer_4_max_spectral_norm": 0.10565966367721558, + "layer_5_update_fnorm": 0.3589664399623871, + "layer_5_max_l1_linf_norm": 0.5800632834434509, + "layer_5_max_spectral_norm": 0.09066049754619598, + "layer_6_update_fnorm": 0.3913280963897705, + "layer_6_max_l1_linf_norm": 0.6270619630813599, + "layer_6_max_spectral_norm": 0.10057400912046432, + "layer_7_update_fnorm": 0.4059821367263794, + "layer_7_max_l1_linf_norm": 0.570649266242981, + "layer_7_max_spectral_norm": 0.08827370405197144, + 
"layer_8_update_fnorm": 0.4325598180294037, + "layer_8_max_l1_linf_norm": 0.4904370903968811, + "layer_8_max_spectral_norm": 0.07272043079137802, + "layer_9_update_fnorm": 0.45925986766815186, + "layer_9_max_l1_linf_norm": 0.5070055723190308, + "layer_9_max_spectral_norm": 0.07677367329597473, + "layer_10_update_fnorm": 0.47228848934173584, + "layer_10_max_l1_linf_norm": 0.5214371085166931, + "layer_10_max_spectral_norm": 0.07713590562343597, + "layer_11_update_fnorm": 0.47164788842201233, + "layer_11_max_l1_linf_norm": 0.5299468040466309, + "layer_11_max_spectral_norm": 0.08007670938968658, + "layer_12_update_fnorm": 0.4336789846420288, + "layer_12_max_l1_linf_norm": 0.5535063147544861, + "layer_12_max_spectral_norm": 0.09166011214256287, + "block0_q_update_fnorm": 0.10222884267568588, + "block0_q_max_l1_linf_norm": 0.1881609410047531, + "block0_q_max_spectral_norm": 0.04893624410033226, + "block0_k_update_fnorm": 0.10240136086940765, + "block0_k_max_l1_linf_norm": 0.21844464540481567, + "block0_k_max_spectral_norm": 0.05945724621415138, + "block0_v_update_fnorm": 0.10583195835351944, + "block0_v_max_l1_linf_norm": 0.18968147039413452, + "block0_v_max_spectral_norm": 0.06263057142496109, + "block0_o_update_fnorm": 0.1196628287434578, + "block0_o_max_l1_linf_norm": 0.13305547833442688, + "block0_o_max_spectral_norm": 0.04083957150578499, + "block0_mlp_win_update_fnorm": 0.21981562674045563, + "block0_mlp_win_max_l1_linf_norm": 0.1395653784275055, + "block0_mlp_win_max_spectral_norm": 0.06965509057044983, + "block0_mlp_wout_update_fnorm": 0.19338248670101166, + "block0_mlp_wout_max_l1_linf_norm": 0.5385386347770691, + "block0_mlp_wout_max_spectral_norm": 0.08423361927270889, + "block3_q_update_fnorm": 0.11596017330884933, + "block3_q_max_l1_linf_norm": 0.1537279188632965, + "block3_q_max_spectral_norm": 0.05018679425120354, + "block3_k_update_fnorm": 0.10910617560148239, + "block3_k_max_l1_linf_norm": 0.18202748894691467, + "block3_k_max_spectral_norm": 0.03888930380344391, + "block3_v_update_fnorm": 0.10162054002285004, + "block3_v_max_l1_linf_norm": 0.14190465211868286, + "block3_v_max_spectral_norm": 0.057325929403305054, + "block3_o_update_fnorm": 0.10919985920190811, + "block3_o_max_l1_linf_norm": 0.14972227811813354, + "block3_o_max_spectral_norm": 0.058444999158382416, + "block3_mlp_win_update_fnorm": 0.24203458428382874, + "block3_mlp_win_max_l1_linf_norm": 0.18464991450309753, + "block3_mlp_win_max_spectral_norm": 0.07522675395011902, + "block3_mlp_wout_update_fnorm": 0.18984735012054443, + "block3_mlp_wout_max_l1_linf_norm": 0.6815167665481567, + "block3_mlp_wout_max_spectral_norm": 0.10565966367721558, + "block7_q_update_fnorm": 0.1384584903717041, + "block7_q_max_l1_linf_norm": 0.16111266613006592, + "block7_q_max_spectral_norm": 0.04403933137655258, + "block7_k_update_fnorm": 0.11687174439430237, + "block7_k_max_l1_linf_norm": 0.17384153604507446, + "block7_k_max_spectral_norm": 0.0294725950807333, + "block7_v_update_fnorm": 0.10645735263824463, + "block7_v_max_l1_linf_norm": 0.17143738269805908, + "block7_v_max_spectral_norm": 0.04996442049741745, + "block7_o_update_fnorm": 0.12174665927886963, + "block7_o_max_l1_linf_norm": 0.15131105482578278, + "block7_o_max_spectral_norm": 0.051067009568214417, + "block7_mlp_win_update_fnorm": 0.26134493947029114, + "block7_mlp_win_max_l1_linf_norm": 0.20309972763061523, + "block7_mlp_win_max_spectral_norm": 0.07272043079137802, + "block7_mlp_wout_update_fnorm": 0.24438205361366272, + "block7_mlp_wout_max_l1_linf_norm": 0.4904370903968811, 
+ "block7_mlp_wout_max_spectral_norm": 0.06562680751085281, + "block11_q_update_fnorm": 0.12836037576198578, + "block11_q_max_l1_linf_norm": 0.17870816588401794, + "block11_q_max_spectral_norm": 0.04633026197552681, + "block11_k_update_fnorm": 0.11102329194545746, + "block11_k_max_l1_linf_norm": 0.1746281087398529, + "block11_k_max_spectral_norm": 0.032360903918743134, + "block11_v_update_fnorm": 0.10665559768676758, + "block11_v_max_l1_linf_norm": 0.1689661145210266, + "block11_v_max_spectral_norm": 0.05699920654296875, + "block11_o_update_fnorm": 0.14066004753112793, + "block11_o_max_l1_linf_norm": 0.1764642745256424, + "block11_o_max_spectral_norm": 0.06814105808734894, + "block11_mlp_win_update_fnorm": 0.255070298910141, + "block11_mlp_win_max_l1_linf_norm": 0.2115408033132553, + "block11_mlp_win_max_spectral_norm": 0.08297143876552582, + "block11_mlp_wout_update_fnorm": 0.25097405910491943, + "block11_mlp_wout_max_l1_linf_norm": 0.5535063147544861, + "block11_mlp_wout_max_spectral_norm": 0.09166011214256287, + "total_sharpness": 0.09026972949504852, + "block_total_sharpness": 0.15890447795391083, + "v_norm_block": 1.428141474723816, + "v_T_H_v_block": 0.32409968972206116, + "v_norm": 1.939970850944519, + "ip_v_neg_g_hvp": 0.18989494442939758, + "cos_v_neg_g_hvp": 0.17630013823509216, + "g_hvp_norm": 0.5552205443382263, + "ip_v_neg_g_t": 0.1903989464044571, + "cos_v_neg_g_t": 0.1880854219198227, + "g_t_norm": 0.5218121409416199, + "g_norm": 0.5552205443382263, + "hv_norm": 1.2433687448501587, + "cos_v_hv": 0.1408436894416809, + "hg_norm": 7.404033660888672, + "cos_g_hg": 0.6550762057304382, + "v_parallel_norm": 0.013303905725479126, + "v_perp_norm": 1.939925193786621, + "embed_lm_head_v_norm": 1.3129730224609375, + "embed_lm_head_cos_v_neg_g": 0.09079153090715408, + "layer_1_v_norm": 0.363661527633667, + "layer_1_cos_v_neg_g": 0.3186955153942108, + "layer_2_v_norm": 0.37358078360557556, + "layer_2_cos_v_neg_g": 0.19991706311702728, + "layer_3_v_norm": 0.3834538161754608, + "layer_3_cos_v_neg_g": 0.19137796759605408, + "layer_4_v_norm": 0.37724006175994873, + "layer_4_cos_v_neg_g": 0.20674607157707214, + "layer_5_v_norm": 0.3589664399623871, + "layer_5_cos_v_neg_g": 0.21674741804599762, + "layer_6_v_norm": 0.3913281261920929, + "layer_6_cos_v_neg_g": 0.23184041678905487, + "layer_7_v_norm": 0.4059821367263794, + "layer_7_cos_v_neg_g": 0.25777652859687805, + "layer_8_v_norm": 0.4325598478317261, + "layer_8_cos_v_neg_g": 0.25372886657714844, + "layer_9_v_norm": 0.45925986766815186, + "layer_9_cos_v_neg_g": 0.2571580111980438, + "layer_10_v_norm": 0.47228848934173584, + "layer_10_cos_v_neg_g": 0.30804765224456787, + "layer_11_v_norm": 0.47164785861968994, + "layer_11_cos_v_neg_g": 0.3273189961910248, + "layer_12_v_norm": 0.4336789846420288, + "layer_12_cos_v_neg_g": 0.3584454357624054, + "block0_q_v_norm": 0.10222884267568588, + "block0_q_cos_v_neg_g": 0.38223329186439514, + "block0_k_v_norm": 0.10240136086940765, + "block0_k_cos_v_neg_g": 0.38115230202674866, + "block0_v_v_norm": 0.10583195835351944, + "block0_v_cos_v_neg_g": 0.46501439809799194, + "block0_o_v_norm": 0.1196628287434578, + "block0_o_cos_v_neg_g": 0.37263303995132446, + "block0_mlp_win_v_norm": 0.21981562674045563, + "block0_mlp_win_cos_v_neg_g": 0.4713335931301117, + "block0_mlp_wout_v_norm": 0.19338248670101166, + "block0_mlp_wout_cos_v_neg_g": 0.5000712275505066, + "block3_q_v_norm": 0.11596017330884933, + "block3_q_cos_v_neg_g": 0.1651027798652649, + "block3_k_v_norm": 0.10910617560148239, + "block3_k_cos_v_neg_g": 
0.14351730048656464, + "block3_v_v_norm": 0.10162054002285004, + "block3_v_cos_v_neg_g": 0.206251323223114, + "block3_o_v_norm": 0.10919985920190811, + "block3_o_cos_v_neg_g": 0.3544827103614807, + "block3_mlp_win_v_norm": 0.24203458428382874, + "block3_mlp_win_cos_v_neg_g": 0.25522923469543457, + "block3_mlp_wout_v_norm": 0.18984735012054443, + "block3_mlp_wout_cos_v_neg_g": 0.4752828776836395, + "block7_q_v_norm": 0.1384584903717041, + "block7_q_cos_v_neg_g": 0.24302561581134796, + "block7_k_v_norm": 0.11687174439430237, + "block7_k_cos_v_neg_g": 0.3229885697364807, + "block7_v_v_norm": 0.10645735263824463, + "block7_v_cos_v_neg_g": 0.29139429330825806, + "block7_o_v_norm": 0.12174665927886963, + "block7_o_cos_v_neg_g": 0.4005172848701477, + "block7_mlp_win_v_norm": 0.26134493947029114, + "block7_mlp_win_cos_v_neg_g": 0.330689936876297, + "block7_mlp_wout_v_norm": 0.24438205361366272, + "block7_mlp_wout_cos_v_neg_g": 0.3716108798980713, + "block11_q_v_norm": 0.12836037576198578, + "block11_q_cos_v_neg_g": 0.34773069620132446, + "block11_k_v_norm": 0.11102329194545746, + "block11_k_cos_v_neg_g": 0.3572063744068146, + "block11_v_v_norm": 0.10665559768676758, + "block11_v_cos_v_neg_g": 0.4499433636665344, + "block11_o_v_norm": 0.14066004753112793, + "block11_o_cos_v_neg_g": 0.43818050622940063, + "block11_mlp_win_v_norm": 0.255070298910141, + "block11_mlp_win_cos_v_neg_g": 0.3645400404930115, + "block11_mlp_wout_v_norm": 0.25097405910491943, + "block11_mlp_wout_cos_v_neg_g": 0.38345035910606384, + "embed_lm_head_sharpness": 0.0006354370270855725, + "layer_1_sharpness": 0.1283014416694641, + "layer_2_sharpness": 0.022719454020261765, + "layer_3_sharpness": 0.023001501336693764, + "layer_4_sharpness": 0.02449166774749756, + "layer_5_sharpness": 0.031769443303346634, + "layer_6_sharpness": 0.02072393149137497, + "layer_7_sharpness": 0.019304729998111725, + "layer_8_sharpness": 0.016338003799319267, + "layer_9_sharpness": 0.011372946202754974, + "layer_10_sharpness": 0.008866697549819946, + "layer_11_sharpness": 0.007802141830325127, + "layer_12_sharpness": 0.01172545924782753, + "block0_q_sharpness": 0.00941601861268282, + "block0_k_sharpness": 0.008996347896754742, + "block0_v_sharpness": 0.09477795660495758, + "block0_o_sharpness": 0.03336845710873604, + "block0_mlp_win_sharpness": 0.018027959391474724, + "block0_mlp_wout_sharpness": 0.055376794189214706, + "block3_q_sharpness": 0.009503709152340889, + "block3_k_sharpness": 0.007394029758870602, + "block3_v_sharpness": 0.014570745639503002, + "block3_o_sharpness": 0.009042298421263695, + "block3_mlp_win_sharpness": 0.0008169420761987567, + "block3_mlp_wout_sharpness": 0.009777837432920933, + "block7_q_sharpness": 0.001072299899533391, + "block7_k_sharpness": 0.010451314970850945, + "block7_v_sharpness": 0.028758134692907333, + "block7_o_sharpness": 0.006798625458031893, + "block7_mlp_win_sharpness": 0.004653703887015581, + "block7_mlp_wout_sharpness": 0.002971567213535309, + "block11_q_sharpness": 0.00034182556555606425, + "block11_k_sharpness": 0.001563337049447, + "block11_v_sharpness": 0.010198264382779598, + "block11_o_sharpness": 0.0017380616627633572, + "block11_mlp_win_sharpness": 0.004348797257989645, + "block11_mlp_wout_sharpness": 0.005238278303295374, + "sum_layer_numerators": 0.04882967189899271, + "block_diag_sharpness": 0.02394094796566886, + "cross_layer_sharpness": 0.13496352998824196 +} \ No newline at end of file diff --git 
a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_10000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..2e2e68175d62102310e3c751d40fdaff28ad0fd1 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_10000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.001271562883630395, + "total_l1_linf_norm": 11.407903671264648, + "total_spectral_norm": 0.0012715626507997513, + "embed_lm_head_update_fnorm": 0.0006830021156929433, + "embed_lm_head_max_l1_linf_norm": 0.00018862300203181803, + "embed_lm_head_max_spectral_norm": 0.0001295255933655426, + "layer_1_update_fnorm": 0.0003086666692979634, + "layer_1_max_l1_linf_norm": 0.00043238079524599016, + "layer_1_max_spectral_norm": 6.455799302784726e-05, + "layer_2_update_fnorm": 0.0003044260083697736, + "layer_2_max_l1_linf_norm": 0.00039669135003350675, + "layer_2_max_spectral_norm": 5.018807860324159e-05, + "layer_3_update_fnorm": 0.00030891556525602937, + "layer_3_max_l1_linf_norm": 0.0005117307300679386, + "layer_3_max_spectral_norm": 7.260435813805088e-05, + "layer_4_update_fnorm": 0.00030975660774856806, + "layer_4_max_l1_linf_norm": 0.0005168590578250587, + "layer_4_max_spectral_norm": 8.027384319575503e-05, + "layer_5_update_fnorm": 0.00030399567913264036, + "layer_5_max_l1_linf_norm": 0.0004034590092487633, + "layer_5_max_spectral_norm": 6.72999958624132e-05, + "layer_6_update_fnorm": 0.0003117906744591892, + "layer_6_max_l1_linf_norm": 0.00048734512529335916, + "layer_6_max_spectral_norm": 8.257829176727682e-05, + "layer_7_update_fnorm": 0.00031137181213125587, + "layer_7_max_l1_linf_norm": 0.0005394779145717621, + "layer_7_max_spectral_norm": 7.645611913176253e-05, + "layer_8_update_fnorm": 0.00031089683761820197, + "layer_8_max_l1_linf_norm": 0.0004463162331376225, + "layer_8_max_spectral_norm": 7.042643119348213e-05, + "layer_9_update_fnorm": 0.00031075297738425434, + "layer_9_max_l1_linf_norm": 0.000389553839340806, + "layer_9_max_spectral_norm": 5.409062578110024e-05, + "layer_10_update_fnorm": 0.00031065059010870755, + "layer_10_max_l1_linf_norm": 0.00034254888305440545, + "layer_10_max_spectral_norm": 3.658075365819968e-05, + "layer_11_update_fnorm": 0.0003108597593382001, + "layer_11_max_l1_linf_norm": 0.00035285070771351457, + "layer_11_max_spectral_norm": 3.5295448469696566e-05, + "layer_12_update_fnorm": 0.0003132257843390107, + "layer_12_max_l1_linf_norm": 0.0004173539928160608, + "layer_12_max_spectral_norm": 5.9899819461861625e-05, + "block0_q_update_fnorm": 8.806190453469753e-05, + "block0_q_max_l1_linf_norm": 0.0001036127214320004, + "block0_q_max_spectral_norm": 2.6741716283140704e-05, + "block0_k_update_fnorm": 8.893918129615486e-05, + "block0_k_max_l1_linf_norm": 0.00010145345004275441, + "block0_k_max_spectral_norm": 2.7911040888284333e-05, + "block0_v_update_fnorm": 8.884075214155018e-05, + "block0_v_max_l1_linf_norm": 0.00010269346239510924, + "block0_v_max_spectral_norm": 2.851792123692576e-05, + "block0_o_update_fnorm": 8.787021215539426e-05, + "block0_o_max_l1_linf_norm": 8.280594920506701e-05, + "block0_o_max_spectral_norm": 1.76170051418012e-05, + "block0_mlp_win_update_fnorm": 0.00017803416994865984, + "block0_mlp_win_max_l1_linf_norm": 0.00010159550583921373, + "block0_mlp_win_max_spectral_norm": 3.7905367207713425e-05, + 
"block0_mlp_wout_update_fnorm": 0.0001794618583517149, + "block0_mlp_wout_max_l1_linf_norm": 0.00043238079524599016, + "block0_mlp_wout_max_spectral_norm": 6.455799302784726e-05, + "block3_q_update_fnorm": 8.703111961949617e-05, + "block3_q_max_l1_linf_norm": 9.002082515507936e-05, + "block3_q_max_spectral_norm": 2.327234687982127e-05, + "block3_k_update_fnorm": 8.614733087597415e-05, + "block3_k_max_l1_linf_norm": 0.00011966855527134612, + "block3_k_max_spectral_norm": 2.0653827959904447e-05, + "block3_v_update_fnorm": 9.094313281821087e-05, + "block3_v_max_l1_linf_norm": 0.00013066969404462725, + "block3_v_max_spectral_norm": 2.4485836547682993e-05, + "block3_o_update_fnorm": 9.062368189916015e-05, + "block3_o_max_l1_linf_norm": 0.00011253342381678522, + "block3_o_max_spectral_norm": 3.3325519325444475e-05, + "block3_mlp_win_update_fnorm": 0.00017722888151183724, + "block3_mlp_win_max_l1_linf_norm": 0.00012237625196576118, + "block3_mlp_win_max_spectral_norm": 4.1531540773576126e-05, + "block3_mlp_wout_update_fnorm": 0.00018152545089833438, + "block3_mlp_wout_max_l1_linf_norm": 0.0005168590578250587, + "block3_mlp_wout_max_spectral_norm": 8.027384319575503e-05, + "block7_q_update_fnorm": 8.719169272808358e-05, + "block7_q_max_l1_linf_norm": 8.775648893788457e-05, + "block7_q_max_spectral_norm": 1.384622464684071e-05, + "block7_k_update_fnorm": 8.764836093178019e-05, + "block7_k_max_l1_linf_norm": 9.842876897891983e-05, + "block7_k_max_spectral_norm": 1.5851315765758045e-05, + "block7_v_update_fnorm": 9.038105781655759e-05, + "block7_v_max_l1_linf_norm": 0.00011821127554867417, + "block7_v_max_spectral_norm": 1.841070843511261e-05, + "block7_o_update_fnorm": 9.040421718964353e-05, + "block7_o_max_l1_linf_norm": 8.882144175004214e-05, + "block7_o_max_spectral_norm": 1.7077691154554486e-05, + "block7_mlp_win_update_fnorm": 0.00017897316138260067, + "block7_mlp_win_max_l1_linf_norm": 0.00012781663099303842, + "block7_mlp_win_max_spectral_norm": 2.6477884603082202e-05, + "block7_mlp_wout_update_fnorm": 0.0001813679264159873, + "block7_mlp_wout_max_l1_linf_norm": 0.0004463162331376225, + "block7_mlp_wout_max_spectral_norm": 7.042643119348213e-05, + "block11_q_update_fnorm": 8.861162496032193e-05, + "block11_q_max_l1_linf_norm": 8.24279704829678e-05, + "block11_q_max_spectral_norm": 1.3969385690870695e-05, + "block11_k_update_fnorm": 8.949190669227391e-05, + "block11_k_max_l1_linf_norm": 9.595268056727946e-05, + "block11_k_max_spectral_norm": 1.667554897721857e-05, + "block11_v_update_fnorm": 9.074083209270611e-05, + "block11_v_max_l1_linf_norm": 0.00010920439672190696, + "block11_v_max_spectral_norm": 2.1333955373847857e-05, + "block11_o_update_fnorm": 9.019548451760784e-05, + "block11_o_max_l1_linf_norm": 9.417165711056441e-05, + "block11_o_max_spectral_norm": 2.1855685190530494e-05, + "block11_mlp_win_update_fnorm": 0.00017869799921754748, + "block11_mlp_win_max_l1_linf_norm": 0.00010239379480481148, + "block11_mlp_win_max_spectral_norm": 2.7066322218161076e-05, + "block11_mlp_wout_update_fnorm": 0.0001839635515352711, + "block11_mlp_wout_max_l1_linf_norm": 0.0004173539928160608, + "block11_mlp_wout_max_spectral_norm": 5.9899819461861625e-05, + "total_sharpness": 0.017570558935403824, + "block_total_sharpness": 0.023036940023303032, + "v_norm_block": 0.001072557526640594, + "v_T_H_v_block": 2.6501227878839018e-08, + "v_norm": 0.0012715632328763604, + "ip_v_neg_g_hvp": 2.698659955058247e-05, + "cos_v_neg_g_hvp": 0.07193413376808167, + "g_hvp_norm": 0.29503610730171204, + "ip_v_neg_g_t": 
2.725774720602203e-05, + "cos_v_neg_g_t": 0.1411171406507492, + "g_t_norm": 0.15190505981445312, + "g_norm": 0.29503610730171204, + "hv_norm": 0.0006617872277274728, + "cos_v_hv": 0.03376014903187752, + "hg_norm": 8.696434020996094, + "cos_g_hg": 0.39328449964523315, + "v_parallel_norm": 5.383838470152114e-06, + "v_perp_norm": 0.0012715522898361087, + "embed_lm_head_v_norm": 0.0006830028723925352, + "embed_lm_head_cos_v_neg_g": 0.10772910714149475, + "layer_1_v_norm": 0.00030866829911246896, + "layer_1_cos_v_neg_g": 0.10569679737091064, + "layer_2_v_norm": 0.0003044276381842792, + "layer_2_cos_v_neg_g": 0.06271109730005264, + "layer_3_v_norm": 0.00030891719507053494, + "layer_3_cos_v_neg_g": 0.05566904693841934, + "layer_4_v_norm": 0.00030975823756307364, + "layer_4_cos_v_neg_g": 0.057021744549274445, + "layer_5_v_norm": 0.00030399730894714594, + "layer_5_cos_v_neg_g": 0.04326842725276947, + "layer_6_v_norm": 0.0003117922751698643, + "layer_6_cos_v_neg_g": 0.05617957189679146, + "layer_7_v_norm": 0.00031137344194576144, + "layer_7_cos_v_neg_g": 0.0637233704328537, + "layer_8_v_norm": 0.00031089846743270755, + "layer_8_cos_v_neg_g": 0.06416299194097519, + "layer_9_v_norm": 0.00031075457809492946, + "layer_9_cos_v_neg_g": 0.0665658563375473, + "layer_10_v_norm": 0.0003106522199232131, + "layer_10_cos_v_neg_g": 0.08086108416318893, + "layer_11_v_norm": 0.00031086138915270567, + "layer_11_cos_v_neg_g": 0.10081003606319427, + "layer_12_v_norm": 0.00031322738504968584, + "layer_12_cos_v_neg_g": 0.13363179564476013, + "block0_q_v_norm": 8.80675797816366e-05, + "block0_q_cos_v_neg_g": 0.13404516875743866, + "block0_k_v_norm": 8.8944798335433e-05, + "block0_k_cos_v_neg_g": 0.13208016753196716, + "block0_v_v_norm": 8.884638373274356e-05, + "block0_v_cos_v_neg_g": 0.1756339967250824, + "block0_o_v_norm": 8.787590923020616e-05, + "block0_o_cos_v_neg_g": 0.12344907969236374, + "block0_mlp_win_v_norm": 0.0001780369784682989, + "block0_mlp_win_cos_v_neg_g": 0.07861921191215515, + "block0_mlp_wout_v_norm": 0.00017946463776752353, + "block0_mlp_wout_cos_v_neg_g": 0.14637921750545502, + "block3_q_v_norm": 8.703686762601137e-05, + "block3_q_cos_v_neg_g": 0.07193697988986969, + "block3_k_v_norm": 8.615312981419265e-05, + "block3_k_cos_v_neg_g": 0.08085554838180542, + "block3_v_v_norm": 9.094863344216719e-05, + "block3_v_cos_v_neg_g": 0.03825053945183754, + "block3_o_v_norm": 9.06291970750317e-05, + "block3_o_cos_v_neg_g": 0.1425383985042572, + "block3_mlp_win_v_norm": 0.00017723169003147632, + "block3_mlp_win_cos_v_neg_g": 0.05996416509151459, + "block3_mlp_wout_v_norm": 0.00018152820121031255, + "block3_mlp_wout_cos_v_neg_g": 0.19288481771945953, + "block7_q_v_norm": 8.719742618268356e-05, + "block7_q_cos_v_neg_g": 0.07222066819667816, + "block7_k_v_norm": 8.765406528254971e-05, + "block7_k_cos_v_neg_g": 0.19884620606899261, + "block7_v_v_norm": 9.038658754434437e-05, + "block7_v_cos_v_neg_g": 0.04624802991747856, + "block7_o_v_norm": 9.040974691743031e-05, + "block7_o_cos_v_neg_g": 0.19870008528232574, + "block7_mlp_win_v_norm": 0.00017897594079840928, + "block7_mlp_win_cos_v_neg_g": 0.07546523213386536, + "block7_mlp_wout_v_norm": 0.0001813706912798807, + "block7_mlp_wout_cos_v_neg_g": 0.17559775710105896, + "block11_q_v_norm": 8.861726382747293e-05, + "block11_q_cos_v_neg_g": 0.11757612973451614, + "block11_k_v_norm": 8.949749462772161e-05, + "block11_k_cos_v_neg_g": 0.19248756766319275, + "block11_v_v_norm": 9.074633999262005e-05, + "block11_v_cos_v_neg_g": 0.07682817429304123, + "block11_o_v_norm": 
9.020102879730985e-05, + "block11_o_cos_v_neg_g": 0.2142740935087204, + "block11_mlp_win_v_norm": 0.00017870079318527132, + "block11_mlp_win_cos_v_neg_g": 0.1288553774356842, + "block11_mlp_wout_v_norm": 0.0001839662727434188, + "block11_mlp_wout_cos_v_neg_g": 0.18591564893722534, + "embed_lm_head_sharpness": 0.0007156517240218818, + "layer_1_sharpness": 0.008184664882719517, + "layer_2_sharpness": 0.001514606410637498, + "layer_3_sharpness": 0.0033373446203768253, + "layer_4_sharpness": 0.004271751269698143, + "layer_5_sharpness": 0.002147305989637971, + "layer_6_sharpness": 0.004796835593879223, + "layer_7_sharpness": 0.00447319820523262, + "layer_8_sharpness": 0.007553196046501398, + "layer_9_sharpness": 0.006338044069707394, + "layer_10_sharpness": 0.0031012496910989285, + "layer_11_sharpness": 0.001953914063051343, + "layer_12_sharpness": 0.008730120025575161, + "block0_q_sharpness": 0.002082608640193939, + "block0_k_sharpness": 0.0019178299698978662, + "block0_v_sharpness": 0.00299503724090755, + "block0_o_sharpness": 0.003641061717644334, + "block0_mlp_win_sharpness": 0.0025462841149419546, + "block0_mlp_wout_sharpness": 0.0038927779532969, + "block3_q_sharpness": 0.0007549318834207952, + "block3_k_sharpness": 0.0006327210576273501, + "block3_v_sharpness": 0.011286327615380287, + "block3_o_sharpness": 0.0016400831518694758, + "block3_mlp_win_sharpness": 0.000504865834955126, + "block3_mlp_wout_sharpness": 0.001135997474193573, + "block7_q_sharpness": 0.0001701294386293739, + "block7_k_sharpness": 0.00019954871095251292, + "block7_v_sharpness": 0.015097512863576412, + "block7_o_sharpness": 0.00046468721120618284, + "block7_mlp_win_sharpness": 0.0017178839771077037, + "block7_mlp_wout_sharpness": 0.002880995161831379, + "block11_q_sharpness": 0.00011738051398424432, + "block11_k_sharpness": 0.00023868322023190558, + "block11_v_sharpness": 0.001660070032812655, + "block11_o_sharpness": 0.00018010132771451026, + "block11_mlp_win_sharpness": 0.0012605054071173072, + "block11_mlp_wout_sharpness": 0.01283174566924572, + "sum_layer_numerators": 5.433740329142333e-09, + "block_diag_sharpness": 0.004723432250222232, + "cross_layer_sharpness": 0.0183135077730808 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..024bc1866f7a597c1ea478c2b84d2eede8c4464b --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_1500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.045729398727417, + "total_l1_linf_norm": 18002.23046875, + "total_spectral_norm": 2.045729637145996, + "embed_lm_head_update_fnorm": 1.3184814453125, + "embed_lm_head_max_l1_linf_norm": 0.332809180021286, + "embed_lm_head_max_spectral_norm": 0.27879592776298523, + "layer_1_update_fnorm": 0.42021194100379944, + "layer_1_max_l1_linf_norm": 0.5091425180435181, + "layer_1_max_spectral_norm": 0.08389857411384583, + "layer_2_update_fnorm": 0.43837764859199524, + "layer_2_max_l1_linf_norm": 0.7069166898727417, + "layer_2_max_spectral_norm": 0.10284975171089172, + "layer_3_update_fnorm": 0.4346812963485718, + "layer_3_max_l1_linf_norm": 0.6238440871238708, + "layer_3_max_spectral_norm": 0.09626644104719162, + "layer_4_update_fnorm": 0.42693856358528137, + 
"layer_4_max_l1_linf_norm": 0.7307655215263367, + "layer_4_max_spectral_norm": 0.10053970664739609, + "layer_5_update_fnorm": 0.4275621473789215, + "layer_5_max_l1_linf_norm": 0.7020285129547119, + "layer_5_max_spectral_norm": 0.09368619322776794, + "layer_6_update_fnorm": 0.4453299045562744, + "layer_6_max_l1_linf_norm": 0.623435378074646, + "layer_6_max_spectral_norm": 0.09281430393457413, + "layer_7_update_fnorm": 0.45166751742362976, + "layer_7_max_l1_linf_norm": 0.5386612415313721, + "layer_7_max_spectral_norm": 0.076221764087677, + "layer_8_update_fnorm": 0.45582589507102966, + "layer_8_max_l1_linf_norm": 0.4805978536605835, + "layer_8_max_spectral_norm": 0.06029309704899788, + "layer_9_update_fnorm": 0.4763709604740143, + "layer_9_max_l1_linf_norm": 0.4927982687950134, + "layer_9_max_spectral_norm": 0.06477206945419312, + "layer_10_update_fnorm": 0.4856439232826233, + "layer_10_max_l1_linf_norm": 0.5044893622398376, + "layer_10_max_spectral_norm": 0.06585570424795151, + "layer_11_update_fnorm": 0.4863554835319519, + "layer_11_max_l1_linf_norm": 0.5061297416687012, + "layer_11_max_spectral_norm": 0.06692609935998917, + "layer_12_update_fnorm": 0.4630433917045593, + "layer_12_max_l1_linf_norm": 0.5231035947799683, + "layer_12_max_spectral_norm": 0.08591372519731522, + "block0_q_update_fnorm": 0.1260708123445511, + "block0_q_max_l1_linf_norm": 0.25514787435531616, + "block0_q_max_spectral_norm": 0.06784199923276901, + "block0_k_update_fnorm": 0.12854008376598358, + "block0_k_max_l1_linf_norm": 0.27303043007850647, + "block0_k_max_spectral_norm": 0.08389857411384583, + "block0_v_update_fnorm": 0.12835107743740082, + "block0_v_max_l1_linf_norm": 0.26359277963638306, + "block0_v_max_spectral_norm": 0.07362841814756393, + "block0_o_update_fnorm": 0.14251220226287842, + "block0_o_max_l1_linf_norm": 0.16658088564872742, + "block0_o_max_spectral_norm": 0.05994798243045807, + "block0_mlp_win_update_fnorm": 0.24401234090328217, + "block0_mlp_win_max_l1_linf_norm": 0.14681367576122284, + "block0_mlp_win_max_spectral_norm": 0.0608162060379982, + "block0_mlp_wout_update_fnorm": 0.21851152181625366, + "block0_mlp_wout_max_l1_linf_norm": 0.5091425180435181, + "block0_mlp_wout_max_spectral_norm": 0.07960279285907745, + "block3_q_update_fnorm": 0.12591834366321564, + "block3_q_max_l1_linf_norm": 0.1683982014656067, + "block3_q_max_spectral_norm": 0.051535818725824356, + "block3_k_update_fnorm": 0.1190183088183403, + "block3_k_max_l1_linf_norm": 0.18784672021865845, + "block3_k_max_spectral_norm": 0.03657461330294609, + "block3_v_update_fnorm": 0.10908814519643784, + "block3_v_max_l1_linf_norm": 0.14169201254844666, + "block3_v_max_spectral_norm": 0.04631640017032623, + "block3_o_update_fnorm": 0.11681356281042099, + "block3_o_max_l1_linf_norm": 0.1432255059480667, + "block3_o_max_spectral_norm": 0.05060010030865669, + "block3_mlp_win_update_fnorm": 0.27420979738235474, + "block3_mlp_win_max_l1_linf_norm": 0.20219290256500244, + "block3_mlp_win_max_spectral_norm": 0.0687895193696022, + "block3_mlp_wout_update_fnorm": 0.2267649620771408, + "block3_mlp_wout_max_l1_linf_norm": 0.7307655215263367, + "block3_mlp_wout_max_spectral_norm": 0.10053970664739609, + "block7_q_update_fnorm": 0.1438576728105545, + "block7_q_max_l1_linf_norm": 0.1536240428686142, + "block7_q_max_spectral_norm": 0.03421279415488243, + "block7_k_update_fnorm": 0.13249365985393524, + "block7_k_max_l1_linf_norm": 0.15675514936447144, + "block7_k_max_spectral_norm": 0.0298715028911829, + "block7_v_update_fnorm": 0.10998960584402084, + 
"block7_v_max_l1_linf_norm": 0.12600508332252502, + "block7_v_max_spectral_norm": 0.04082881286740303, + "block7_o_update_fnorm": 0.12815280258655548, + "block7_o_max_l1_linf_norm": 0.13776400685310364, + "block7_o_max_spectral_norm": 0.04245445504784584, + "block7_mlp_win_update_fnorm": 0.2703368067741394, + "block7_mlp_win_max_l1_linf_norm": 0.16217100620269775, + "block7_mlp_win_max_spectral_norm": 0.06029309704899788, + "block7_mlp_wout_update_fnorm": 0.2604345679283142, + "block7_mlp_wout_max_l1_linf_norm": 0.4805978536605835, + "block7_mlp_wout_max_spectral_norm": 0.04973703622817993, + "block11_q_update_fnorm": 0.14039462804794312, + "block11_q_max_l1_linf_norm": 0.17611658573150635, + "block11_q_max_spectral_norm": 0.0401783287525177, + "block11_k_update_fnorm": 0.12319690734148026, + "block11_k_max_l1_linf_norm": 0.16385281085968018, + "block11_k_max_spectral_norm": 0.03165455907583237, + "block11_v_update_fnorm": 0.11561162769794464, + "block11_v_max_l1_linf_norm": 0.1597874015569687, + "block11_v_max_spectral_norm": 0.05581642687320709, + "block11_o_update_fnorm": 0.13979679346084595, + "block11_o_max_l1_linf_norm": 0.14635920524597168, + "block11_o_max_spectral_norm": 0.05345296114683151, + "block11_mlp_win_update_fnorm": 0.27762797474861145, + "block11_mlp_win_max_l1_linf_norm": 0.1713467836380005, + "block11_mlp_win_max_spectral_norm": 0.07403016090393066, + "block11_mlp_wout_update_fnorm": 0.2635473608970642, + "block11_mlp_wout_max_l1_linf_norm": 0.5231035947799683, + "block11_mlp_wout_max_spectral_norm": 0.08591372519731522, + "total_sharpness": 0.04116811975836754, + "block_total_sharpness": 0.06682345271110535, + "v_norm_block": 1.5641663074493408, + "v_T_H_v_block": 0.16349133849143982, + "v_norm": 2.045729398727417, + "ip_v_neg_g_hvp": 0.11671116203069687, + "cos_v_neg_g_hvp": 0.13969570398330688, + "g_hvp_norm": 0.4083957076072693, + "ip_v_neg_g_t": 0.11682581156492233, + "cos_v_neg_g_t": 0.15563634037971497, + "g_t_norm": 0.3669269382953644, + "g_norm": 0.4083957076072693, + "hv_norm": 0.9572613835334778, + "cos_v_hv": 0.08797892183065414, + "hg_norm": 4.557356357574463, + "cos_g_hg": 0.4948391020298004, + "v_parallel_norm": 0.01309182308614254, + "v_perp_norm": 2.045687437057495, + "embed_lm_head_v_norm": 1.3184814453125, + "embed_lm_head_cos_v_neg_g": 0.10320360958576202, + "layer_1_v_norm": 0.42021194100379944, + "layer_1_cos_v_neg_g": 0.23336197435855865, + "layer_2_v_norm": 0.43837764859199524, + "layer_2_cos_v_neg_g": 0.14624366164207458, + "layer_3_v_norm": 0.4346812963485718, + "layer_3_cos_v_neg_g": 0.14781071245670319, + "layer_4_v_norm": 0.42693856358528137, + "layer_4_cos_v_neg_g": 0.1541399359703064, + "layer_5_v_norm": 0.4275621473789215, + "layer_5_cos_v_neg_g": 0.12346072494983673, + "layer_6_v_norm": 0.4453299045562744, + "layer_6_cos_v_neg_g": 0.14676059782505035, + "layer_7_v_norm": 0.45166751742362976, + "layer_7_cos_v_neg_g": 0.16619345545768738, + "layer_8_v_norm": 0.4558258652687073, + "layer_8_cos_v_neg_g": 0.15756404399871826, + "layer_9_v_norm": 0.4763709604740143, + "layer_9_cos_v_neg_g": 0.15939129889011383, + "layer_10_v_norm": 0.4856439232826233, + "layer_10_cos_v_neg_g": 0.19002318382263184, + "layer_11_v_norm": 0.4863554835319519, + "layer_11_cos_v_neg_g": 0.21640467643737793, + "layer_12_v_norm": 0.4630433917045593, + "layer_12_cos_v_neg_g": 0.2623533010482788, + "block0_q_v_norm": 0.1260708123445511, + "block0_q_cos_v_neg_g": 0.3488001823425293, + "block0_k_v_norm": 0.12854008376598358, + "block0_k_cos_v_neg_g": 0.3388434946537018, + 
"block0_v_v_norm": 0.12835107743740082, + "block0_v_cos_v_neg_g": 0.29979705810546875, + "block0_o_v_norm": 0.14251220226287842, + "block0_o_cos_v_neg_g": 0.24142275750637054, + "block0_mlp_win_v_norm": 0.24401234090328217, + "block0_mlp_win_cos_v_neg_g": 0.3461696207523346, + "block0_mlp_wout_v_norm": 0.21851152181625366, + "block0_mlp_wout_cos_v_neg_g": 0.3512307405471802, + "block3_q_v_norm": 0.12591834366321564, + "block3_q_cos_v_neg_g": 0.17530624568462372, + "block3_k_v_norm": 0.1190183088183403, + "block3_k_cos_v_neg_g": 0.14271509647369385, + "block3_v_v_norm": 0.10908814519643784, + "block3_v_cos_v_neg_g": 0.13096709549427032, + "block3_o_v_norm": 0.11681356281042099, + "block3_o_cos_v_neg_g": 0.2772524654865265, + "block3_mlp_win_v_norm": 0.27420979738235474, + "block3_mlp_win_cos_v_neg_g": 0.1541808396577835, + "block3_mlp_wout_v_norm": 0.2267649620771408, + "block3_mlp_wout_cos_v_neg_g": 0.3399873971939087, + "block7_q_v_norm": 0.1438576728105545, + "block7_q_cos_v_neg_g": 0.17110416293144226, + "block7_k_v_norm": 0.13249365985393524, + "block7_k_cos_v_neg_g": 0.26601094007492065, + "block7_v_v_norm": 0.10998960584402084, + "block7_v_cos_v_neg_g": 0.17267589271068573, + "block7_o_v_norm": 0.12815280258655548, + "block7_o_cos_v_neg_g": 0.2995419502258301, + "block7_mlp_win_v_norm": 0.2703368067741394, + "block7_mlp_win_cos_v_neg_g": 0.2157600373029709, + "block7_mlp_wout_v_norm": 0.2604345679283142, + "block7_mlp_wout_cos_v_neg_g": 0.2745788097381592, + "block11_q_v_norm": 0.14039462804794312, + "block11_q_cos_v_neg_g": 0.23174971342086792, + "block11_k_v_norm": 0.12319690734148026, + "block11_k_cos_v_neg_g": 0.2696284055709839, + "block11_v_v_norm": 0.11561162769794464, + "block11_v_cos_v_neg_g": 0.31310418248176575, + "block11_o_v_norm": 0.13979679346084595, + "block11_o_cos_v_neg_g": 0.2902824282646179, + "block11_mlp_win_v_norm": 0.27762797474861145, + "block11_mlp_win_cos_v_neg_g": 0.26928281784057617, + "block11_mlp_wout_v_norm": 0.2635473608970642, + "block11_mlp_wout_cos_v_neg_g": 0.277326762676239, + "embed_lm_head_sharpness": 0.000547778676263988, + "layer_1_sharpness": 0.05524180829524994, + "layer_2_sharpness": 0.013753987848758698, + "layer_3_sharpness": 0.01161545142531395, + "layer_4_sharpness": 0.007450859062373638, + "layer_5_sharpness": 0.01067317184060812, + "layer_6_sharpness": 0.0069156913086771965, + "layer_7_sharpness": 0.007270374801009893, + "layer_8_sharpness": 0.00777440657839179, + "layer_9_sharpness": 0.005863448139280081, + "layer_10_sharpness": 0.004144095350056887, + "layer_11_sharpness": 0.0034547094255685806, + "layer_12_sharpness": 0.00699752289801836, + "block0_q_sharpness": 0.012265198864042759, + "block0_k_sharpness": 0.014464746229350567, + "block0_v_sharpness": 0.02904985286295414, + "block0_o_sharpness": 0.01293235644698143, + "block0_mlp_win_sharpness": 0.010311270132660866, + "block0_mlp_wout_sharpness": 0.02441682480275631, + "block3_q_sharpness": 0.004166747443377972, + "block3_k_sharpness": 0.002615000121295452, + "block3_v_sharpness": 0.006702008657157421, + "block3_o_sharpness": 0.0040005589835345745, + "block3_mlp_win_sharpness": 0.00043287131120450795, + "block3_mlp_wout_sharpness": 0.0025522697251290083, + "block7_q_sharpness": 0.0005594883696176112, + "block7_k_sharpness": 0.0029029001016169786, + "block7_v_sharpness": 0.017718229442834854, + "block7_o_sharpness": 0.0030265990644693375, + "block7_mlp_win_sharpness": 0.0018237086478620768, + "block7_mlp_wout_sharpness": 0.0012968070805072784, + "block11_q_sharpness": 
0.00031067777308635414, + "block11_k_sharpness": 0.0007220146944746375, + "block11_v_sharpness": 0.004708923865109682, + "block11_o_sharpness": 0.0006467198254540563, + "block11_mlp_win_sharpness": 0.0029243638273328543, + "block11_mlp_wout_sharpness": 0.0034266540315002203, + "sum_layer_numerators": 0.026997178306542333, + "block_diag_sharpness": 0.01103449649940877, + "cross_layer_sharpness": 0.05578895621169658 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..2945f8e3232d8e90d50edc5f2732a321aa909474 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.8997042179107666, + "total_l1_linf_norm": 16230.556640625, + "total_spectral_norm": 1.8997042179107666, + "embed_lm_head_update_fnorm": 1.3080017566680908, + "embed_lm_head_max_l1_linf_norm": 0.35362720489501953, + "embed_lm_head_max_spectral_norm": 0.24904011189937592, + "layer_1_update_fnorm": 0.2843644917011261, + "layer_1_max_l1_linf_norm": 0.4029831886291504, + "layer_1_max_spectral_norm": 0.05186708644032478, + "layer_2_update_fnorm": 0.33221372961997986, + "layer_2_max_l1_linf_norm": 0.4374186098575592, + "layer_2_max_spectral_norm": 0.06559530645608902, + "layer_3_update_fnorm": 0.34616345167160034, + "layer_3_max_l1_linf_norm": 0.537233829498291, + "layer_3_max_spectral_norm": 0.06513090431690216, + "layer_4_update_fnorm": 0.3385097086429596, + "layer_4_max_l1_linf_norm": 0.44433629512786865, + "layer_4_max_spectral_norm": 0.06384585052728653, + "layer_5_update_fnorm": 0.3361773192882538, + "layer_5_max_l1_linf_norm": 0.40204906463623047, + "layer_5_max_spectral_norm": 0.06565956771373749, + "layer_6_update_fnorm": 0.37253934144973755, + "layer_6_max_l1_linf_norm": 0.5150094032287598, + "layer_6_max_spectral_norm": 0.06565343588590622, + "layer_7_update_fnorm": 0.39576831459999084, + "layer_7_max_l1_linf_norm": 0.468063086271286, + "layer_7_max_spectral_norm": 0.05801644176244736, + "layer_8_update_fnorm": 0.43638062477111816, + "layer_8_max_l1_linf_norm": 0.4754525423049927, + "layer_8_max_spectral_norm": 0.05651430785655975, + "layer_9_update_fnorm": 0.46329307556152344, + "layer_9_max_l1_linf_norm": 0.48800671100616455, + "layer_9_max_spectral_norm": 0.05745259299874306, + "layer_10_update_fnorm": 0.47246018052101135, + "layer_10_max_l1_linf_norm": 0.5026007294654846, + "layer_10_max_spectral_norm": 0.05712272226810455, + "layer_11_update_fnorm": 0.4759017825126648, + "layer_11_max_l1_linf_norm": 0.503743052482605, + "layer_11_max_spectral_norm": 0.05747560039162636, + "layer_12_update_fnorm": 0.45733723044395447, + "layer_12_max_l1_linf_norm": 0.4768412411212921, + "layer_12_max_spectral_norm": 0.0827966183423996, + "block0_q_update_fnorm": 0.07102172076702118, + "block0_q_max_l1_linf_norm": 0.10718218237161636, + "block0_q_max_spectral_norm": 0.02704710140824318, + "block0_k_update_fnorm": 0.07325288653373718, + "block0_k_max_l1_linf_norm": 0.15280784666538239, + "block0_k_max_spectral_norm": 0.04537106677889824, + "block0_v_update_fnorm": 0.07468675822019577, + "block0_v_max_l1_linf_norm": 0.16563163697719574, + "block0_v_max_spectral_norm": 0.03494809940457344, + 
"block0_o_update_fnorm": 0.08129292726516724, + "block0_o_max_l1_linf_norm": 0.09746192395687103, + "block0_o_max_spectral_norm": 0.02487536147236824, + "block0_mlp_win_update_fnorm": 0.17872866988182068, + "block0_mlp_win_max_l1_linf_norm": 0.14321793615818024, + "block0_mlp_win_max_spectral_norm": 0.05186708644032478, + "block0_mlp_wout_update_fnorm": 0.16210545599460602, + "block0_mlp_wout_max_l1_linf_norm": 0.4029831886291504, + "block0_mlp_wout_max_spectral_norm": 0.04632747918367386, + "block3_q_update_fnorm": 0.09229091554880142, + "block3_q_max_l1_linf_norm": 0.12078960239887238, + "block3_q_max_spectral_norm": 0.03246770426630974, + "block3_k_update_fnorm": 0.08447250723838806, + "block3_k_max_l1_linf_norm": 0.12509089708328247, + "block3_k_max_spectral_norm": 0.028792692348361015, + "block3_v_update_fnorm": 0.07147420197725296, + "block3_v_max_l1_linf_norm": 0.097953200340271, + "block3_v_max_spectral_norm": 0.02776595577597618, + "block3_o_update_fnorm": 0.07718169689178467, + "block3_o_max_l1_linf_norm": 0.11208879202604294, + "block3_o_max_spectral_norm": 0.03188536688685417, + "block3_mlp_win_update_fnorm": 0.22917020320892334, + "block3_mlp_win_max_l1_linf_norm": 0.17955335974693298, + "block3_mlp_win_max_spectral_norm": 0.06384585052728653, + "block3_mlp_wout_update_fnorm": 0.18784017860889435, + "block3_mlp_wout_max_l1_linf_norm": 0.44433629512786865, + "block3_mlp_wout_max_spectral_norm": 0.0594278909265995, + "block7_q_update_fnorm": 0.13169130682945251, + "block7_q_max_l1_linf_norm": 0.1467002034187317, + "block7_q_max_spectral_norm": 0.029566559940576553, + "block7_k_update_fnorm": 0.11760076135396957, + "block7_k_max_l1_linf_norm": 0.14519348740577698, + "block7_k_max_spectral_norm": 0.024287041276693344, + "block7_v_update_fnorm": 0.09420747309923172, + "block7_v_max_l1_linf_norm": 0.1110629215836525, + "block7_v_max_spectral_norm": 0.025288745760917664, + "block7_o_update_fnorm": 0.10965927690267563, + "block7_o_max_l1_linf_norm": 0.12509068846702576, + "block7_o_max_spectral_norm": 0.02950340136885643, + "block7_mlp_win_update_fnorm": 0.2670646607875824, + "block7_mlp_win_max_l1_linf_norm": 0.16496044397354126, + "block7_mlp_win_max_spectral_norm": 0.05651430785655975, + "block7_mlp_wout_update_fnorm": 0.2587556540966034, + "block7_mlp_wout_max_l1_linf_norm": 0.4754525423049927, + "block7_mlp_wout_max_spectral_norm": 0.043672576546669006, + "block11_q_update_fnorm": 0.14532674849033356, + "block11_q_max_l1_linf_norm": 0.1796022355556488, + "block11_q_max_spectral_norm": 0.03822150453925133, + "block11_k_update_fnorm": 0.1310293823480606, + "block11_k_max_l1_linf_norm": 0.22387605905532837, + "block11_k_max_spectral_norm": 0.029727302491664886, + "block11_v_update_fnorm": 0.1037677749991417, + "block11_v_max_l1_linf_norm": 0.14391109347343445, + "block11_v_max_spectral_norm": 0.0340208038687706, + "block11_o_update_fnorm": 0.12227965891361237, + "block11_o_max_l1_linf_norm": 0.13545945286750793, + "block11_o_max_spectral_norm": 0.034521959722042084, + "block11_mlp_win_update_fnorm": 0.27926281094551086, + "block11_mlp_win_max_l1_linf_norm": 0.1603858470916748, + "block11_mlp_win_max_spectral_norm": 0.057785723358392715, + "block11_mlp_wout_update_fnorm": 0.2590164542198181, + "block11_mlp_wout_max_l1_linf_norm": 0.4768412411212921, + "block11_mlp_wout_max_spectral_norm": 0.0827966183423996, + "total_sharpness": 0.011614876799285412, + "block_total_sharpness": 0.020090831443667412, + "v_norm_block": 1.377682089805603, + "v_T_H_v_block": 0.03813255578279495, + 
"v_norm": 1.8997042179107666, + "ip_v_neg_g_hvp": 0.06504862010478973, + "cos_v_neg_g_hvp": 0.11846873909235, + "g_hvp_norm": 0.28903359174728394, + "ip_v_neg_g_t": 0.06516535580158234, + "cos_v_neg_g_t": 0.15152579545974731, + "g_t_norm": 0.22638320922851562, + "g_norm": 0.28903359174728394, + "hv_norm": 0.3443860709667206, + "cos_v_hv": 0.06407004594802856, + "hg_norm": 4.9086503982543945, + "cos_g_hg": 0.3455711007118225, + "v_parallel_norm": 0.018227752298116684, + "v_perp_norm": 1.8996167182922363, + "embed_lm_head_v_norm": 1.3080017566680908, + "embed_lm_head_cos_v_neg_g": 0.11306754499673843, + "layer_1_v_norm": 0.2843644917011261, + "layer_1_cos_v_neg_g": 0.18754835426807404, + "layer_2_v_norm": 0.33221372961997986, + "layer_2_cos_v_neg_g": 0.11483629792928696, + "layer_3_v_norm": 0.34616345167160034, + "layer_3_cos_v_neg_g": 0.1157304048538208, + "layer_4_v_norm": 0.3385097086429596, + "layer_4_cos_v_neg_g": 0.10245315730571747, + "layer_5_v_norm": 0.3361773192882538, + "layer_5_cos_v_neg_g": 0.08615967631340027, + "layer_6_v_norm": 0.37253937125205994, + "layer_6_cos_v_neg_g": 0.11295102536678314, + "layer_7_v_norm": 0.39576831459999084, + "layer_7_cos_v_neg_g": 0.13333414494991302, + "layer_8_v_norm": 0.43638062477111816, + "layer_8_cos_v_neg_g": 0.13666222989559174, + "layer_9_v_norm": 0.46329307556152344, + "layer_9_cos_v_neg_g": 0.1441216915845871, + "layer_10_v_norm": 0.47246018052101135, + "layer_10_cos_v_neg_g": 0.17026720941066742, + "layer_11_v_norm": 0.4759017825126648, + "layer_11_cos_v_neg_g": 0.18138332664966583, + "layer_12_v_norm": 0.45733723044395447, + "layer_12_cos_v_neg_g": 0.21078944206237793, + "block0_q_v_norm": 0.07102172076702118, + "block0_q_cos_v_neg_g": 0.1675904095172882, + "block0_k_v_norm": 0.07325288653373718, + "block0_k_cos_v_neg_g": 0.20698407292366028, + "block0_v_v_norm": 0.07468675822019577, + "block0_v_cos_v_neg_g": 0.2785698175430298, + "block0_o_v_norm": 0.08129292726516724, + "block0_o_cos_v_neg_g": 0.23962876200675964, + "block0_mlp_win_v_norm": 0.17872866988182068, + "block0_mlp_win_cos_v_neg_g": 0.2378893494606018, + "block0_mlp_wout_v_norm": 0.16210545599460602, + "block0_mlp_wout_cos_v_neg_g": 0.22316747903823853, + "block3_q_v_norm": 0.09229091554880142, + "block3_q_cos_v_neg_g": 0.075653575360775, + "block3_k_v_norm": 0.08447250723838806, + "block3_k_cos_v_neg_g": 0.08665398508310318, + "block3_v_v_norm": 0.07147420197725296, + "block3_v_cos_v_neg_g": 0.08248251676559448, + "block3_o_v_norm": 0.07718169689178467, + "block3_o_cos_v_neg_g": 0.21236059069633484, + "block3_mlp_win_v_norm": 0.22917020320892334, + "block3_mlp_win_cos_v_neg_g": 0.1088266372680664, + "block3_mlp_wout_v_norm": 0.18784017860889435, + "block3_mlp_wout_cos_v_neg_g": 0.22823889553546906, + "block7_q_v_norm": 0.13169130682945251, + "block7_q_cos_v_neg_g": 0.1370275765657425, + "block7_k_v_norm": 0.11760076135396957, + "block7_k_cos_v_neg_g": 0.19947126507759094, + "block7_v_v_norm": 0.09420747309923172, + "block7_v_cos_v_neg_g": 0.11547072976827621, + "block7_o_v_norm": 0.10965927690267563, + "block7_o_cos_v_neg_g": 0.24856334924697876, + "block7_mlp_win_v_norm": 0.2670646607875824, + "block7_mlp_win_cos_v_neg_g": 0.18244987726211548, + "block7_mlp_wout_v_norm": 0.2587556540966034, + "block7_mlp_wout_cos_v_neg_g": 0.2408083826303482, + "block11_q_v_norm": 0.14532674849033356, + "block11_q_cos_v_neg_g": 0.21980783343315125, + "block11_k_v_norm": 0.1310293823480606, + "block11_k_cos_v_neg_g": 0.23122283816337585, + "block11_v_v_norm": 0.1037677749991417, + 
"block11_v_cos_v_neg_g": 0.17463502287864685, + "block11_o_v_norm": 0.12227965891361237, + "block11_o_cos_v_neg_g": 0.2390109896659851, + "block11_mlp_win_v_norm": 0.27926281094551086, + "block11_mlp_win_cos_v_neg_g": 0.22131045162677765, + "block11_mlp_wout_v_norm": 0.2590164542198181, + "block11_mlp_wout_cos_v_neg_g": 0.2297392636537552, + "embed_lm_head_sharpness": 0.00040141187491826713, + "layer_1_sharpness": 0.02590358443558216, + "layer_2_sharpness": 0.0017937938682734966, + "layer_3_sharpness": 0.0018472983501851559, + "layer_4_sharpness": 0.0024240436032414436, + "layer_5_sharpness": 0.0032338129822164774, + "layer_6_sharpness": 0.002775907516479492, + "layer_7_sharpness": 0.0035612236242741346, + "layer_8_sharpness": 0.004572990350425243, + "layer_9_sharpness": 0.0029106943402439356, + "layer_10_sharpness": 0.0020937237422913313, + "layer_11_sharpness": 0.0019465775694698095, + "layer_12_sharpness": 0.0030746846459805965, + "block0_q_sharpness": 0.004351282026618719, + "block0_k_sharpness": 0.009404520504176617, + "block0_v_sharpness": 0.031114798039197922, + "block0_o_sharpness": 0.02201494760811329, + "block0_mlp_win_sharpness": 0.0038929558359086514, + "block0_mlp_wout_sharpness": 0.007353783119469881, + "block3_q_sharpness": 0.0020386537071317434, + "block3_k_sharpness": 0.0006940497551113367, + "block3_v_sharpness": 0.004476653877645731, + "block3_o_sharpness": 0.0026460422668606043, + "block3_mlp_win_sharpness": 0.00027124062762595713, + "block3_mlp_wout_sharpness": 0.0007887129904702306, + "block7_q_sharpness": 0.0003140535845886916, + "block7_k_sharpness": 0.0012960655149072409, + "block7_v_sharpness": 0.008902951143682003, + "block7_o_sharpness": 0.0019918601028621197, + "block7_mlp_win_sharpness": 0.002015212085098028, + "block7_mlp_wout_sharpness": 0.000961862038820982, + "block11_q_sharpness": 0.0004988212021999061, + "block11_k_sharpness": 0.0012219585478305817, + "block11_v_sharpness": 0.0017304838402196765, + "block11_o_sharpness": 0.0003898315189871937, + "block11_mlp_win_sharpness": 0.0011449857847765088, + "block11_mlp_wout_sharpness": 0.0020084078423678875, + "sum_layer_numerators": 0.007147172765413292, + "block_diag_sharpness": 0.003765617947447902, + "cross_layer_sharpness": 0.016325213496219508 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..7ef8f2d9084754fec074fe00960e434b8a4969fc --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_2500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.037984848022461, + "total_l1_linf_norm": 17799.421875, + "total_spectral_norm": 2.037984848022461, + "embed_lm_head_update_fnorm": 1.3148186206817627, + "embed_lm_head_max_l1_linf_norm": 0.3440442979335785, + "embed_lm_head_max_spectral_norm": 0.22926849126815796, + "layer_1_update_fnorm": 0.3909156620502472, + "layer_1_max_l1_linf_norm": 0.4957270622253418, + "layer_1_max_spectral_norm": 0.06162479147315025, + "layer_2_update_fnorm": 0.4305720627307892, + "layer_2_max_l1_linf_norm": 0.5223175287246704, + "layer_2_max_spectral_norm": 0.07435350120067596, + "layer_3_update_fnorm": 0.42586246132850647, + "layer_3_max_l1_linf_norm": 0.6054283976554871, + "layer_3_max_spectral_norm": 
0.07191567122936249, + "layer_4_update_fnorm": 0.415024071931839, + "layer_4_max_l1_linf_norm": 0.6395101547241211, + "layer_4_max_spectral_norm": 0.08049380034208298, + "layer_5_update_fnorm": 0.3994963467121124, + "layer_5_max_l1_linf_norm": 0.44873422384262085, + "layer_5_max_spectral_norm": 0.06428954750299454, + "layer_6_update_fnorm": 0.4319523274898529, + "layer_6_max_l1_linf_norm": 0.5359485149383545, + "layer_6_max_spectral_norm": 0.06643922626972198, + "layer_7_update_fnorm": 0.4481978118419647, + "layer_7_max_l1_linf_norm": 0.4880235195159912, + "layer_7_max_spectral_norm": 0.055844686925411224, + "layer_8_update_fnorm": 0.4638690948486328, + "layer_8_max_l1_linf_norm": 0.4788321852684021, + "layer_8_max_spectral_norm": 0.052545350044965744, + "layer_9_update_fnorm": 0.49134114384651184, + "layer_9_max_l1_linf_norm": 0.5031704902648926, + "layer_9_max_spectral_norm": 0.054380059242248535, + "layer_10_update_fnorm": 0.49879810214042664, + "layer_10_max_l1_linf_norm": 0.5169364213943481, + "layer_10_max_spectral_norm": 0.05349588394165039, + "layer_11_update_fnorm": 0.4974950850009918, + "layer_11_max_l1_linf_norm": 0.5110011696815491, + "layer_11_max_spectral_norm": 0.05421756953001022, + "layer_12_update_fnorm": 0.48256662487983704, + "layer_12_max_l1_linf_norm": 0.516565203666687, + "layer_12_max_spectral_norm": 0.08114531636238098, + "block0_q_update_fnorm": 0.10376107692718506, + "block0_q_max_l1_linf_norm": 0.19752252101898193, + "block0_q_max_spectral_norm": 0.042097900062799454, + "block0_k_update_fnorm": 0.09897822886705399, + "block0_k_max_l1_linf_norm": 0.20007416605949402, + "block0_k_max_spectral_norm": 0.050176218152046204, + "block0_v_update_fnorm": 0.11393917351961136, + "block0_v_max_l1_linf_norm": 0.17340758442878723, + "block0_v_max_spectral_norm": 0.05343674123287201, + "block0_o_update_fnorm": 0.11703546345233917, + "block0_o_max_l1_linf_norm": 0.11989393085241318, + "block0_o_max_spectral_norm": 0.026948871091008186, + "block0_mlp_win_update_fnorm": 0.23875223100185394, + "block0_mlp_win_max_l1_linf_norm": 0.18283873796463013, + "block0_mlp_win_max_spectral_norm": 0.05362891033291817, + "block0_mlp_wout_update_fnorm": 0.2202429473400116, + "block0_mlp_wout_max_l1_linf_norm": 0.4957270622253418, + "block0_mlp_wout_max_spectral_norm": 0.06162479147315025, + "block3_q_update_fnorm": 0.1083894819021225, + "block3_q_max_l1_linf_norm": 0.1436947137117386, + "block3_q_max_spectral_norm": 0.03929165005683899, + "block3_k_update_fnorm": 0.10695678740739822, + "block3_k_max_l1_linf_norm": 0.19844010472297668, + "block3_k_max_spectral_norm": 0.027013080194592476, + "block3_v_update_fnorm": 0.09916545450687408, + "block3_v_max_l1_linf_norm": 0.13435792922973633, + "block3_v_max_spectral_norm": 0.034482330083847046, + "block3_o_update_fnorm": 0.1048685684800148, + "block3_o_max_l1_linf_norm": 0.1251278817653656, + "block3_o_max_spectral_norm": 0.036202266812324524, + "block3_mlp_win_update_fnorm": 0.2707715928554535, + "block3_mlp_win_max_l1_linf_norm": 0.18311280012130737, + "block3_mlp_win_max_spectral_norm": 0.06538470089435577, + "block3_mlp_wout_update_fnorm": 0.23410630226135254, + "block3_mlp_wout_max_l1_linf_norm": 0.6395101547241211, + "block3_mlp_wout_max_spectral_norm": 0.08049380034208298, + "block7_q_update_fnorm": 0.1398763209581375, + "block7_q_max_l1_linf_norm": 0.14013290405273438, + "block7_q_max_spectral_norm": 0.02872084081172943, + "block7_k_update_fnorm": 0.1334967464208603, + "block7_k_max_l1_linf_norm": 0.15387555956840515, + 
"block7_k_max_spectral_norm": 0.0302792489528656, + "block7_v_update_fnorm": 0.11479634046554565, + "block7_v_max_l1_linf_norm": 0.13630041480064392, + "block7_v_max_spectral_norm": 0.0321173332631588, + "block7_o_update_fnorm": 0.12800443172454834, + "block7_o_max_l1_linf_norm": 0.12900817394256592, + "block7_o_max_spectral_norm": 0.03189218416810036, + "block7_mlp_win_update_fnorm": 0.2780713140964508, + "block7_mlp_win_max_l1_linf_norm": 0.17174743115901947, + "block7_mlp_win_max_spectral_norm": 0.052545350044965744, + "block7_mlp_wout_update_fnorm": 0.26608678698539734, + "block7_mlp_wout_max_l1_linf_norm": 0.4788321852684021, + "block7_mlp_wout_max_spectral_norm": 0.04461623355746269, + "block11_q_update_fnorm": 0.14905740320682526, + "block11_q_max_l1_linf_norm": 0.16770704090595245, + "block11_q_max_spectral_norm": 0.033259984105825424, + "block11_k_update_fnorm": 0.13700638711452484, + "block11_k_max_l1_linf_norm": 0.1651363968849182, + "block11_k_max_spectral_norm": 0.026724565774202347, + "block11_v_update_fnorm": 0.11893335729837418, + "block11_v_max_l1_linf_norm": 0.15449461340904236, + "block11_v_max_spectral_norm": 0.03479912877082825, + "block11_o_update_fnorm": 0.13850517570972443, + "block11_o_max_l1_linf_norm": 0.14926981925964355, + "block11_o_max_spectral_norm": 0.034173015505075455, + "block11_mlp_win_update_fnorm": 0.2939775884151459, + "block11_mlp_win_max_l1_linf_norm": 0.17115242779254913, + "block11_mlp_win_max_spectral_norm": 0.05470237880945206, + "block11_mlp_wout_update_fnorm": 0.2684081494808197, + "block11_mlp_wout_max_l1_linf_norm": 0.516565203666687, + "block11_mlp_wout_max_spectral_norm": 0.08114531636238098, + "total_sharpness": 0.011372329667210579, + "block_total_sharpness": 0.017843805253505707, + "v_norm_block": 1.557124137878418, + "v_T_H_v_block": 0.0432647243142128, + "v_norm": 2.037984848022461, + "ip_v_neg_g_hvp": 0.07184243947267532, + "cos_v_neg_g_hvp": 0.1158142238855362, + "g_hvp_norm": 0.30438148975372314, + "ip_v_neg_g_t": 0.07209565490484238, + "cos_v_neg_g_t": 0.14348290860652924, + "g_t_norm": 0.24655167758464813, + "g_norm": 0.30438148975372314, + "hv_norm": 0.43799540400505066, + "cos_v_hv": 0.05291524901986122, + "hg_norm": 5.298135757446289, + "cos_g_hg": 0.31480318307876587, + "v_parallel_norm": 0.016229458153247833, + "v_perp_norm": 2.0379202365875244, + "embed_lm_head_v_norm": 1.3148186206817627, + "embed_lm_head_cos_v_neg_g": 0.11349813640117645, + "layer_1_v_norm": 0.3909156620502472, + "layer_1_cos_v_neg_g": 0.20189355313777924, + "layer_2_v_norm": 0.4305720627307892, + "layer_2_cos_v_neg_g": 0.10678275674581528, + "layer_3_v_norm": 0.42586246132850647, + "layer_3_cos_v_neg_g": 0.10899414122104645, + "layer_4_v_norm": 0.415024071931839, + "layer_4_cos_v_neg_g": 0.08777657151222229, + "layer_5_v_norm": 0.3994963467121124, + "layer_5_cos_v_neg_g": 0.08299790322780609, + "layer_6_v_norm": 0.4319523274898529, + "layer_6_cos_v_neg_g": 0.10809370875358582, + "layer_7_v_norm": 0.4481978118419647, + "layer_7_cos_v_neg_g": 0.12717314064502716, + "layer_8_v_norm": 0.4638690948486328, + "layer_8_cos_v_neg_g": 0.12812365591526031, + "layer_9_v_norm": 0.49134114384651184, + "layer_9_cos_v_neg_g": 0.12539923191070557, + "layer_10_v_norm": 0.49879810214042664, + "layer_10_cos_v_neg_g": 0.1451706439256668, + "layer_11_v_norm": 0.4974950850009918, + "layer_11_cos_v_neg_g": 0.16687649488449097, + "layer_12_v_norm": 0.48256662487983704, + "layer_12_cos_v_neg_g": 0.18691545724868774, + "block0_q_v_norm": 0.10376107692718506, + 
"block0_q_cos_v_neg_g": 0.2210465669631958, + "block0_k_v_norm": 0.09897822886705399, + "block0_k_cos_v_neg_g": 0.20174336433410645, + "block0_v_v_norm": 0.11393917351961136, + "block0_v_cos_v_neg_g": 0.25740912556648254, + "block0_o_v_norm": 0.11703546345233917, + "block0_o_cos_v_neg_g": 0.2318333089351654, + "block0_mlp_win_v_norm": 0.23875223100185394, + "block0_mlp_win_cos_v_neg_g": 0.2543134391307831, + "block0_mlp_wout_v_norm": 0.2202429473400116, + "block0_mlp_wout_cos_v_neg_g": 0.22821079194545746, + "block3_q_v_norm": 0.1083894819021225, + "block3_q_cos_v_neg_g": 0.09333910048007965, + "block3_k_v_norm": 0.10695678740739822, + "block3_k_cos_v_neg_g": 0.07855351269245148, + "block3_v_v_norm": 0.09916545450687408, + "block3_v_cos_v_neg_g": 0.07483386248350143, + "block3_o_v_norm": 0.1048685684800148, + "block3_o_cos_v_neg_g": 0.20875735580921173, + "block3_mlp_win_v_norm": 0.2707715928554535, + "block3_mlp_win_cos_v_neg_g": 0.10306993871927261, + "block3_mlp_wout_v_norm": 0.23410630226135254, + "block3_mlp_wout_cos_v_neg_g": 0.24889908730983734, + "block7_q_v_norm": 0.1398763209581375, + "block7_q_cos_v_neg_g": 0.13029155135154724, + "block7_k_v_norm": 0.1334967464208603, + "block7_k_cos_v_neg_g": 0.2379828691482544, + "block7_v_v_norm": 0.11479634046554565, + "block7_v_cos_v_neg_g": 0.1159190684556961, + "block7_o_v_norm": 0.12800443172454834, + "block7_o_cos_v_neg_g": 0.24834208190441132, + "block7_mlp_win_v_norm": 0.2780713140964508, + "block7_mlp_win_cos_v_neg_g": 0.16994450986385345, + "block7_mlp_wout_v_norm": 0.26608678698539734, + "block7_mlp_wout_cos_v_neg_g": 0.241562157869339, + "block11_q_v_norm": 0.14905740320682526, + "block11_q_cos_v_neg_g": 0.17815543711185455, + "block11_k_v_norm": 0.13700638711452484, + "block11_k_cos_v_neg_g": 0.20915533602237701, + "block11_v_v_norm": 0.11893335729837418, + "block11_v_cos_v_neg_g": 0.158091738820076, + "block11_o_v_norm": 0.13850517570972443, + "block11_o_cos_v_neg_g": 0.2398521900177002, + "block11_mlp_win_v_norm": 0.2939775884151459, + "block11_mlp_win_cos_v_neg_g": 0.19287864863872528, + "block11_mlp_wout_v_norm": 0.2684081494808197, + "block11_mlp_wout_cos_v_neg_g": 0.20066513121128082, + "embed_lm_head_sharpness": 0.0004031696589663625, + "layer_1_sharpness": 0.015569152310490608, + "layer_2_sharpness": 0.0013593632029369473, + "layer_3_sharpness": 0.0025265244767069817, + "layer_4_sharpness": 0.0028864324558526278, + "layer_5_sharpness": 0.002640356542542577, + "layer_6_sharpness": 0.0033167246729135513, + "layer_7_sharpness": 0.0031910699326545, + "layer_8_sharpness": 0.004178514704108238, + "layer_9_sharpness": 0.0024515753611922264, + "layer_10_sharpness": 0.0015307337744161487, + "layer_11_sharpness": 0.0013666177401319146, + "layer_12_sharpness": 0.0025550327263772488, + "block0_q_sharpness": 0.0026662927120923996, + "block0_k_sharpness": 0.003508630907163024, + "block0_v_sharpness": 0.031627099961042404, + "block0_o_sharpness": 0.005660662427544594, + "block0_mlp_win_sharpness": 0.0028012555558234453, + "block0_mlp_wout_sharpness": 0.006215850356966257, + "block3_q_sharpness": 0.002360523911193013, + "block3_k_sharpness": 0.0010739786084741354, + "block3_v_sharpness": 0.0052628349512815475, + "block3_o_sharpness": 0.0020269008819013834, + "block3_mlp_win_sharpness": 0.00029510119929909706, + "block3_mlp_wout_sharpness": 0.0007955700857564807, + "block7_q_sharpness": 0.00026917195646092296, + "block7_k_sharpness": 0.001423571491613984, + "block7_v_sharpness": 0.008400841616094112, + "block7_o_sharpness": 
0.0012391309719532728, + "block7_mlp_win_sharpness": 0.0016101840883493423, + "block7_mlp_wout_sharpness": 0.0006525676581077278, + "block11_q_sharpness": 0.00018216481839772314, + "block11_k_sharpness": 0.0003789249749388546, + "block11_v_sharpness": 0.001264306833036244, + "block11_o_sharpness": 0.0002972575603052974, + "block11_mlp_win_sharpness": 0.0009533476550132036, + "block11_mlp_wout_sharpness": 0.001944078248925507, + "sum_layer_numerators": 0.008072898527167673, + "block_diag_sharpness": 0.003329530668944989, + "cross_layer_sharpness": 0.014514274584560718 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..797afb3f3d38d0be7fe66000d74ad2fbba8df176 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1202423572540283, + "total_l1_linf_norm": 18714.529296875, + "total_spectral_norm": 2.120242118835449, + "embed_lm_head_update_fnorm": 1.3203238248825073, + "embed_lm_head_max_l1_linf_norm": 0.3735660910606384, + "embed_lm_head_max_spectral_norm": 0.2187620848417282, + "layer_1_update_fnorm": 0.44801080226898193, + "layer_1_max_l1_linf_norm": 0.5032762885093689, + "layer_1_max_spectral_norm": 0.07846672087907791, + "layer_2_update_fnorm": 0.4671185612678528, + "layer_2_max_l1_linf_norm": 0.5378683805465698, + "layer_2_max_spectral_norm": 0.07453823834657669, + "layer_3_update_fnorm": 0.4613172113895416, + "layer_3_max_l1_linf_norm": 0.5486339330673218, + "layer_3_max_spectral_norm": 0.07175596058368683, + "layer_4_update_fnorm": 0.46165210008621216, + "layer_4_max_l1_linf_norm": 0.5555054545402527, + "layer_4_max_spectral_norm": 0.07783759385347366, + "layer_5_update_fnorm": 0.4582339823246002, + "layer_5_max_l1_linf_norm": 0.5201447606086731, + "layer_5_max_spectral_norm": 0.06416074186563492, + "layer_6_update_fnorm": 0.4715403914451599, + "layer_6_max_l1_linf_norm": 0.5851908326148987, + "layer_6_max_spectral_norm": 0.07159075886011124, + "layer_7_update_fnorm": 0.4829905033111572, + "layer_7_max_l1_linf_norm": 0.567609429359436, + "layer_7_max_spectral_norm": 0.05681721493601799, + "layer_8_update_fnorm": 0.4823087453842163, + "layer_8_max_l1_linf_norm": 0.5086336135864258, + "layer_8_max_spectral_norm": 0.050191380083560944, + "layer_9_update_fnorm": 0.495798259973526, + "layer_9_max_l1_linf_norm": 0.5289345979690552, + "layer_9_max_spectral_norm": 0.0488712340593338, + "layer_10_update_fnorm": 0.5088819861412048, + "layer_10_max_l1_linf_norm": 0.512688159942627, + "layer_10_max_spectral_norm": 0.048729490488767624, + "layer_11_update_fnorm": 0.5119225978851318, + "layer_11_max_l1_linf_norm": 0.5297907590866089, + "layer_11_max_spectral_norm": 0.05145071819424629, + "layer_12_update_fnorm": 0.49218928813934326, + "layer_12_max_l1_linf_norm": 0.5273024439811707, + "layer_12_max_spectral_norm": 0.08217740058898926, + "block0_q_update_fnorm": 0.1277349293231964, + "block0_q_max_l1_linf_norm": 0.23075377941131592, + "block0_q_max_spectral_norm": 0.056652214378118515, + "block0_k_update_fnorm": 0.1271255761384964, + "block0_k_max_l1_linf_norm": 0.24248281121253967, + "block0_k_max_spectral_norm": 0.07846672087907791, + "block0_v_update_fnorm": 
0.12815923988819122, + "block0_v_max_l1_linf_norm": 0.18190519511699677, + "block0_v_max_spectral_norm": 0.055523645132780075, + "block0_o_update_fnorm": 0.134894460439682, + "block0_o_max_l1_linf_norm": 0.13611970841884613, + "block0_o_max_spectral_norm": 0.03332092985510826, + "block0_mlp_win_update_fnorm": 0.2714058756828308, + "block0_mlp_win_max_l1_linf_norm": 0.16113530099391937, + "block0_mlp_win_max_spectral_norm": 0.05565483123064041, + "block0_mlp_wout_update_fnorm": 0.24470795691013336, + "block0_mlp_wout_max_l1_linf_norm": 0.5032762885093689, + "block0_mlp_wout_max_spectral_norm": 0.06551326811313629, + "block3_q_update_fnorm": 0.1404847502708435, + "block3_q_max_l1_linf_norm": 0.18044701218605042, + "block3_q_max_spectral_norm": 0.04855184257030487, + "block3_k_update_fnorm": 0.13493265211582184, + "block3_k_max_l1_linf_norm": 0.20367124676704407, + "block3_k_max_spectral_norm": 0.03402860090136528, + "block3_v_update_fnorm": 0.11053462326526642, + "block3_v_max_l1_linf_norm": 0.14903409779071808, + "block3_v_max_spectral_norm": 0.0323752760887146, + "block3_o_update_fnorm": 0.11875709146261215, + "block3_o_max_l1_linf_norm": 0.13702836632728577, + "block3_o_max_spectral_norm": 0.03926381841301918, + "block3_mlp_win_update_fnorm": 0.29531630873680115, + "block3_mlp_win_max_l1_linf_norm": 0.18087881803512573, + "block3_mlp_win_max_spectral_norm": 0.06650363653898239, + "block3_mlp_wout_update_fnorm": 0.24806718528270721, + "block3_mlp_wout_max_l1_linf_norm": 0.5555054545402527, + "block3_mlp_wout_max_spectral_norm": 0.07783759385347366, + "block7_q_update_fnorm": 0.15409933030605316, + "block7_q_max_l1_linf_norm": 0.17494836449623108, + "block7_q_max_spectral_norm": 0.031225532293319702, + "block7_k_update_fnorm": 0.14520490169525146, + "block7_k_max_l1_linf_norm": 0.1758354902267456, + "block7_k_max_spectral_norm": 0.03188932314515114, + "block7_v_update_fnorm": 0.11613257229328156, + "block7_v_max_l1_linf_norm": 0.12792854011058807, + "block7_v_max_spectral_norm": 0.0238895732909441, + "block7_o_update_fnorm": 0.13350015878677368, + "block7_o_max_l1_linf_norm": 0.12767431139945984, + "block7_o_max_spectral_norm": 0.024967867881059647, + "block7_mlp_win_update_fnorm": 0.2879861295223236, + "block7_mlp_win_max_l1_linf_norm": 0.1632155478000641, + "block7_mlp_win_max_spectral_norm": 0.047760363668203354, + "block7_mlp_wout_update_fnorm": 0.271019846200943, + "block7_mlp_wout_max_l1_linf_norm": 0.5086336135864258, + "block7_mlp_wout_max_spectral_norm": 0.050191380083560944, + "block11_q_update_fnorm": 0.14959044754505157, + "block11_q_max_l1_linf_norm": 0.16919556260108948, + "block11_q_max_spectral_norm": 0.02969144843518734, + "block11_k_update_fnorm": 0.13864293694496155, + "block11_k_max_l1_linf_norm": 0.16066592931747437, + "block11_k_max_spectral_norm": 0.02605154924094677, + "block11_v_update_fnorm": 0.12262032926082611, + "block11_v_max_l1_linf_norm": 0.14367279410362244, + "block11_v_max_spectral_norm": 0.03489287942647934, + "block11_o_update_fnorm": 0.14219550788402557, + "block11_o_max_l1_linf_norm": 0.14926810562610626, + "block11_o_max_spectral_norm": 0.035588670521974564, + "block11_mlp_win_update_fnorm": 0.30195996165275574, + "block11_mlp_win_max_l1_linf_norm": 0.17501327395439148, + "block11_mlp_win_max_spectral_norm": 0.05586184561252594, + "block11_mlp_wout_update_fnorm": 0.2722553014755249, + "block11_mlp_wout_max_l1_linf_norm": 0.5273024439811707, + "block11_mlp_wout_max_spectral_norm": 0.08217740058898926, + "total_sharpness": 0.007929416373372078, + 
"block_total_sharpness": 0.011870326474308968, + "v_norm_block": 1.6589668989181519, + "v_T_H_v_block": 0.03266917169094086, + "v_norm": 2.1202423572540283, + "ip_v_neg_g_hvp": 0.06324508786201477, + "cos_v_neg_g_hvp": 0.1054464802145958, + "g_hvp_norm": 0.28288453817367554, + "ip_v_neg_g_t": 0.06349246203899384, + "cos_v_neg_g_t": 0.1337411254644394, + "g_t_norm": 0.22390906512737274, + "g_norm": 0.28288453817367554, + "hv_norm": 0.3695717453956604, + "cos_v_hv": 0.045491259545087814, + "hg_norm": 2.533827781677246, + "cos_g_hg": 0.5790331959724426, + "v_parallel_norm": 0.01545167900621891, + "v_perp_norm": 2.1201860904693604, + "embed_lm_head_v_norm": 1.3203238248825073, + "embed_lm_head_cos_v_neg_g": 0.10537464916706085, + "layer_1_v_norm": 0.44801080226898193, + "layer_1_cos_v_neg_g": 0.17922620475292206, + "layer_2_v_norm": 0.4671185612678528, + "layer_2_cos_v_neg_g": 0.09474831819534302, + "layer_3_v_norm": 0.461317241191864, + "layer_3_cos_v_neg_g": 0.09139133244752884, + "layer_4_v_norm": 0.46165210008621216, + "layer_4_cos_v_neg_g": 0.08981765061616898, + "layer_5_v_norm": 0.4582339823246002, + "layer_5_cos_v_neg_g": 0.0740145593881607, + "layer_6_v_norm": 0.4715403914451599, + "layer_6_cos_v_neg_g": 0.09058128297328949, + "layer_7_v_norm": 0.4829905033111572, + "layer_7_cos_v_neg_g": 0.10832355916500092, + "layer_8_v_norm": 0.4823087453842163, + "layer_8_cos_v_neg_g": 0.10813577473163605, + "layer_9_v_norm": 0.495798259973526, + "layer_9_cos_v_neg_g": 0.10470319539308548, + "layer_10_v_norm": 0.5088819861412048, + "layer_10_cos_v_neg_g": 0.12433256208896637, + "layer_11_v_norm": 0.5119226574897766, + "layer_11_cos_v_neg_g": 0.14547373354434967, + "layer_12_v_norm": 0.49218928813934326, + "layer_12_cos_v_neg_g": 0.181082621216774, + "block0_q_v_norm": 0.1277349293231964, + "block0_q_cos_v_neg_g": 0.20669642090797424, + "block0_k_v_norm": 0.1271255761384964, + "block0_k_cos_v_neg_g": 0.19294217228889465, + "block0_v_v_norm": 0.12815923988819122, + "block0_v_cos_v_neg_g": 0.2236495316028595, + "block0_o_v_norm": 0.134894460439682, + "block0_o_cos_v_neg_g": 0.21053020656108856, + "block0_mlp_win_v_norm": 0.2714058756828308, + "block0_mlp_win_cos_v_neg_g": 0.2208457887172699, + "block0_mlp_wout_v_norm": 0.24470795691013336, + "block0_mlp_wout_cos_v_neg_g": 0.20482861995697021, + "block3_q_v_norm": 0.1404847502708435, + "block3_q_cos_v_neg_g": 0.09856913983821869, + "block3_k_v_norm": 0.13493265211582184, + "block3_k_cos_v_neg_g": 0.08913085609674454, + "block3_v_v_norm": 0.11053462326526642, + "block3_v_cos_v_neg_g": 0.060185469686985016, + "block3_o_v_norm": 0.11875709146261215, + "block3_o_cos_v_neg_g": 0.17966005206108093, + "block3_mlp_win_v_norm": 0.29531630873680115, + "block3_mlp_win_cos_v_neg_g": 0.09071244299411774, + "block3_mlp_wout_v_norm": 0.24806718528270721, + "block3_mlp_wout_cos_v_neg_g": 0.23846089839935303, + "block7_q_v_norm": 0.15409933030605316, + "block7_q_cos_v_neg_g": 0.12427271157503128, + "block7_k_v_norm": 0.14520490169525146, + "block7_k_cos_v_neg_g": 0.21353334188461304, + "block7_v_v_norm": 0.11613257229328156, + "block7_v_cos_v_neg_g": 0.08812861144542694, + "block7_o_v_norm": 0.13350015878677368, + "block7_o_cos_v_neg_g": 0.2242947667837143, + "block7_mlp_win_v_norm": 0.2879861295223236, + "block7_mlp_win_cos_v_neg_g": 0.13852852582931519, + "block7_mlp_wout_v_norm": 0.271019846200943, + "block7_mlp_wout_cos_v_neg_g": 0.23168842494487762, + "block11_q_v_norm": 0.14959044754505157, + "block11_q_cos_v_neg_g": 0.1704348623752594, + "block11_k_v_norm": 
0.13864293694496155, + "block11_k_cos_v_neg_g": 0.2050691843032837, + "block11_v_v_norm": 0.12262032926082611, + "block11_v_cos_v_neg_g": 0.14163659512996674, + "block11_o_v_norm": 0.14219550788402557, + "block11_o_cos_v_neg_g": 0.2349078357219696, + "block11_mlp_win_v_norm": 0.30195996165275574, + "block11_mlp_win_cos_v_neg_g": 0.1819063425064087, + "block11_mlp_wout_v_norm": 0.2722553014755249, + "block11_mlp_wout_cos_v_neg_g": 0.21099096536636353, + "embed_lm_head_sharpness": 0.00033793781767599285, + "layer_1_sharpness": 0.008783774450421333, + "layer_2_sharpness": 0.0009720043744891882, + "layer_3_sharpness": 0.0017599252751097083, + "layer_4_sharpness": 0.0019327134359627962, + "layer_5_sharpness": 0.00156186253298074, + "layer_6_sharpness": 0.0016411019023507833, + "layer_7_sharpness": 0.0017991046188399196, + "layer_8_sharpness": 0.002306486014276743, + "layer_9_sharpness": 0.0018252021400257945, + "layer_10_sharpness": 0.0012882697628811002, + "layer_11_sharpness": 0.001169852796010673, + "layer_12_sharpness": 0.003233193652704358, + "block0_q_sharpness": 0.002028488786891103, + "block0_k_sharpness": 0.0029434654861688614, + "block0_v_sharpness": 0.007745929062366486, + "block0_o_sharpness": 0.0032001719810068607, + "block0_mlp_win_sharpness": 0.0020878571085631847, + "block0_mlp_wout_sharpness": 0.004585829563438892, + "block3_q_sharpness": 0.0021563873160630465, + "block3_k_sharpness": 0.0010052821598947048, + "block3_v_sharpness": 0.0019106712425127625, + "block3_o_sharpness": 0.001663448754698038, + "block3_mlp_win_sharpness": 0.000201163231395185, + "block3_mlp_wout_sharpness": 0.0004941149963997304, + "block7_q_sharpness": 0.00026096447254531085, + "block7_k_sharpness": 0.0007236082456074655, + "block7_v_sharpness": 0.0047837612219154835, + "block7_o_sharpness": 0.0007681435672566295, + "block7_mlp_win_sharpness": 0.000804606475867331, + "block7_mlp_wout_sharpness": 0.0005240108002908528, + "block11_q_sharpness": 0.00014844788529444486, + "block11_k_sharpness": 0.0002912952040787786, + "block11_v_sharpness": 0.0011626702034845948, + "block11_o_sharpness": 0.00028370623476803303, + "block11_mlp_win_sharpness": 0.0012273041065782309, + "block11_mlp_wout_sharpness": 0.002925235079601407, + "sum_layer_numerators": 0.0062827409267402845, + "block_diag_sharpness": 0.0022828307306356696, + "cross_layer_sharpness": 0.009587495743673298 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..9f9d658608bedf053510d129959c4e27588b73b3 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_3500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.161114454269409, + "total_l1_linf_norm": 19120.11328125, + "total_spectral_norm": 2.1611146926879883, + "embed_lm_head_update_fnorm": 1.324131727218628, + "embed_lm_head_max_l1_linf_norm": 0.36958158016204834, + "embed_lm_head_max_spectral_norm": 0.2167947143316269, + "layer_1_update_fnorm": 0.48280400037765503, + "layer_1_max_l1_linf_norm": 0.6064229011535645, + "layer_1_max_spectral_norm": 0.09441865235567093, + "layer_2_update_fnorm": 0.4924840033054352, + "layer_2_max_l1_linf_norm": 0.5537062883377075, + "layer_2_max_spectral_norm": 0.08606793731451035, + 
"layer_3_update_fnorm": 0.4756790101528168, + "layer_3_max_l1_linf_norm": 0.5621763467788696, + "layer_3_max_spectral_norm": 0.0806054174900055, + "layer_4_update_fnorm": 0.47608229517936707, + "layer_4_max_l1_linf_norm": 0.6273251175880432, + "layer_4_max_spectral_norm": 0.08502883464097977, + "layer_5_update_fnorm": 0.46434763073921204, + "layer_5_max_l1_linf_norm": 0.5103517770767212, + "layer_5_max_spectral_norm": 0.0654701218008995, + "layer_6_update_fnorm": 0.4843027889728546, + "layer_6_max_l1_linf_norm": 0.6190648674964905, + "layer_6_max_spectral_norm": 0.07428215444087982, + "layer_7_update_fnorm": 0.4924706816673279, + "layer_7_max_l1_linf_norm": 0.5399322509765625, + "layer_7_max_spectral_norm": 0.06457764655351639, + "layer_8_update_fnorm": 0.4944147765636444, + "layer_8_max_l1_linf_norm": 0.5208749175071716, + "layer_8_max_spectral_norm": 0.05574100837111473, + "layer_9_update_fnorm": 0.5069260001182556, + "layer_9_max_l1_linf_norm": 0.5368884801864624, + "layer_9_max_spectral_norm": 0.04875675588846207, + "layer_10_update_fnorm": 0.5192944407463074, + "layer_10_max_l1_linf_norm": 0.5256830453872681, + "layer_10_max_spectral_norm": 0.0487656444311142, + "layer_11_update_fnorm": 0.5216987133026123, + "layer_11_max_l1_linf_norm": 0.5287859439849854, + "layer_11_max_spectral_norm": 0.05158526077866554, + "layer_12_update_fnorm": 0.5025690197944641, + "layer_12_max_l1_linf_norm": 0.5675610899925232, + "layer_12_max_spectral_norm": 0.0772293359041214, + "block0_q_update_fnorm": 0.1378653645515442, + "block0_q_max_l1_linf_norm": 0.24922212958335876, + "block0_q_max_spectral_norm": 0.06875794380903244, + "block0_k_update_fnorm": 0.13104493916034698, + "block0_k_max_l1_linf_norm": 0.27707263827323914, + "block0_k_max_spectral_norm": 0.07605954259634018, + "block0_v_update_fnorm": 0.160648912191391, + "block0_v_max_l1_linf_norm": 0.2184196561574936, + "block0_v_max_spectral_norm": 0.09441865235567093, + "block0_o_update_fnorm": 0.1543942242860794, + "block0_o_max_l1_linf_norm": 0.1950570046901703, + "block0_o_max_spectral_norm": 0.08386159688234329, + "block0_mlp_win_update_fnorm": 0.2867797315120697, + "block0_mlp_win_max_l1_linf_norm": 0.18275457620620728, + "block0_mlp_win_max_spectral_norm": 0.07158853113651276, + "block0_mlp_wout_update_fnorm": 0.25484809279441833, + "block0_mlp_wout_max_l1_linf_norm": 0.6064229011535645, + "block0_mlp_wout_max_spectral_norm": 0.08467109501361847, + "block3_q_update_fnorm": 0.14424525201320648, + "block3_q_max_l1_linf_norm": 0.16300660371780396, + "block3_q_max_spectral_norm": 0.048124559223651886, + "block3_k_update_fnorm": 0.13236522674560547, + "block3_k_max_l1_linf_norm": 0.2255345582962036, + "block3_k_max_spectral_norm": 0.03408370539546013, + "block3_v_update_fnorm": 0.12287506461143494, + "block3_v_max_l1_linf_norm": 0.14792823791503906, + "block3_v_max_spectral_norm": 0.05636017024517059, + "block3_o_update_fnorm": 0.12343589216470718, + "block3_o_max_l1_linf_norm": 0.1398465633392334, + "block3_o_max_spectral_norm": 0.043709397315979004, + "block3_mlp_win_update_fnorm": 0.30366313457489014, + "block3_mlp_win_max_l1_linf_norm": 0.19047468900680542, + "block3_mlp_win_max_spectral_norm": 0.06868689507246017, + "block3_mlp_wout_update_fnorm": 0.2562541365623474, + "block3_mlp_wout_max_l1_linf_norm": 0.6273251175880432, + "block3_mlp_wout_max_spectral_norm": 0.08502883464097977, + "block7_q_update_fnorm": 0.15667402744293213, + "block7_q_max_l1_linf_norm": 0.16692087054252625, + "block7_q_max_spectral_norm": 0.027945876121520996, + 
"block7_k_update_fnorm": 0.14659693837165833, + "block7_k_max_l1_linf_norm": 0.16556552052497864, + "block7_k_max_spectral_norm": 0.027263764292001724, + "block7_v_update_fnorm": 0.12482550740242004, + "block7_v_max_l1_linf_norm": 0.13178709149360657, + "block7_v_max_spectral_norm": 0.029303142800927162, + "block7_o_update_fnorm": 0.13877876102924347, + "block7_o_max_l1_linf_norm": 0.13634276390075684, + "block7_o_max_spectral_norm": 0.030559860169887543, + "block7_mlp_win_update_fnorm": 0.29697728157043457, + "block7_mlp_win_max_l1_linf_norm": 0.16383349895477295, + "block7_mlp_win_max_spectral_norm": 0.050233934074640274, + "block7_mlp_wout_update_fnorm": 0.2743456959724426, + "block7_mlp_wout_max_l1_linf_norm": 0.5208749175071716, + "block7_mlp_wout_max_spectral_norm": 0.05574100837111473, + "block11_q_update_fnorm": 0.15548226237297058, + "block11_q_max_l1_linf_norm": 0.15219447016716003, + "block11_q_max_spectral_norm": 0.027940699830651283, + "block11_k_update_fnorm": 0.14574356377124786, + "block11_k_max_l1_linf_norm": 0.17086370289325714, + "block11_k_max_spectral_norm": 0.02533971332013607, + "block11_v_update_fnorm": 0.130086749792099, + "block11_v_max_l1_linf_norm": 0.15201568603515625, + "block11_v_max_spectral_norm": 0.03687819093465805, + "block11_o_update_fnorm": 0.14827099442481995, + "block11_o_max_l1_linf_norm": 0.15314289927482605, + "block11_o_max_spectral_norm": 0.03543395549058914, + "block11_mlp_win_update_fnorm": 0.3066733777523041, + "block11_mlp_win_max_l1_linf_norm": 0.16889922320842743, + "block11_mlp_win_max_spectral_norm": 0.05401144176721573, + "block11_mlp_wout_update_fnorm": 0.27223527431488037, + "block11_mlp_wout_max_l1_linf_norm": 0.5675610899925232, + "block11_mlp_wout_max_spectral_norm": 0.0772293359041214, + "total_sharpness": 0.40766751766204834, + "block_total_sharpness": 0.639356255531311, + "v_norm_block": 1.7079497575759888, + "v_T_H_v_block": 1.8650611639022827, + "v_norm": 2.161114454269409, + "ip_v_neg_g_hvp": 0.07493500411510468, + "cos_v_neg_g_hvp": 0.10068277269601822, + "g_hvp_norm": 0.34439098834991455, + "ip_v_neg_g_t": 0.07561628520488739, + "cos_v_neg_g_t": 0.1253052055835724, + "g_t_norm": 0.2792341113090515, + "g_norm": 0.34439098834991455, + "hv_norm": 21.569610595703125, + "cos_v_hv": 0.040845248848199844, + "hg_norm": 873.5787963867188, + "cos_g_hg": 0.31876471638679504, + "v_parallel_norm": 0.016300128772854805, + "v_perp_norm": 2.161052942276001, + "embed_lm_head_v_norm": 1.324131727218628, + "embed_lm_head_cos_v_neg_g": 0.08387713134288788, + "layer_1_v_norm": 0.48280400037765503, + "layer_1_cos_v_neg_g": 0.21767032146453857, + "layer_2_v_norm": 0.4924840033054352, + "layer_2_cos_v_neg_g": 0.12025047093629837, + "layer_3_v_norm": 0.4756790101528168, + "layer_3_cos_v_neg_g": 0.09829291701316833, + "layer_4_v_norm": 0.47608229517936707, + "layer_4_cos_v_neg_g": 0.08990878611803055, + "layer_5_v_norm": 0.46434763073921204, + "layer_5_cos_v_neg_g": 0.0690920278429985, + "layer_6_v_norm": 0.4843027889728546, + "layer_6_cos_v_neg_g": 0.085650235414505, + "layer_7_v_norm": 0.4924706816673279, + "layer_7_cos_v_neg_g": 0.09991861879825592, + "layer_8_v_norm": 0.4944147765636444, + "layer_8_cos_v_neg_g": 0.09502481669187546, + "layer_9_v_norm": 0.5069260001182556, + "layer_9_cos_v_neg_g": 0.08872893452644348, + "layer_10_v_norm": 0.5192944407463074, + "layer_10_cos_v_neg_g": 0.10550039261579514, + "layer_11_v_norm": 0.5216986536979675, + "layer_11_cos_v_neg_g": 0.12837648391723633, + "layer_12_v_norm": 0.5025690197944641, + 
"layer_12_cos_v_neg_g": 0.15776771306991577, + "block0_q_v_norm": 0.1378653645515442, + "block0_q_cos_v_neg_g": 0.19958218932151794, + "block0_k_v_norm": 0.13104493916034698, + "block0_k_cos_v_neg_g": 0.19875521957874298, + "block0_v_v_norm": 0.160648912191391, + "block0_v_cos_v_neg_g": 0.32503020763397217, + "block0_o_v_norm": 0.1543942242860794, + "block0_o_cos_v_neg_g": 0.3035357594490051, + "block0_mlp_win_v_norm": 0.2867797315120697, + "block0_mlp_win_cos_v_neg_g": 0.21608999371528625, + "block0_mlp_wout_v_norm": 0.25484809279441833, + "block0_mlp_wout_cos_v_neg_g": 0.230423703789711, + "block3_q_v_norm": 0.14424525201320648, + "block3_q_cos_v_neg_g": 0.09598492830991745, + "block3_k_v_norm": 0.13236522674560547, + "block3_k_cos_v_neg_g": 0.10559489578008652, + "block3_v_v_norm": 0.12287506461143494, + "block3_v_cos_v_neg_g": 0.07712408900260925, + "block3_o_v_norm": 0.12343589216470718, + "block3_o_cos_v_neg_g": 0.19196414947509766, + "block3_mlp_win_v_norm": 0.30366313457489014, + "block3_mlp_win_cos_v_neg_g": 0.0872417464852333, + "block3_mlp_wout_v_norm": 0.2562541365623474, + "block3_mlp_wout_cos_v_neg_g": 0.23987048864364624, + "block7_q_v_norm": 0.15667402744293213, + "block7_q_cos_v_neg_g": 0.10952486842870712, + "block7_k_v_norm": 0.14659693837165833, + "block7_k_cos_v_neg_g": 0.21903377771377563, + "block7_v_v_norm": 0.12482550740242004, + "block7_v_cos_v_neg_g": 0.0805056244134903, + "block7_o_v_norm": 0.13877876102924347, + "block7_o_cos_v_neg_g": 0.23402024805545807, + "block7_mlp_win_v_norm": 0.29697728157043457, + "block7_mlp_win_cos_v_neg_g": 0.12402737885713577, + "block7_mlp_wout_v_norm": 0.2743456959724426, + "block7_mlp_wout_cos_v_neg_g": 0.23305150866508484, + "block11_q_v_norm": 0.15548226237297058, + "block11_q_cos_v_neg_g": 0.14693570137023926, + "block11_k_v_norm": 0.14574356377124786, + "block11_k_cos_v_neg_g": 0.19795052707195282, + "block11_v_v_norm": 0.130086749792099, + "block11_v_cos_v_neg_g": 0.11916229128837585, + "block11_o_v_norm": 0.14827099442481995, + "block11_o_cos_v_neg_g": 0.21677207946777344, + "block11_mlp_win_v_norm": 0.3066733777523041, + "block11_mlp_win_cos_v_neg_g": 0.1576051563024521, + "block11_mlp_wout_v_norm": 0.27223527431488037, + "block11_mlp_wout_cos_v_neg_g": 0.19122016429901123, + "embed_lm_head_sharpness": 0.00046176265459507704, + "layer_1_sharpness": 3.8810858726501465, + "layer_2_sharpness": 0.2296312302350998, + "layer_3_sharpness": 0.0694277212023735, + "layer_4_sharpness": 0.006279734894633293, + "layer_5_sharpness": 0.0016550339059904218, + "layer_6_sharpness": 0.0013409893726930022, + "layer_7_sharpness": 0.0016949023120105267, + "layer_8_sharpness": 0.0025806487537920475, + "layer_9_sharpness": 0.0021153923589736223, + "layer_10_sharpness": 0.0013374915579333901, + "layer_11_sharpness": 0.0011429201113060117, + "layer_12_sharpness": 0.002440805546939373, + "block0_q_sharpness": 0.004469512030482292, + "block0_k_sharpness": 0.0057594021782279015, + "block0_v_sharpness": 4.995800018310547, + "block0_o_sharpness": 6.005436897277832, + "block0_mlp_win_sharpness": 0.09432896971702576, + "block0_mlp_wout_sharpness": 0.1812189668416977, + "block3_q_sharpness": 0.0017631457885727286, + "block3_k_sharpness": 0.0007403873605653644, + "block3_v_sharpness": 0.0468713603913784, + "block3_o_sharpness": 0.0017415530746802688, + "block3_mlp_win_sharpness": 0.0002227976219728589, + "block3_mlp_wout_sharpness": 0.000675494025927037, + "block7_q_sharpness": 0.00019323018204886466, + "block7_k_sharpness": 0.0006094558048062027, + 
"block7_v_sharpness": 0.00550159253180027, + "block7_o_sharpness": 0.0007821719627827406, + "block7_mlp_win_sharpness": 0.0009161587222479284, + "block7_mlp_wout_sharpness": 0.0005534581723622978, + "block11_q_sharpness": 0.0001075637192116119, + "block11_k_sharpness": 0.00030711485305801034, + "block11_v_sharpness": 0.000979813514277339, + "block11_o_sharpness": 0.00021532854589167982, + "block11_mlp_win_sharpness": 0.0008827747078612447, + "block11_mlp_wout_sharpness": 0.002124140039086342, + "sum_layer_numerators": 0.9810527185015077, + "block_diag_sharpness": 0.3363118450103899, + "cross_layer_sharpness": 0.3030444105209211 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..09091785cd54df3a587437b6f6dce9e400712b4f --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.156545639038086, + "total_l1_linf_norm": 19080.19921875, + "total_spectral_norm": 2.156545639038086, + "embed_lm_head_update_fnorm": 1.3232322931289673, + "embed_lm_head_max_l1_linf_norm": 0.39230790734291077, + "embed_lm_head_max_spectral_norm": 0.21090126037597656, + "layer_1_update_fnorm": 0.4568384885787964, + "layer_1_max_l1_linf_norm": 0.5779386758804321, + "layer_1_max_spectral_norm": 0.08158616721630096, + "layer_2_update_fnorm": 0.48072415590286255, + "layer_2_max_l1_linf_norm": 0.5492241978645325, + "layer_2_max_spectral_norm": 0.07773803174495697, + "layer_3_update_fnorm": 0.47221168875694275, + "layer_3_max_l1_linf_norm": 0.5822686553001404, + "layer_3_max_spectral_norm": 0.07858860492706299, + "layer_4_update_fnorm": 0.4771563708782196, + "layer_4_max_l1_linf_norm": 0.5993431210517883, + "layer_4_max_spectral_norm": 0.08372539281845093, + "layer_5_update_fnorm": 0.4648449718952179, + "layer_5_max_l1_linf_norm": 0.5297711491584778, + "layer_5_max_spectral_norm": 0.06510991603136063, + "layer_6_update_fnorm": 0.48345354199409485, + "layer_6_max_l1_linf_norm": 0.5710515975952148, + "layer_6_max_spectral_norm": 0.07233325392007828, + "layer_7_update_fnorm": 0.49405384063720703, + "layer_7_max_l1_linf_norm": 0.5391349792480469, + "layer_7_max_spectral_norm": 0.06477340310811996, + "layer_8_update_fnorm": 0.492750883102417, + "layer_8_max_l1_linf_norm": 0.5132768154144287, + "layer_8_max_spectral_norm": 0.061144113540649414, + "layer_9_update_fnorm": 0.5102674961090088, + "layer_9_max_l1_linf_norm": 0.5281302332878113, + "layer_9_max_spectral_norm": 0.04910757392644882, + "layer_10_update_fnorm": 0.5226972103118896, + "layer_10_max_l1_linf_norm": 0.5399066805839539, + "layer_10_max_spectral_norm": 0.04558930546045303, + "layer_11_update_fnorm": 0.5276724696159363, + "layer_11_max_l1_linf_norm": 0.5524810552597046, + "layer_11_max_spectral_norm": 0.052964162081480026, + "layer_12_update_fnorm": 0.5104520320892334, + "layer_12_max_l1_linf_norm": 0.5532040596008301, + "layer_12_max_spectral_norm": 0.07980874180793762, + "block0_q_update_fnorm": 0.12816382944583893, + "block0_q_max_l1_linf_norm": 0.19015756249427795, + "block0_q_max_spectral_norm": 0.051813095808029175, + "block0_k_update_fnorm": 0.12489498406648636, + "block0_k_max_l1_linf_norm": 0.21172122657299042, + 
"block0_k_max_spectral_norm": 0.06651661545038223, + "block0_v_update_fnorm": 0.130986750125885, + "block0_v_max_l1_linf_norm": 0.1634061336517334, + "block0_v_max_spectral_norm": 0.049170780926942825, + "block0_o_update_fnorm": 0.13205832242965698, + "block0_o_max_l1_linf_norm": 0.1444331407546997, + "block0_o_max_spectral_norm": 0.03667745739221573, + "block0_mlp_win_update_fnorm": 0.2844177782535553, + "block0_mlp_win_max_l1_linf_norm": 0.15766918659210205, + "block0_mlp_win_max_spectral_norm": 0.05344477668404579, + "block0_mlp_wout_update_fnorm": 0.24718604981899261, + "block0_mlp_wout_max_l1_linf_norm": 0.5779386758804321, + "block0_mlp_wout_max_spectral_norm": 0.08158616721630096, + "block3_q_update_fnorm": 0.14430449903011322, + "block3_q_max_l1_linf_norm": 0.18544143438339233, + "block3_q_max_spectral_norm": 0.0488949678838253, + "block3_k_update_fnorm": 0.13888958096504211, + "block3_k_max_l1_linf_norm": 0.23659619688987732, + "block3_k_max_spectral_norm": 0.034803446382284164, + "block3_v_update_fnorm": 0.11968156695365906, + "block3_v_max_l1_linf_norm": 0.14574451744556427, + "block3_v_max_spectral_norm": 0.0351644903421402, + "block3_o_update_fnorm": 0.1259349286556244, + "block3_o_max_l1_linf_norm": 0.14727067947387695, + "block3_o_max_spectral_norm": 0.044313400983810425, + "block3_mlp_win_update_fnorm": 0.3027603328227997, + "block3_mlp_win_max_l1_linf_norm": 0.19889827072620392, + "block3_mlp_win_max_spectral_norm": 0.06743178516626358, + "block3_mlp_wout_update_fnorm": 0.2561022937297821, + "block3_mlp_wout_max_l1_linf_norm": 0.5993431210517883, + "block3_mlp_wout_max_spectral_norm": 0.08372539281845093, + "block7_q_update_fnorm": 0.1556297391653061, + "block7_q_max_l1_linf_norm": 0.15285921096801758, + "block7_q_max_spectral_norm": 0.02710958570241928, + "block7_k_update_fnorm": 0.14561961591243744, + "block7_k_max_l1_linf_norm": 0.1523212194442749, + "block7_k_max_spectral_norm": 0.025512905791401863, + "block7_v_update_fnorm": 0.1265895664691925, + "block7_v_max_l1_linf_norm": 0.13347432017326355, + "block7_v_max_spectral_norm": 0.031121617183089256, + "block7_o_update_fnorm": 0.14018772542476654, + "block7_o_max_l1_linf_norm": 0.13239070773124695, + "block7_o_max_spectral_norm": 0.029217716306447983, + "block7_mlp_win_update_fnorm": 0.2965679168701172, + "block7_mlp_win_max_l1_linf_norm": 0.16567717492580414, + "block7_mlp_win_max_spectral_norm": 0.04639337956905365, + "block7_mlp_wout_update_fnorm": 0.27136874198913574, + "block7_mlp_wout_max_l1_linf_norm": 0.5132768154144287, + "block7_mlp_wout_max_spectral_norm": 0.061144113540649414, + "block11_q_update_fnorm": 0.15608875453472137, + "block11_q_max_l1_linf_norm": 0.1524161398410797, + "block11_q_max_spectral_norm": 0.02764710783958435, + "block11_k_update_fnorm": 0.14705026149749756, + "block11_k_max_l1_linf_norm": 0.16365650296211243, + "block11_k_max_spectral_norm": 0.025552844628691673, + "block11_v_update_fnorm": 0.13282577693462372, + "block11_v_max_l1_linf_norm": 0.14770103991031647, + "block11_v_max_spectral_norm": 0.036080438643693924, + "block11_o_update_fnorm": 0.15028007328510284, + "block11_o_max_l1_linf_norm": 0.1653040051460266, + "block11_o_max_spectral_norm": 0.03690575063228607, + "block11_mlp_win_update_fnorm": 0.3120087683200836, + "block11_mlp_win_max_l1_linf_norm": 0.1691085696220398, + "block11_mlp_win_max_spectral_norm": 0.05404176935553551, + "block11_mlp_wout_update_fnorm": 0.2772906720638275, + "block11_mlp_wout_max_l1_linf_norm": 0.5532040596008301, + 
"block11_mlp_wout_max_spectral_norm": 0.07980874180793762, + "total_sharpness": 0.008034566417336464, + "block_total_sharpness": 0.01191937830299139, + "v_norm_block": 1.7028639316558838, + "v_T_H_v_block": 0.03456316515803337, + "v_norm": 2.156545639038086, + "ip_v_neg_g_hvp": 0.06269174814224243, + "cos_v_neg_g_hvp": 0.08848269283771515, + "g_hvp_norm": 0.32854387164115906, + "ip_v_neg_g_t": 0.06304551661014557, + "cos_v_neg_g_t": 0.10678179562091827, + "g_t_norm": 0.27377787232398987, + "g_norm": 0.32854387164115906, + "hv_norm": 0.4545876085758209, + "cos_v_hv": 0.03811566159129143, + "hg_norm": 3.457228899002075, + "cos_g_hg": 0.6224142909049988, + "v_parallel_norm": 0.013090836815536022, + "v_perp_norm": 2.156505823135376, + "embed_lm_head_v_norm": 1.3232322931289673, + "embed_lm_head_cos_v_neg_g": 0.08634641021490097, + "layer_1_v_norm": 0.4568384885787964, + "layer_1_cos_v_neg_g": 0.1687316745519638, + "layer_2_v_norm": 0.48072415590286255, + "layer_2_cos_v_neg_g": 0.07683216035366058, + "layer_3_v_norm": 0.47221171855926514, + "layer_3_cos_v_neg_g": 0.07080626487731934, + "layer_4_v_norm": 0.4771563708782196, + "layer_4_cos_v_neg_g": 0.07133807241916656, + "layer_5_v_norm": 0.4648449718952179, + "layer_5_cos_v_neg_g": 0.06026725843548775, + "layer_6_v_norm": 0.48345354199409485, + "layer_6_cos_v_neg_g": 0.07994339615106583, + "layer_7_v_norm": 0.49405384063720703, + "layer_7_cos_v_neg_g": 0.0928315594792366, + "layer_8_v_norm": 0.492750883102417, + "layer_8_cos_v_neg_g": 0.08820790797472, + "layer_9_v_norm": 0.5102674961090088, + "layer_9_cos_v_neg_g": 0.08237139135599136, + "layer_10_v_norm": 0.5226972103118896, + "layer_10_cos_v_neg_g": 0.1006840169429779, + "layer_11_v_norm": 0.5276724696159363, + "layer_11_cos_v_neg_g": 0.12577055394649506, + "layer_12_v_norm": 0.5104520320892334, + "layer_12_cos_v_neg_g": 0.15762530267238617, + "block0_q_v_norm": 0.12816382944583893, + "block0_q_cos_v_neg_g": 0.21503853797912598, + "block0_k_v_norm": 0.12489498406648636, + "block0_k_cos_v_neg_g": 0.20568668842315674, + "block0_v_v_norm": 0.130986750125885, + "block0_v_cos_v_neg_g": 0.2361707091331482, + "block0_o_v_norm": 0.13205832242965698, + "block0_o_cos_v_neg_g": 0.21960684657096863, + "block0_mlp_win_v_norm": 0.2844177782535553, + "block0_mlp_win_cos_v_neg_g": 0.15452410280704498, + "block0_mlp_wout_v_norm": 0.24718604981899261, + "block0_mlp_wout_cos_v_neg_g": 0.20557576417922974, + "block3_q_v_norm": 0.14430449903011322, + "block3_q_cos_v_neg_g": 0.07422908395528793, + "block3_k_v_norm": 0.13888958096504211, + "block3_k_cos_v_neg_g": 0.07034612447023392, + "block3_v_v_norm": 0.11968156695365906, + "block3_v_cos_v_neg_g": 0.047756072133779526, + "block3_o_v_norm": 0.1259349286556244, + "block3_o_cos_v_neg_g": 0.1751680225133896, + "block3_mlp_win_v_norm": 0.3027603328227997, + "block3_mlp_win_cos_v_neg_g": 0.08403240144252777, + "block3_mlp_wout_v_norm": 0.2561022937297821, + "block3_mlp_wout_cos_v_neg_g": 0.23718707263469696, + "block7_q_v_norm": 0.1556297391653061, + "block7_q_cos_v_neg_g": 0.10505496710538864, + "block7_k_v_norm": 0.14561961591243744, + "block7_k_cos_v_neg_g": 0.2107471227645874, + "block7_v_v_norm": 0.1265895664691925, + "block7_v_cos_v_neg_g": 0.071421317756176, + "block7_o_v_norm": 0.14018772542476654, + "block7_o_cos_v_neg_g": 0.2292460799217224, + "block7_mlp_win_v_norm": 0.2965679168701172, + "block7_mlp_win_cos_v_neg_g": 0.11620090901851654, + "block7_mlp_wout_v_norm": 0.27136874198913574, + "block7_mlp_wout_cos_v_neg_g": 0.2381700575351715, + 
"block11_q_v_norm": 0.15608875453472137, + "block11_q_cos_v_neg_g": 0.1565430611371994, + "block11_k_v_norm": 0.14705026149749756, + "block11_k_cos_v_neg_g": 0.20835766196250916, + "block11_v_v_norm": 0.13282577693462372, + "block11_v_cos_v_neg_g": 0.10691125690937042, + "block11_o_v_norm": 0.15028007328510284, + "block11_o_cos_v_neg_g": 0.23521991074085236, + "block11_mlp_win_v_norm": 0.3120087683200836, + "block11_mlp_win_cos_v_neg_g": 0.15019692480564117, + "block11_mlp_wout_v_norm": 0.2772906720638275, + "block11_mlp_wout_cos_v_neg_g": 0.2043936550617218, + "embed_lm_head_sharpness": 0.00031579643837176263, + "layer_1_sharpness": 0.010405473411083221, + "layer_2_sharpness": 0.0010165874846279621, + "layer_3_sharpness": 0.0018306808779016137, + "layer_4_sharpness": 0.0017127097817137837, + "layer_5_sharpness": 0.0010450362460687757, + "layer_6_sharpness": 0.0014612177619710565, + "layer_7_sharpness": 0.0017137652030214667, + "layer_8_sharpness": 0.0026827766560018063, + "layer_9_sharpness": 0.002032651798799634, + "layer_10_sharpness": 0.0011803677771240473, + "layer_11_sharpness": 0.0013053861912339926, + "layer_12_sharpness": 0.004053718410432339, + "block0_q_sharpness": 0.001308213802985847, + "block0_k_sharpness": 0.0021175930742174387, + "block0_v_sharpness": 0.00655358424410224, + "block0_o_sharpness": 0.005457594990730286, + "block0_mlp_win_sharpness": 0.0019076557364314795, + "block0_mlp_wout_sharpness": 0.005059407092630863, + "block3_q_sharpness": 0.001370398560538888, + "block3_k_sharpness": 0.0006106335204094648, + "block3_v_sharpness": 0.0027870305348187685, + "block3_o_sharpness": 0.0011810563737526536, + "block3_mlp_win_sharpness": 0.00022329149942379445, + "block3_mlp_wout_sharpness": 0.0004647333989851177, + "block7_q_sharpness": 0.00018353384803049266, + "block7_k_sharpness": 0.0004963331739418209, + "block7_v_sharpness": 0.005754723679274321, + "block7_o_sharpness": 0.0006735234637744725, + "block7_mlp_win_sharpness": 0.0008471322362311184, + "block7_mlp_wout_sharpness": 0.0006266258424147964, + "block11_q_sharpness": 0.00011653680849121884, + "block11_k_sharpness": 0.0002638544829096645, + "block11_v_sharpness": 0.0008822344243526459, + "block11_o_sharpness": 0.00021568698866758496, + "block11_mlp_win_sharpness": 0.0011976789683103561, + "block11_mlp_wout_sharpness": 0.004348742309957743, + "sum_layer_numerators": 0.007113212815991468, + "block_diag_sharpness": 0.0024530472225681053, + "cross_layer_sharpness": 0.009466331080423284 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..5167635c26d80658f354173c99f58331df8ef19a --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_4500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1630237102508545, + "total_l1_linf_norm": 19155.0703125, + "total_spectral_norm": 2.1630241870880127, + "embed_lm_head_update_fnorm": 1.3208733797073364, + "embed_lm_head_max_l1_linf_norm": 0.3688089847564697, + "embed_lm_head_max_spectral_norm": 0.203680157661438, + "layer_1_update_fnorm": 0.46573352813720703, + "layer_1_max_l1_linf_norm": 0.549912691116333, + "layer_1_max_spectral_norm": 0.07601892948150635, + "layer_2_update_fnorm": 0.4818713665008545, + 
"layer_2_max_l1_linf_norm": 0.5603924989700317, + "layer_2_max_spectral_norm": 0.08029057830572128, + "layer_3_update_fnorm": 0.47701284289360046, + "layer_3_max_l1_linf_norm": 0.5925528407096863, + "layer_3_max_spectral_norm": 0.07921385765075684, + "layer_4_update_fnorm": 0.4804668426513672, + "layer_4_max_l1_linf_norm": 0.564979612827301, + "layer_4_max_spectral_norm": 0.07969415187835693, + "layer_5_update_fnorm": 0.47622859477996826, + "layer_5_max_l1_linf_norm": 0.4904418885707855, + "layer_5_max_spectral_norm": 0.06260882318019867, + "layer_6_update_fnorm": 0.48727741837501526, + "layer_6_max_l1_linf_norm": 0.6020174026489258, + "layer_6_max_spectral_norm": 0.07732421159744263, + "layer_7_update_fnorm": 0.49875181913375854, + "layer_7_max_l1_linf_norm": 0.579442024230957, + "layer_7_max_spectral_norm": 0.0671982392668724, + "layer_8_update_fnorm": 0.4961446225643158, + "layer_8_max_l1_linf_norm": 0.5411319732666016, + "layer_8_max_spectral_norm": 0.0640799030661583, + "layer_9_update_fnorm": 0.5088367462158203, + "layer_9_max_l1_linf_norm": 0.5413346290588379, + "layer_9_max_spectral_norm": 0.04928702116012573, + "layer_10_update_fnorm": 0.5216191411018372, + "layer_10_max_l1_linf_norm": 0.5390035510063171, + "layer_10_max_spectral_norm": 0.04584145173430443, + "layer_11_update_fnorm": 0.5259233713150024, + "layer_11_max_l1_linf_norm": 0.5567348003387451, + "layer_11_max_spectral_norm": 0.054109808057546616, + "layer_12_update_fnorm": 0.5096343755722046, + "layer_12_max_l1_linf_norm": 0.549290120601654, + "layer_12_max_spectral_norm": 0.06886279582977295, + "block0_q_update_fnorm": 0.13974106311798096, + "block0_q_max_l1_linf_norm": 0.21634544432163239, + "block0_q_max_spectral_norm": 0.06090151146054268, + "block0_k_update_fnorm": 0.13330480456352234, + "block0_k_max_l1_linf_norm": 0.22158528864383698, + "block0_k_max_spectral_norm": 0.0679624006152153, + "block0_v_update_fnorm": 0.13368192315101624, + "block0_v_max_l1_linf_norm": 0.16256782412528992, + "block0_v_max_spectral_norm": 0.04862101748585701, + "block0_o_update_fnorm": 0.13499696552753448, + "block0_o_max_l1_linf_norm": 0.14599823951721191, + "block0_o_max_spectral_norm": 0.03698178008198738, + "block0_mlp_win_update_fnorm": 0.2854249179363251, + "block0_mlp_win_max_l1_linf_norm": 0.161034494638443, + "block0_mlp_win_max_spectral_norm": 0.04676011577248573, + "block0_mlp_wout_update_fnorm": 0.24891676008701324, + "block0_mlp_wout_max_l1_linf_norm": 0.549912691116333, + "block0_mlp_wout_max_spectral_norm": 0.07601892948150635, + "block3_q_update_fnorm": 0.1468561589717865, + "block3_q_max_l1_linf_norm": 0.17131221294403076, + "block3_q_max_spectral_norm": 0.04760180413722992, + "block3_k_update_fnorm": 0.14097677171230316, + "block3_k_max_l1_linf_norm": 0.18000972270965576, + "block3_k_max_spectral_norm": 0.03133254125714302, + "block3_v_update_fnorm": 0.11991564929485321, + "block3_v_max_l1_linf_norm": 0.14680622518062592, + "block3_v_max_spectral_norm": 0.035339925438165665, + "block3_o_update_fnorm": 0.1253490000963211, + "block3_o_max_l1_linf_norm": 0.14411193132400513, + "block3_o_max_spectral_norm": 0.0406411774456501, + "block3_mlp_win_update_fnorm": 0.30648142099380493, + "block3_mlp_win_max_l1_linf_norm": 0.19755849242210388, + "block3_mlp_win_max_spectral_norm": 0.06647387146949768, + "block3_mlp_wout_update_fnorm": 0.25546392798423767, + "block3_mlp_wout_max_l1_linf_norm": 0.564979612827301, + "block3_mlp_wout_max_spectral_norm": 0.07969415187835693, + "block7_q_update_fnorm": 0.15861910581588745, + 
"block7_q_max_l1_linf_norm": 0.16597658395767212, + "block7_q_max_spectral_norm": 0.026829706504940987, + "block7_k_update_fnorm": 0.14854390919208527, + "block7_k_max_l1_linf_norm": 0.1854468435049057, + "block7_k_max_spectral_norm": 0.02636544778943062, + "block7_v_update_fnorm": 0.128518745303154, + "block7_v_max_l1_linf_norm": 0.13699957728385925, + "block7_v_max_spectral_norm": 0.028663504868745804, + "block7_o_update_fnorm": 0.13996577262878418, + "block7_o_max_l1_linf_norm": 0.13356095552444458, + "block7_o_max_spectral_norm": 0.02710537612438202, + "block7_mlp_win_update_fnorm": 0.3002946972846985, + "block7_mlp_win_max_l1_linf_norm": 0.1712038815021515, + "block7_mlp_win_max_spectral_norm": 0.048582177609205246, + "block7_mlp_wout_update_fnorm": 0.26932892203330994, + "block7_mlp_wout_max_l1_linf_norm": 0.5411319732666016, + "block7_mlp_wout_max_spectral_norm": 0.0640799030661583, + "block11_q_update_fnorm": 0.1576208621263504, + "block11_q_max_l1_linf_norm": 0.15837663412094116, + "block11_q_max_spectral_norm": 0.02809758670628071, + "block11_k_update_fnorm": 0.14819355309009552, + "block11_k_max_l1_linf_norm": 0.16710767149925232, + "block11_k_max_spectral_norm": 0.025322169065475464, + "block11_v_update_fnorm": 0.13295520842075348, + "block11_v_max_l1_linf_norm": 0.151439368724823, + "block11_v_max_spectral_norm": 0.034411802887916565, + "block11_o_update_fnorm": 0.14895014464855194, + "block11_o_max_l1_linf_norm": 0.15037208795547485, + "block11_o_max_spectral_norm": 0.0354679711163044, + "block11_mlp_win_update_fnorm": 0.3111463785171509, + "block11_mlp_win_max_l1_linf_norm": 0.16373887658119202, + "block11_mlp_win_max_spectral_norm": 0.05128566548228264, + "block11_mlp_wout_update_fnorm": 0.2759271264076233, + "block11_mlp_wout_max_l1_linf_norm": 0.549290120601654, + "block11_mlp_wout_max_spectral_norm": 0.06886279582977295, + "total_sharpness": 0.006840861402451992, + "block_total_sharpness": 0.01001746952533722, + "v_norm_block": 1.7128826379776, + "v_T_H_v_block": 0.02939092554152012, + "v_norm": 2.1630237102508545, + "ip_v_neg_g_hvp": 0.05905665457248688, + "cos_v_neg_g_hvp": 0.07851415127515793, + "g_hvp_norm": 0.3477439880371094, + "ip_v_neg_g_t": 0.05936138704419136, + "cos_v_neg_g_t": 0.09266722947359085, + "g_t_norm": 0.29615330696105957, + "g_norm": 0.3477439880371094, + "hv_norm": 0.43962904810905457, + "cos_v_hv": 0.033657800406217575, + "hg_norm": 4.025289058685303, + "cos_g_hg": 0.6122668981552124, + "v_parallel_norm": 0.009980416856706142, + "v_perp_norm": 2.1630008220672607, + "embed_lm_head_v_norm": 1.3208733797073364, + "embed_lm_head_cos_v_neg_g": 0.09303539991378784, + "layer_1_v_norm": 0.46573352813720703, + "layer_1_cos_v_neg_g": 0.15338069200515747, + "layer_2_v_norm": 0.4818713665008545, + "layer_2_cos_v_neg_g": 0.0757863000035286, + "layer_3_v_norm": 0.47701284289360046, + "layer_3_cos_v_neg_g": 0.06741957366466522, + "layer_4_v_norm": 0.4804668426513672, + "layer_4_cos_v_neg_g": 0.06432972848415375, + "layer_5_v_norm": 0.47622859477996826, + "layer_5_cos_v_neg_g": 0.05267370119690895, + "layer_6_v_norm": 0.48727741837501526, + "layer_6_cos_v_neg_g": 0.06745147705078125, + "layer_7_v_norm": 0.49875181913375854, + "layer_7_cos_v_neg_g": 0.07940326631069183, + "layer_8_v_norm": 0.4961446225643158, + "layer_8_cos_v_neg_g": 0.07406636327505112, + "layer_9_v_norm": 0.5088367462158203, + "layer_9_cos_v_neg_g": 0.07316368818283081, + "layer_10_v_norm": 0.5216191411018372, + "layer_10_cos_v_neg_g": 0.08765809237957001, + "layer_11_v_norm": 
0.5259233713150024, + "layer_11_cos_v_neg_g": 0.10861340165138245, + "layer_12_v_norm": 0.5096343755722046, + "layer_12_cos_v_neg_g": 0.13825148344039917, + "block0_q_v_norm": 0.13974106311798096, + "block0_q_cos_v_neg_g": 0.19147363305091858, + "block0_k_v_norm": 0.13330480456352234, + "block0_k_cos_v_neg_g": 0.18478640913963318, + "block0_v_v_norm": 0.13368192315101624, + "block0_v_cos_v_neg_g": 0.22568410634994507, + "block0_o_v_norm": 0.13499696552753448, + "block0_o_cos_v_neg_g": 0.19632776081562042, + "block0_mlp_win_v_norm": 0.2854249179363251, + "block0_mlp_win_cos_v_neg_g": 0.13367228209972382, + "block0_mlp_wout_v_norm": 0.24891676008701324, + "block0_mlp_wout_cos_v_neg_g": 0.19004330039024353, + "block3_q_v_norm": 0.1468561589717865, + "block3_q_cos_v_neg_g": 0.07808811962604523, + "block3_k_v_norm": 0.14097677171230316, + "block3_k_cos_v_neg_g": 0.0811103880405426, + "block3_v_v_norm": 0.11991564929485321, + "block3_v_cos_v_neg_g": 0.04194766655564308, + "block3_o_v_norm": 0.1253490000963211, + "block3_o_cos_v_neg_g": 0.15369069576263428, + "block3_mlp_win_v_norm": 0.30648142099380493, + "block3_mlp_win_cos_v_neg_g": 0.07035030424594879, + "block3_mlp_wout_v_norm": 0.25546392798423767, + "block3_mlp_wout_cos_v_neg_g": 0.23502488434314728, + "block7_q_v_norm": 0.15861910581588745, + "block7_q_cos_v_neg_g": 0.0870501697063446, + "block7_k_v_norm": 0.14854390919208527, + "block7_k_cos_v_neg_g": 0.2073860466480255, + "block7_v_v_norm": 0.128518745303154, + "block7_v_cos_v_neg_g": 0.06198050081729889, + "block7_o_v_norm": 0.13996577262878418, + "block7_o_cos_v_neg_g": 0.22495760023593903, + "block7_mlp_win_v_norm": 0.3002946972846985, + "block7_mlp_win_cos_v_neg_g": 0.10423580557107925, + "block7_mlp_wout_v_norm": 0.26932892203330994, + "block7_mlp_wout_cos_v_neg_g": 0.2347613424062729, + "block11_q_v_norm": 0.1576208621263504, + "block11_q_cos_v_neg_g": 0.14317385852336884, + "block11_k_v_norm": 0.14819355309009552, + "block11_k_cos_v_neg_g": 0.19149121642112732, + "block11_v_v_norm": 0.13295520842075348, + "block11_v_cos_v_neg_g": 0.09675632417201996, + "block11_o_v_norm": 0.14895014464855194, + "block11_o_cos_v_neg_g": 0.22353674471378326, + "block11_mlp_win_v_norm": 0.3111463785171509, + "block11_mlp_win_cos_v_neg_g": 0.1337779313325882, + "block11_mlp_wout_v_norm": 0.2759271264076233, + "block11_mlp_wout_cos_v_neg_g": 0.18300606310367584, + "embed_lm_head_sharpness": 0.00031878877780400217, + "layer_1_sharpness": 0.008203072473406792, + "layer_2_sharpness": 0.00042694978765212, + "layer_3_sharpness": 0.0011529605835676193, + "layer_4_sharpness": 0.0013331009540706873, + "layer_5_sharpness": 0.0008571923826821148, + "layer_6_sharpness": 0.0012994806747883558, + "layer_7_sharpness": 0.0015337058575823903, + "layer_8_sharpness": 0.002604384906589985, + "layer_9_sharpness": 0.0019607527647167444, + "layer_10_sharpness": 0.001080856192857027, + "layer_11_sharpness": 0.0010926517425104976, + "layer_12_sharpness": 0.002192180138081312, + "block0_q_sharpness": 0.0011880126548931003, + "block0_k_sharpness": 0.0011828261194750667, + "block0_v_sharpness": 0.005682002753019333, + "block0_o_sharpness": 0.004553902428597212, + "block0_mlp_win_sharpness": 0.0013660205295309424, + "block0_mlp_wout_sharpness": 0.003564355196431279, + "block3_q_sharpness": 0.0012151176342740655, + "block3_k_sharpness": 0.0004132459289394319, + "block3_v_sharpness": 0.001961464760825038, + "block3_o_sharpness": 0.0008226872305385768, + "block3_mlp_win_sharpness": 0.0001766566128935665, + 
"block3_mlp_wout_sharpness": 0.00048193251132033765, + "block7_q_sharpness": 0.00014845003897789866, + "block7_k_sharpness": 0.0003861114091705531, + "block7_v_sharpness": 0.00512331910431385, + "block7_o_sharpness": 0.000544420734513551, + "block7_mlp_win_sharpness": 0.0008759239572100341, + "block7_mlp_wout_sharpness": 0.0006400732090696692, + "block11_q_sharpness": 0.00012326391879469156, + "block11_k_sharpness": 0.00025198381626978517, + "block11_v_sharpness": 0.00085197773296386, + "block11_o_sharpness": 0.00018496207485441118, + "block11_mlp_win_sharpness": 0.0007439568871632218, + "block11_mlp_wout_sharpness": 0.0020419915672391653, + "sum_layer_numerators": 0.005647445545318057, + "block_diag_sharpness": 0.001924849760477518, + "cross_layer_sharpness": 0.008092619764859702 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..74122d848f522d4acbef7af78148340cf568ae86 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.406083345413208, + "total_l1_linf_norm": 12253.9375, + "total_spectral_norm": 1.406083345413208, + "embed_lm_head_update_fnorm": 0.9606704711914062, + "embed_lm_head_max_l1_linf_norm": 0.2504149079322815, + "embed_lm_head_max_spectral_norm": 0.2883702218532562, + "layer_1_update_fnorm": 0.26890110969543457, + "layer_1_max_l1_linf_norm": 0.45081695914268494, + "layer_1_max_spectral_norm": 0.06864017993211746, + "layer_2_update_fnorm": 0.2715532183647156, + "layer_2_max_l1_linf_norm": 0.3364999294281006, + "layer_2_max_spectral_norm": 0.0586087703704834, + "layer_3_update_fnorm": 0.28082171082496643, + "layer_3_max_l1_linf_norm": 0.36240580677986145, + "layer_3_max_spectral_norm": 0.05854490026831627, + "layer_4_update_fnorm": 0.28363993763923645, + "layer_4_max_l1_linf_norm": 0.364271879196167, + "layer_4_max_spectral_norm": 0.06720840930938721, + "layer_5_update_fnorm": 0.28431400656700134, + "layer_5_max_l1_linf_norm": 0.3455794155597687, + "layer_5_max_spectral_norm": 0.07658275216817856, + "layer_6_update_fnorm": 0.29104673862457275, + "layer_6_max_l1_linf_norm": 0.39613789319992065, + "layer_6_max_spectral_norm": 0.07560689002275467, + "layer_7_update_fnorm": 0.3065149188041687, + "layer_7_max_l1_linf_norm": 0.3884482681751251, + "layer_7_max_spectral_norm": 0.07197824120521545, + "layer_8_update_fnorm": 0.31799471378326416, + "layer_8_max_l1_linf_norm": 0.397396057844162, + "layer_8_max_spectral_norm": 0.06822749227285385, + "layer_9_update_fnorm": 0.32132700085639954, + "layer_9_max_l1_linf_norm": 0.3851194977760315, + "layer_9_max_spectral_norm": 0.06416574120521545, + "layer_10_update_fnorm": 0.3153623938560486, + "layer_10_max_l1_linf_norm": 0.37556901574134827, + "layer_10_max_spectral_norm": 0.06522469967603683, + "layer_11_update_fnorm": 0.31131261587142944, + "layer_11_max_l1_linf_norm": 0.41649961471557617, + "layer_11_max_spectral_norm": 0.07292160391807556, + "layer_12_update_fnorm": 0.2976343631744385, + "layer_12_max_l1_linf_norm": 0.38136565685272217, + "layer_12_max_spectral_norm": 0.0913398340344429, + "block0_q_update_fnorm": 0.08258012682199478, + "block0_q_max_l1_linf_norm": 0.1485363245010376, + 
"block0_q_max_spectral_norm": 0.045797236263751984, + "block0_k_update_fnorm": 0.07807068526744843, + "block0_k_max_l1_linf_norm": 0.14690858125686646, + "block0_k_max_spectral_norm": 0.045739609748125076, + "block0_v_update_fnorm": 0.07007752358913422, + "block0_v_max_l1_linf_norm": 0.2288878858089447, + "block0_v_max_spectral_norm": 0.06033103168010712, + "block0_o_update_fnorm": 0.10261547565460205, + "block0_o_max_l1_linf_norm": 0.15173788368701935, + "block0_o_max_spectral_norm": 0.06269384175539017, + "block0_mlp_win_update_fnorm": 0.15274332463741302, + "block0_mlp_win_max_l1_linf_norm": 0.11914839595556259, + "block0_mlp_win_max_spectral_norm": 0.056795649230480194, + "block0_mlp_wout_update_fnorm": 0.14296142756938934, + "block0_mlp_wout_max_l1_linf_norm": 0.45081695914268494, + "block0_mlp_wout_max_spectral_norm": 0.06864017993211746, + "block3_q_update_fnorm": 0.08719604462385178, + "block3_q_max_l1_linf_norm": 0.1396242082118988, + "block3_q_max_spectral_norm": 0.04200838878750801, + "block3_k_update_fnorm": 0.07507337629795074, + "block3_k_max_l1_linf_norm": 0.1570844054222107, + "block3_k_max_spectral_norm": 0.03726020082831383, + "block3_v_update_fnorm": 0.08123419433832169, + "block3_v_max_l1_linf_norm": 0.13926373422145844, + "block3_v_max_spectral_norm": 0.04293172061443329, + "block3_o_update_fnorm": 0.09970416873693466, + "block3_o_max_l1_linf_norm": 0.14270025491714478, + "block3_o_max_spectral_norm": 0.05169135332107544, + "block3_mlp_win_update_fnorm": 0.16266626119613647, + "block3_mlp_win_max_l1_linf_norm": 0.12135317921638489, + "block3_mlp_win_max_spectral_norm": 0.06720840930938721, + "block3_mlp_wout_update_fnorm": 0.15548597276210785, + "block3_mlp_wout_max_l1_linf_norm": 0.364271879196167, + "block3_mlp_wout_max_spectral_norm": 0.055893849581480026, + "block7_q_update_fnorm": 0.09988243132829666, + "block7_q_max_l1_linf_norm": 0.15141046047210693, + "block7_q_max_spectral_norm": 0.04109812527894974, + "block7_k_update_fnorm": 0.08432731032371521, + "block7_k_max_l1_linf_norm": 0.1288008987903595, + "block7_k_max_spectral_norm": 0.029304323717951775, + "block7_v_update_fnorm": 0.0913560688495636, + "block7_v_max_l1_linf_norm": 0.12082895636558533, + "block7_v_max_spectral_norm": 0.04578742757439613, + "block7_o_update_fnorm": 0.1192842647433281, + "block7_o_max_l1_linf_norm": 0.14028549194335938, + "block7_o_max_spectral_norm": 0.05591658130288124, + "block7_mlp_win_update_fnorm": 0.18002794682979584, + "block7_mlp_win_max_l1_linf_norm": 0.13143624365329742, + "block7_mlp_win_max_spectral_norm": 0.06822749227285385, + "block7_mlp_wout_update_fnorm": 0.170295849442482, + "block7_mlp_wout_max_l1_linf_norm": 0.397396057844162, + "block7_mlp_wout_max_spectral_norm": 0.055929794907569885, + "block11_q_update_fnorm": 0.08599437028169632, + "block11_q_max_l1_linf_norm": 0.15291263163089752, + "block11_q_max_spectral_norm": 0.04175233468413353, + "block11_k_update_fnorm": 0.07199707627296448, + "block11_k_max_l1_linf_norm": 0.13677650690078735, + "block11_k_max_spectral_norm": 0.027340615168213844, + "block11_v_update_fnorm": 0.08759894967079163, + "block11_v_max_l1_linf_norm": 0.11154720187187195, + "block11_v_max_spectral_norm": 0.04946506395936012, + "block11_o_update_fnorm": 0.11874186992645264, + "block11_o_max_l1_linf_norm": 0.14394447207450867, + "block11_o_max_spectral_norm": 0.06655673682689667, + "block11_mlp_win_update_fnorm": 0.15836834907531738, + "block11_mlp_win_max_l1_linf_norm": 0.1282804310321808, + "block11_mlp_win_max_spectral_norm": 
0.07992061227560043, + "block11_mlp_wout_update_fnorm": 0.17066293954849243, + "block11_mlp_wout_max_l1_linf_norm": 0.38136565685272217, + "block11_mlp_wout_max_spectral_norm": 0.0913398340344429, + "total_sharpness": 0.18090024590492249, + "block_total_sharpness": 0.32310494780540466, + "v_norm_block": 1.0267337560653687, + "v_T_H_v_block": 0.3406115174293518, + "v_norm": 1.406083345413208, + "ip_v_neg_g_hvp": 0.17860978841781616, + "cos_v_neg_g_hvp": 0.16244643926620483, + "g_hvp_norm": 0.781959056854248, + "ip_v_neg_g_t": 0.17850156128406525, + "cos_v_neg_g_t": 0.16938795149326324, + "g_t_norm": 0.749459981918335, + "g_norm": 0.781959056854248, + "hv_norm": 1.6825518608093262, + "cos_v_hv": 0.1511756330728531, + "hg_norm": 10.556232452392578, + "cos_g_hg": 0.8222600817680359, + "v_parallel_norm": 0.01316953543573618, + "v_perp_norm": 1.4060217142105103, + "embed_lm_head_v_norm": 0.9606704711914062, + "embed_lm_head_cos_v_neg_g": 0.0688190683722496, + "layer_1_v_norm": 0.26890110969543457, + "layer_1_cos_v_neg_g": 0.2877426743507385, + "layer_2_v_norm": 0.2715532183647156, + "layer_2_cos_v_neg_g": 0.30483338236808777, + "layer_3_v_norm": 0.28082171082496643, + "layer_3_cos_v_neg_g": 0.298144668340683, + "layer_4_v_norm": 0.28363993763923645, + "layer_4_cos_v_neg_g": 0.3013460636138916, + "layer_5_v_norm": 0.28431400656700134, + "layer_5_cos_v_neg_g": 0.29431456327438354, + "layer_6_v_norm": 0.29104673862457275, + "layer_6_cos_v_neg_g": 0.28886035084724426, + "layer_7_v_norm": 0.3065149188041687, + "layer_7_cos_v_neg_g": 0.31050199270248413, + "layer_8_v_norm": 0.31799471378326416, + "layer_8_cos_v_neg_g": 0.33070671558380127, + "layer_9_v_norm": 0.32132700085639954, + "layer_9_cos_v_neg_g": 0.27125808596611023, + "layer_10_v_norm": 0.3153623938560486, + "layer_10_cos_v_neg_g": 0.2840331196784973, + "layer_11_v_norm": 0.31131261587142944, + "layer_11_cos_v_neg_g": 0.2680991291999817, + "layer_12_v_norm": 0.2976343631744385, + "layer_12_cos_v_neg_g": 0.2529275417327881, + "block0_q_v_norm": 0.08258012682199478, + "block0_q_cos_v_neg_g": 0.3608741760253906, + "block0_k_v_norm": 0.07807068526744843, + "block0_k_cos_v_neg_g": 0.2757096588611603, + "block0_v_v_norm": 0.07007752358913422, + "block0_v_cos_v_neg_g": 0.34940341114997864, + "block0_o_v_norm": 0.10261547565460205, + "block0_o_cos_v_neg_g": 0.47141629457473755, + "block0_mlp_win_v_norm": 0.15274332463741302, + "block0_mlp_win_cos_v_neg_g": 0.4175340235233307, + "block0_mlp_wout_v_norm": 0.14296142756938934, + "block0_mlp_wout_cos_v_neg_g": 0.4217154383659363, + "block3_q_v_norm": 0.08719604462385178, + "block3_q_cos_v_neg_g": 0.26580533385276794, + "block3_k_v_norm": 0.07507337629795074, + "block3_k_cos_v_neg_g": 0.13858984410762787, + "block3_v_v_norm": 0.08123419433832169, + "block3_v_cos_v_neg_g": 0.24606497585773468, + "block3_o_v_norm": 0.09970416873693466, + "block3_o_cos_v_neg_g": 0.30422890186309814, + "block3_mlp_win_v_norm": 0.16266626119613647, + "block3_mlp_win_cos_v_neg_g": 0.3550291359424591, + "block3_mlp_wout_v_norm": 0.15548597276210785, + "block3_mlp_wout_cos_v_neg_g": 0.388791024684906, + "block7_q_v_norm": 0.09988243132829666, + "block7_q_cos_v_neg_g": 0.38597825169563293, + "block7_k_v_norm": 0.08432731032371521, + "block7_k_cos_v_neg_g": 0.363587886095047, + "block7_v_v_norm": 0.0913560688495636, + "block7_v_cos_v_neg_g": 0.33250874280929565, + "block7_o_v_norm": 0.1192842647433281, + "block7_o_cos_v_neg_g": 0.36796316504478455, + "block7_mlp_win_v_norm": 0.18002794682979584, + "block7_mlp_win_cos_v_neg_g": 
0.3679488003253937, + "block7_mlp_wout_v_norm": 0.170295849442482, + "block7_mlp_wout_cos_v_neg_g": 0.37384191155433655, + "block11_q_v_norm": 0.08599437028169632, + "block11_q_cos_v_neg_g": 0.34503358602523804, + "block11_k_v_norm": 0.07199707627296448, + "block11_k_cos_v_neg_g": 0.2710370123386383, + "block11_v_v_norm": 0.08759894967079163, + "block11_v_cos_v_neg_g": 0.2689424157142639, + "block11_o_v_norm": 0.11874186992645264, + "block11_o_cos_v_neg_g": 0.29511719942092896, + "block11_mlp_win_v_norm": 0.15836834907531738, + "block11_mlp_win_cos_v_neg_g": 0.23209905624389648, + "block11_mlp_wout_v_norm": 0.17066293954849243, + "block11_mlp_wout_cos_v_neg_g": 0.28289881348609924, + "embed_lm_head_sharpness": 0.0010991395683959126, + "layer_1_sharpness": 0.5097613334655762, + "layer_2_sharpness": 0.05143611878156662, + "layer_3_sharpness": 0.04100409150123596, + "layer_4_sharpness": 0.02674025483429432, + "layer_5_sharpness": 0.02743702195584774, + "layer_6_sharpness": 0.02257724665105343, + "layer_7_sharpness": 0.0247501228004694, + "layer_8_sharpness": 0.01681053638458252, + "layer_9_sharpness": 0.013778877444565296, + "layer_10_sharpness": 0.007420549634844065, + "layer_11_sharpness": 0.007804579101502895, + "layer_12_sharpness": 0.020046954974532127, + "block0_q_sharpness": 0.03571022301912308, + "block0_k_sharpness": 0.03475601598620415, + "block0_v_sharpness": 0.7057982087135315, + "block0_o_sharpness": 0.35735005140304565, + "block0_mlp_win_sharpness": 0.0430256687104702, + "block0_mlp_wout_sharpness": 0.14724081754684448, + "block3_q_sharpness": 0.00109605782199651, + "block3_k_sharpness": 0.0005366278346627951, + "block3_v_sharpness": 0.009926323778927326, + "block3_o_sharpness": 0.011461907997727394, + "block3_mlp_win_sharpness": 0.004505265038460493, + "block3_mlp_wout_sharpness": 0.018575195223093033, + "block7_q_sharpness": 0.001652053790166974, + "block7_k_sharpness": 0.024681275710463524, + "block7_v_sharpness": 0.008017984218895435, + "block7_o_sharpness": 0.004606681875884533, + "block7_mlp_win_sharpness": 0.0031478849705308676, + "block7_mlp_wout_sharpness": 0.007008287124335766, + "block11_q_sharpness": 0.0007070419960655272, + "block11_k_sharpness": 0.0019282258581370115, + "block11_v_sharpness": 0.008916283026337624, + "block11_o_sharpness": 0.002199260052293539, + "block11_mlp_win_sharpness": 0.018558749929070473, + "block11_mlp_wout_sharpness": 0.007048012223094702, + "sum_layer_numerators": 0.05888608303868121, + "block_diag_sharpness": 0.05585949251678956, + "cross_layer_sharpness": 0.2672454552886151 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..5ba8c958d8469cf20c69a4f94e31b8de111b88b2 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.201107978820801, + "total_l1_linf_norm": 19531.6796875, + "total_spectral_norm": 2.20110821723938, + "embed_lm_head_update_fnorm": 1.328104853630066, + "embed_lm_head_max_l1_linf_norm": 0.3372075855731964, + "embed_lm_head_max_spectral_norm": 0.2099251002073288, + "layer_1_update_fnorm": 0.4794049859046936, + "layer_1_max_l1_linf_norm": 0.5781557559967041, + "layer_1_max_spectral_norm": 
0.08027706295251846, + "layer_2_update_fnorm": 0.4955706298351288, + "layer_2_max_l1_linf_norm": 0.5840539336204529, + "layer_2_max_spectral_norm": 0.0771515890955925, + "layer_3_update_fnorm": 0.4886876046657562, + "layer_3_max_l1_linf_norm": 0.6223211288452148, + "layer_3_max_spectral_norm": 0.08107662200927734, + "layer_4_update_fnorm": 0.4932505786418915, + "layer_4_max_l1_linf_norm": 0.7109434008598328, + "layer_4_max_spectral_norm": 0.08457495272159576, + "layer_5_update_fnorm": 0.4903993606567383, + "layer_5_max_l1_linf_norm": 0.5945761203765869, + "layer_5_max_spectral_norm": 0.06842552125453949, + "layer_6_update_fnorm": 0.5002065300941467, + "layer_6_max_l1_linf_norm": 0.6540209054946899, + "layer_6_max_spectral_norm": 0.07973147928714752, + "layer_7_update_fnorm": 0.5072349309921265, + "layer_7_max_l1_linf_norm": 0.5636236667633057, + "layer_7_max_spectral_norm": 0.07277638465166092, + "layer_8_update_fnorm": 0.5038881897926331, + "layer_8_max_l1_linf_norm": 0.5426750183105469, + "layer_8_max_spectral_norm": 0.06427062302827835, + "layer_9_update_fnorm": 0.51873779296875, + "layer_9_max_l1_linf_norm": 0.5246073007583618, + "layer_9_max_spectral_norm": 0.05227011814713478, + "layer_10_update_fnorm": 0.534620463848114, + "layer_10_max_l1_linf_norm": 0.5384043455123901, + "layer_10_max_spectral_norm": 0.045172348618507385, + "layer_11_update_fnorm": 0.5384992361068726, + "layer_11_max_l1_linf_norm": 0.5446304082870483, + "layer_11_max_spectral_norm": 0.05062002316117287, + "layer_12_update_fnorm": 0.5260285139083862, + "layer_12_max_l1_linf_norm": 0.5518191456794739, + "layer_12_max_spectral_norm": 0.07454878836870193, + "block0_q_update_fnorm": 0.1442253589630127, + "block0_q_max_l1_linf_norm": 0.2435019165277481, + "block0_q_max_spectral_norm": 0.06394221633672714, + "block0_k_update_fnorm": 0.13745450973510742, + "block0_k_max_l1_linf_norm": 0.24098768830299377, + "block0_k_max_spectral_norm": 0.06724506616592407, + "block0_v_update_fnorm": 0.1385350078344345, + "block0_v_max_l1_linf_norm": 0.1743948608636856, + "block0_v_max_spectral_norm": 0.048826783895492554, + "block0_o_update_fnorm": 0.14223887026309967, + "block0_o_max_l1_linf_norm": 0.14702273905277252, + "block0_o_max_spectral_norm": 0.036526333540678024, + "block0_mlp_win_update_fnorm": 0.2925199866294861, + "block0_mlp_win_max_l1_linf_norm": 0.15342561900615692, + "block0_mlp_win_max_spectral_norm": 0.046893492341041565, + "block0_mlp_wout_update_fnorm": 0.25504565238952637, + "block0_mlp_wout_max_l1_linf_norm": 0.5781557559967041, + "block0_mlp_wout_max_spectral_norm": 0.08027706295251846, + "block3_q_update_fnorm": 0.15160182118415833, + "block3_q_max_l1_linf_norm": 0.18071824312210083, + "block3_q_max_spectral_norm": 0.05632444843649864, + "block3_k_update_fnorm": 0.14518645405769348, + "block3_k_max_l1_linf_norm": 0.20809294283390045, + "block3_k_max_spectral_norm": 0.03459726274013519, + "block3_v_update_fnorm": 0.12621724605560303, + "block3_v_max_l1_linf_norm": 0.18229909241199493, + "block3_v_max_spectral_norm": 0.03887701407074928, + "block3_o_update_fnorm": 0.13260149955749512, + "block3_o_max_l1_linf_norm": 0.15192559361457825, + "block3_o_max_spectral_norm": 0.045000866055488586, + "block3_mlp_win_update_fnorm": 0.3109701871871948, + "block3_mlp_win_max_l1_linf_norm": 0.2073397934436798, + "block3_mlp_win_max_spectral_norm": 0.0684075802564621, + "block3_mlp_wout_update_fnorm": 0.2624664306640625, + "block3_mlp_wout_max_l1_linf_norm": 0.7109434008598328, + "block3_mlp_wout_max_spectral_norm": 
0.08457495272159576, + "block7_q_update_fnorm": 0.16134323179721832, + "block7_q_max_l1_linf_norm": 0.16056427359580994, + "block7_q_max_spectral_norm": 0.02785990573465824, + "block7_k_update_fnorm": 0.1541355550289154, + "block7_k_max_l1_linf_norm": 0.17418348789215088, + "block7_k_max_spectral_norm": 0.02854670025408268, + "block7_v_update_fnorm": 0.13204926252365112, + "block7_v_max_l1_linf_norm": 0.13747836649417877, + "block7_v_max_spectral_norm": 0.03145381063222885, + "block7_o_update_fnorm": 0.1440756469964981, + "block7_o_max_l1_linf_norm": 0.13990266621112823, + "block7_o_max_spectral_norm": 0.03019564412534237, + "block7_mlp_win_update_fnorm": 0.3042035400867462, + "block7_mlp_win_max_l1_linf_norm": 0.1715599000453949, + "block7_mlp_win_max_spectral_norm": 0.046146079897880554, + "block7_mlp_wout_update_fnorm": 0.2706642150878906, + "block7_mlp_wout_max_l1_linf_norm": 0.5426750183105469, + "block7_mlp_wout_max_spectral_norm": 0.06427062302827835, + "block11_q_update_fnorm": 0.16272933781147003, + "block11_q_max_l1_linf_norm": 0.17370522022247314, + "block11_q_max_spectral_norm": 0.03156709671020508, + "block11_k_update_fnorm": 0.1552216112613678, + "block11_k_max_l1_linf_norm": 0.20499363541603088, + "block11_k_max_spectral_norm": 0.030452696606516838, + "block11_v_update_fnorm": 0.13869331777095795, + "block11_v_max_l1_linf_norm": 0.16072455048561096, + "block11_v_max_spectral_norm": 0.03474053367972374, + "block11_o_update_fnorm": 0.1544833779335022, + "block11_o_max_l1_linf_norm": 0.15634600818157196, + "block11_o_max_spectral_norm": 0.03683887794613838, + "block11_mlp_win_update_fnorm": 0.3198711574077606, + "block11_mlp_win_max_l1_linf_norm": 0.18225356936454773, + "block11_mlp_win_max_spectral_norm": 0.05201280117034912, + "block11_mlp_wout_update_fnorm": 0.28389421105384827, + "block11_mlp_wout_max_l1_linf_norm": 0.5518191456794739, + "block11_mlp_wout_max_spectral_norm": 0.07454878836870193, + "total_sharpness": 0.007554301992058754, + "block_total_sharpness": 0.011030035093426704, + "v_norm_block": 1.755281925201416, + "v_T_H_v_block": 0.03398369997739792, + "v_norm": 2.201107978820801, + "ip_v_neg_g_hvp": 0.06210790574550629, + "cos_v_neg_g_hvp": 0.0717826634645462, + "g_hvp_norm": 0.3930845558643341, + "ip_v_neg_g_t": 0.062309298664331436, + "cos_v_neg_g_t": 0.08215919882059097, + "g_t_norm": 0.3445524275302887, + "g_norm": 0.3930845558643341, + "hv_norm": 0.5007055401802063, + "cos_v_hv": 0.03320881351828575, + "hg_norm": 5.138421058654785, + "cos_g_hg": 0.7297182679176331, + "v_parallel_norm": 0.007512002717703581, + "v_perp_norm": 2.2010953426361084, + "embed_lm_head_v_norm": 1.328104853630066, + "embed_lm_head_cos_v_neg_g": 0.09342417120933533, + "layer_1_v_norm": 0.4794049859046936, + "layer_1_cos_v_neg_g": 0.14802616834640503, + "layer_2_v_norm": 0.4955706298351288, + "layer_2_cos_v_neg_g": 0.064899742603302, + "layer_3_v_norm": 0.4886876046657562, + "layer_3_cos_v_neg_g": 0.0550314299762249, + "layer_4_v_norm": 0.4932505786418915, + "layer_4_cos_v_neg_g": 0.05986711010336876, + "layer_5_v_norm": 0.4903993606567383, + "layer_5_cos_v_neg_g": 0.05164264142513275, + "layer_6_v_norm": 0.500206470489502, + "layer_6_cos_v_neg_g": 0.06747474521398544, + "layer_7_v_norm": 0.5072349309921265, + "layer_7_cos_v_neg_g": 0.07632491737604141, + "layer_8_v_norm": 0.5038881897926331, + "layer_8_cos_v_neg_g": 0.0679815262556076, + "layer_9_v_norm": 0.51873779296875, + "layer_9_cos_v_neg_g": 0.06480924785137177, + "layer_10_v_norm": 0.534620463848114, + "layer_10_cos_v_neg_g": 
0.07920168340206146, + "layer_11_v_norm": 0.5384992361068726, + "layer_11_cos_v_neg_g": 0.1018185243010521, + "layer_12_v_norm": 0.5260285139083862, + "layer_12_cos_v_neg_g": 0.14679066836833954, + "block0_q_v_norm": 0.1442253589630127, + "block0_q_cos_v_neg_g": 0.1629042625427246, + "block0_k_v_norm": 0.13745450973510742, + "block0_k_cos_v_neg_g": 0.15010856091976166, + "block0_v_v_norm": 0.1385350078344345, + "block0_v_cos_v_neg_g": 0.22478429973125458, + "block0_o_v_norm": 0.14223887026309967, + "block0_o_cos_v_neg_g": 0.17758116126060486, + "block0_mlp_win_v_norm": 0.2925199866294861, + "block0_mlp_win_cos_v_neg_g": 0.12433000653982162, + "block0_mlp_wout_v_norm": 0.25504565238952637, + "block0_mlp_wout_cos_v_neg_g": 0.18809878826141357, + "block3_q_v_norm": 0.15160182118415833, + "block3_q_cos_v_neg_g": 0.08805999904870987, + "block3_k_v_norm": 0.14518645405769348, + "block3_k_cos_v_neg_g": 0.04562701657414436, + "block3_v_v_norm": 0.12621724605560303, + "block3_v_cos_v_neg_g": 0.043027110397815704, + "block3_o_v_norm": 0.13260149955749512, + "block3_o_cos_v_neg_g": 0.1593555212020874, + "block3_mlp_win_v_norm": 0.3109701871871948, + "block3_mlp_win_cos_v_neg_g": 0.06768722087144852, + "block3_mlp_wout_v_norm": 0.2624664306640625, + "block3_mlp_wout_cos_v_neg_g": 0.23044772446155548, + "block7_q_v_norm": 0.16134323179721832, + "block7_q_cos_v_neg_g": 0.08907105028629303, + "block7_k_v_norm": 0.1541355550289154, + "block7_k_cos_v_neg_g": 0.21675613522529602, + "block7_v_v_norm": 0.13204926252365112, + "block7_v_cos_v_neg_g": 0.060315947979688644, + "block7_o_v_norm": 0.1440756469964981, + "block7_o_cos_v_neg_g": 0.23376408219337463, + "block7_mlp_win_v_norm": 0.3042035400867462, + "block7_mlp_win_cos_v_neg_g": 0.09159854799509048, + "block7_mlp_wout_v_norm": 0.2706642150878906, + "block7_mlp_wout_cos_v_neg_g": 0.2444722205400467, + "block11_q_v_norm": 0.16272933781147003, + "block11_q_cos_v_neg_g": 0.12474305927753448, + "block11_k_v_norm": 0.1552216112613678, + "block11_k_cos_v_neg_g": 0.19548407196998596, + "block11_v_v_norm": 0.13869331777095795, + "block11_v_cos_v_neg_g": 0.08553437888622284, + "block11_o_v_norm": 0.1544833779335022, + "block11_o_cos_v_neg_g": 0.22154510021209717, + "block11_mlp_win_v_norm": 0.3198711574077606, + "block11_mlp_win_cos_v_neg_g": 0.15464811027050018, + "block11_mlp_wout_v_norm": 0.28389421105384827, + "block11_mlp_wout_cos_v_neg_g": 0.20307223498821259, + "embed_lm_head_sharpness": 0.00031918869353830814, + "layer_1_sharpness": 0.006875826511532068, + "layer_2_sharpness": 0.0006305041024461389, + "layer_3_sharpness": 0.0017716923030093312, + "layer_4_sharpness": 0.0015939460135996342, + "layer_5_sharpness": 0.0013073775917291641, + "layer_6_sharpness": 0.0018503231694921851, + "layer_7_sharpness": 0.0018792074406519532, + "layer_8_sharpness": 0.002974311588332057, + "layer_9_sharpness": 0.002361688297241926, + "layer_10_sharpness": 0.0011398412752896547, + "layer_11_sharpness": 0.000974810856860131, + "layer_12_sharpness": 0.0019956761971116066, + "block0_q_sharpness": 0.0011384326498955488, + "block0_k_sharpness": 0.001209078705869615, + "block0_v_sharpness": 0.004990477114915848, + "block0_o_sharpness": 0.003702624002471566, + "block0_mlp_win_sharpness": 0.001322033815085888, + "block0_mlp_wout_sharpness": 0.0034920801408588886, + "block3_q_sharpness": 0.0019115698523819447, + "block3_k_sharpness": 0.0005050231120549142, + "block3_v_sharpness": 0.0022807202767580748, + "block3_o_sharpness": 0.0008178033749572933, + "block3_mlp_win_sharpness": 
0.00021062047744635493, + "block3_mlp_wout_sharpness": 0.00042059089173562825, + "block7_q_sharpness": 0.0001743703178362921, + "block7_k_sharpness": 0.0004384537460282445, + "block7_v_sharpness": 0.0059341853484511375, + "block7_o_sharpness": 0.0006205580430105329, + "block7_mlp_win_sharpness": 0.000896753161214292, + "block7_mlp_wout_sharpness": 0.0006580126355402172, + "block11_q_sharpness": 0.00010573299368843436, + "block11_k_sharpness": 0.00022722926223650575, + "block11_v_sharpness": 0.0007380423485301435, + "block11_o_sharpness": 0.00016433697601314634, + "block11_mlp_win_sharpness": 0.0006872417288832366, + "block11_mlp_wout_sharpness": 0.0019073631847277284, + "sum_layer_numerators": 0.006358264493903218, + "block_diag_sharpness": 0.0020636917519550033, + "cross_layer_sharpness": 0.008966343341471701 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..b90a1d75edc4612f08c5d56f175285d30ed3fea3 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_5500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2145795822143555, + "total_l1_linf_norm": 19696.341796875, + "total_spectral_norm": 2.2145793437957764, + "embed_lm_head_update_fnorm": 1.3291919231414795, + "embed_lm_head_max_l1_linf_norm": 0.3532475531101227, + "embed_lm_head_max_spectral_norm": 0.2071266621351242, + "layer_1_update_fnorm": 0.4936727285385132, + "layer_1_max_l1_linf_norm": 0.6329169273376465, + "layer_1_max_spectral_norm": 0.08385691046714783, + "layer_2_update_fnorm": 0.5024579167366028, + "layer_2_max_l1_linf_norm": 0.5874888300895691, + "layer_2_max_spectral_norm": 0.08051186800003052, + "layer_3_update_fnorm": 0.5016084313392639, + "layer_3_max_l1_linf_norm": 0.6499385237693787, + "layer_3_max_spectral_norm": 0.09443988651037216, + "layer_4_update_fnorm": 0.50273597240448, + "layer_4_max_l1_linf_norm": 0.6562386155128479, + "layer_4_max_spectral_norm": 0.09704360365867615, + "layer_5_update_fnorm": 0.5053119659423828, + "layer_5_max_l1_linf_norm": 0.5693289041519165, + "layer_5_max_spectral_norm": 0.0766521468758583, + "layer_6_update_fnorm": 0.5024325251579285, + "layer_6_max_l1_linf_norm": 0.6226441860198975, + "layer_6_max_spectral_norm": 0.08436759561300278, + "layer_7_update_fnorm": 0.5096402764320374, + "layer_7_max_l1_linf_norm": 0.6254869103431702, + "layer_7_max_spectral_norm": 0.07821819186210632, + "layer_8_update_fnorm": 0.5055093169212341, + "layer_8_max_l1_linf_norm": 0.5571299195289612, + "layer_8_max_spectral_norm": 0.07512018084526062, + "layer_9_update_fnorm": 0.5174412727355957, + "layer_9_max_l1_linf_norm": 0.5459593534469604, + "layer_9_max_spectral_norm": 0.06060987338423729, + "layer_10_update_fnorm": 0.5326250791549683, + "layer_10_max_l1_linf_norm": 0.5427941083908081, + "layer_10_max_spectral_norm": 0.046584974974393845, + "layer_11_update_fnorm": 0.5378103256225586, + "layer_11_max_l1_linf_norm": 0.5569823980331421, + "layer_11_max_spectral_norm": 0.052896615117788315, + "layer_12_update_fnorm": 0.5228316783905029, + "layer_12_max_l1_linf_norm": 0.5729019641876221, + "layer_12_max_spectral_norm": 0.06728851795196533, + "block0_q_update_fnorm": 0.15342950820922852, + "block0_q_max_l1_linf_norm": 
0.22882679104804993, + "block0_q_max_spectral_norm": 0.06501273065805435, + "block0_k_update_fnorm": 0.15009832382202148, + "block0_k_max_l1_linf_norm": 0.22791126370429993, + "block0_k_max_spectral_norm": 0.07599752396345139, + "block0_v_update_fnorm": 0.14352844655513763, + "block0_v_max_l1_linf_norm": 0.2015545666217804, + "block0_v_max_spectral_norm": 0.05126873776316643, + "block0_o_update_fnorm": 0.1463894248008728, + "block0_o_max_l1_linf_norm": 0.15874767303466797, + "block0_o_max_spectral_norm": 0.03863886371254921, + "block0_mlp_win_update_fnorm": 0.2955491244792938, + "block0_mlp_win_max_l1_linf_norm": 0.16110043227672577, + "block0_mlp_win_max_spectral_norm": 0.0460377112030983, + "block0_mlp_wout_update_fnorm": 0.26105305552482605, + "block0_mlp_wout_max_l1_linf_norm": 0.6329169273376465, + "block0_mlp_wout_max_spectral_norm": 0.08385691046714783, + "block3_q_update_fnorm": 0.15288305282592773, + "block3_q_max_l1_linf_norm": 0.1730518639087677, + "block3_q_max_spectral_norm": 0.053586412221193314, + "block3_k_update_fnorm": 0.14890985190868378, + "block3_k_max_l1_linf_norm": 0.20429660379886627, + "block3_k_max_spectral_norm": 0.034362077713012695, + "block3_v_update_fnorm": 0.13341644406318665, + "block3_v_max_l1_linf_norm": 0.1769496351480484, + "block3_v_max_spectral_norm": 0.04769781604409218, + "block3_o_update_fnorm": 0.13799522817134857, + "block3_o_max_l1_linf_norm": 0.1597164124250412, + "block3_o_max_spectral_norm": 0.05242263525724411, + "block3_mlp_win_update_fnorm": 0.3134181499481201, + "block3_mlp_win_max_l1_linf_norm": 0.1912587583065033, + "block3_mlp_win_max_spectral_norm": 0.07262605428695679, + "block3_mlp_wout_update_fnorm": 0.2682792842388153, + "block3_mlp_wout_max_l1_linf_norm": 0.6562386155128479, + "block3_mlp_wout_max_spectral_norm": 0.09704360365867615, + "block7_q_update_fnorm": 0.1627318412065506, + "block7_q_max_l1_linf_norm": 0.1660347580909729, + "block7_q_max_spectral_norm": 0.028011735528707504, + "block7_k_update_fnorm": 0.15397417545318604, + "block7_k_max_l1_linf_norm": 0.1684720367193222, + "block7_k_max_spectral_norm": 0.025733163580298424, + "block7_v_update_fnorm": 0.1345202922821045, + "block7_v_max_l1_linf_norm": 0.14608731865882874, + "block7_v_max_spectral_norm": 0.033539511263370514, + "block7_o_update_fnorm": 0.14553838968276978, + "block7_o_max_l1_linf_norm": 0.14267997443675995, + "block7_o_max_spectral_norm": 0.031552504748106, + "block7_mlp_win_update_fnorm": 0.30371904373168945, + "block7_mlp_win_max_l1_linf_norm": 0.17814326286315918, + "block7_mlp_win_max_spectral_norm": 0.047079216688871384, + "block7_mlp_wout_update_fnorm": 0.27148881554603577, + "block7_mlp_wout_max_l1_linf_norm": 0.5571299195289612, + "block7_mlp_wout_max_spectral_norm": 0.07512018084526062, + "block11_q_update_fnorm": 0.15952813625335693, + "block11_q_max_l1_linf_norm": 0.15616066753864288, + "block11_q_max_spectral_norm": 0.02712566778063774, + "block11_k_update_fnorm": 0.15363368391990662, + "block11_k_max_l1_linf_norm": 0.19047243893146515, + "block11_k_max_spectral_norm": 0.026100993156433105, + "block11_v_update_fnorm": 0.14201486110687256, + "block11_v_max_l1_linf_norm": 0.17003321647644043, + "block11_v_max_spectral_norm": 0.035978127270936966, + "block11_o_update_fnorm": 0.15630248188972473, + "block11_o_max_l1_linf_norm": 0.15929216146469116, + "block11_o_max_spectral_norm": 0.040911827236413956, + "block11_mlp_win_update_fnorm": 0.31722983717918396, + "block11_mlp_win_max_l1_linf_norm": 0.17536768317222595, + 
"block11_mlp_win_max_spectral_norm": 0.04924710839986801, + "block11_mlp_wout_update_fnorm": 0.28096985816955566, + "block11_mlp_wout_max_l1_linf_norm": 0.5729019641876221, + "block11_mlp_wout_max_spectral_norm": 0.06728851795196533, + "total_sharpness": 0.0099570881575346, + "block_total_sharpness": 0.01467221137136221, + "v_norm_block": 1.7713301181793213, + "v_T_H_v_block": 0.046035684645175934, + "v_norm": 2.2145795822143555, + "ip_v_neg_g_hvp": 0.06489206105470657, + "cos_v_neg_g_hvp": 0.06959269940853119, + "g_hvp_norm": 0.42105284333229065, + "ip_v_neg_g_t": 0.0653086006641388, + "cos_v_neg_g_t": 0.07905667275190353, + "g_t_norm": 0.3730272352695465, + "g_norm": 0.42105284333229065, + "hv_norm": 0.709900975227356, + "cos_v_hv": 0.031061742454767227, + "hg_norm": 9.447190284729004, + "cos_g_hg": 0.4219956398010254, + "v_parallel_norm": 0.006882394663989544, + "v_perp_norm": 2.214568853378296, + "embed_lm_head_v_norm": 1.3291919231414795, + "embed_lm_head_cos_v_neg_g": 0.09111231565475464, + "layer_1_v_norm": 0.4936727285385132, + "layer_1_cos_v_neg_g": 0.15907599031925201, + "layer_2_v_norm": 0.5024579167366028, + "layer_2_cos_v_neg_g": 0.047044187784194946, + "layer_3_v_norm": 0.5016084313392639, + "layer_3_cos_v_neg_g": 0.056248001754283905, + "layer_4_v_norm": 0.50273597240448, + "layer_4_cos_v_neg_g": 0.05643627420067787, + "layer_5_v_norm": 0.5053119659423828, + "layer_5_cos_v_neg_g": 0.05344653129577637, + "layer_6_v_norm": 0.5024325251579285, + "layer_6_cos_v_neg_g": 0.06362764537334442, + "layer_7_v_norm": 0.5096402764320374, + "layer_7_cos_v_neg_g": 0.0738525465130806, + "layer_8_v_norm": 0.5055093169212341, + "layer_8_cos_v_neg_g": 0.0683327466249466, + "layer_9_v_norm": 0.5174412727355957, + "layer_9_cos_v_neg_g": 0.06710878759622574, + "layer_10_v_norm": 0.5326250791549683, + "layer_10_cos_v_neg_g": 0.07621659338474274, + "layer_11_v_norm": 0.5378103256225586, + "layer_11_cos_v_neg_g": 0.10196796804666519, + "layer_12_v_norm": 0.5228316783905029, + "layer_12_cos_v_neg_g": 0.1326381266117096, + "block0_q_v_norm": 0.15342950820922852, + "block0_q_cos_v_neg_g": 0.17158271372318268, + "block0_k_v_norm": 0.15009832382202148, + "block0_k_cos_v_neg_g": 0.16791534423828125, + "block0_v_v_norm": 0.14352844655513763, + "block0_v_cos_v_neg_g": 0.23885029554367065, + "block0_o_v_norm": 0.1463894248008728, + "block0_o_cos_v_neg_g": 0.17722241580486298, + "block0_mlp_win_v_norm": 0.2955491244792938, + "block0_mlp_win_cos_v_neg_g": 0.12159984558820724, + "block0_mlp_wout_v_norm": 0.26105305552482605, + "block0_mlp_wout_cos_v_neg_g": 0.20134930312633514, + "block3_q_v_norm": 0.15288305282592773, + "block3_q_cos_v_neg_g": 0.08078401535749435, + "block3_k_v_norm": 0.14890985190868378, + "block3_k_cos_v_neg_g": 0.039201028645038605, + "block3_v_v_norm": 0.13341644406318665, + "block3_v_cos_v_neg_g": 0.04760264232754707, + "block3_o_v_norm": 0.13799522817134857, + "block3_o_cos_v_neg_g": 0.1712060570716858, + "block3_mlp_win_v_norm": 0.3134181499481201, + "block3_mlp_win_cos_v_neg_g": 0.0667603611946106, + "block3_mlp_wout_v_norm": 0.2682792842388153, + "block3_mlp_wout_cos_v_neg_g": 0.2511485517024994, + "block7_q_v_norm": 0.1627318412065506, + "block7_q_cos_v_neg_g": 0.08502959460020065, + "block7_k_v_norm": 0.15397417545318604, + "block7_k_cos_v_neg_g": 0.21233801543712616, + "block7_v_v_norm": 0.1345202922821045, + "block7_v_cos_v_neg_g": 0.05839861184358597, + "block7_o_v_norm": 0.14553838968276978, + "block7_o_cos_v_neg_g": 0.22682636976242065, + "block7_mlp_win_v_norm": 
0.30371904373168945, + "block7_mlp_win_cos_v_neg_g": 0.09319629520177841, + "block7_mlp_wout_v_norm": 0.27148881554603577, + "block7_mlp_wout_cos_v_neg_g": 0.22928713262081146, + "block11_q_v_norm": 0.15952813625335693, + "block11_q_cos_v_neg_g": 0.1295211911201477, + "block11_k_v_norm": 0.15363368391990662, + "block11_k_cos_v_neg_g": 0.1914232224225998, + "block11_v_v_norm": 0.14201486110687256, + "block11_v_cos_v_neg_g": 0.08291689306497574, + "block11_o_v_norm": 0.15630248188972473, + "block11_o_cos_v_neg_g": 0.22767433524131775, + "block11_mlp_win_v_norm": 0.31722983717918396, + "block11_mlp_win_cos_v_neg_g": 0.12961450219154358, + "block11_mlp_wout_v_norm": 0.28096985816955566, + "block11_mlp_wout_cos_v_neg_g": 0.17551690340042114, + "embed_lm_head_sharpness": 0.00031026246142573655, + "layer_1_sharpness": 0.010957836173474789, + "layer_2_sharpness": 0.0016664797440171242, + "layer_3_sharpness": 0.0068757012486457825, + "layer_4_sharpness": 0.002463758457452059, + "layer_5_sharpness": 0.0013681213604286313, + "layer_6_sharpness": 0.0014801750658079982, + "layer_7_sharpness": 0.0015919508878141642, + "layer_8_sharpness": 0.00296092894859612, + "layer_9_sharpness": 0.002169563202187419, + "layer_10_sharpness": 0.0011239846935495734, + "layer_11_sharpness": 0.0010520912474021316, + "layer_12_sharpness": 0.0021870704367756844, + "block0_q_sharpness": 0.0021714521571993828, + "block0_k_sharpness": 0.0022132317535579205, + "block0_v_sharpness": 0.005734540522098541, + "block0_o_sharpness": 0.004094009753316641, + "block0_mlp_win_sharpness": 0.001380337867885828, + "block0_mlp_wout_sharpness": 0.004108792170882225, + "block3_q_sharpness": 0.0017190886428579688, + "block3_k_sharpness": 0.0007508392445743084, + "block3_v_sharpness": 0.0044618272222578526, + "block3_o_sharpness": 0.001068020355887711, + "block3_mlp_win_sharpness": 0.00023539159155916423, + "block3_mlp_wout_sharpness": 0.0006245491094887257, + "block7_q_sharpness": 0.00011159609857713804, + "block7_k_sharpness": 0.00027551528182812035, + "block7_v_sharpness": 0.006016099825501442, + "block7_o_sharpness": 0.0005236585275270045, + "block7_mlp_win_sharpness": 0.0009438134147785604, + "block7_mlp_wout_sharpness": 0.0008317690808326006, + "block11_q_sharpness": 0.0001019299088511616, + "block11_k_sharpness": 0.0002442379482090473, + "block11_v_sharpness": 0.0007741409935988486, + "block11_o_sharpness": 0.00020101691188756377, + "block11_mlp_win_sharpness": 0.0006449070642702281, + "block11_mlp_wout_sharpness": 0.0023655558470636606, + "sum_layer_numerators": 0.009139000745281999, + "block_diag_sharpness": 0.002912726443502868, + "cross_layer_sharpness": 0.011759484927859342 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..24c34c6aa1d89882066f92bb9029219c69dbd8e8 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2073557376861572, + "total_l1_linf_norm": 19590.1328125, + "total_spectral_norm": 2.2073557376861572, + "embed_lm_head_update_fnorm": 1.3310381174087524, + "embed_lm_head_max_l1_linf_norm": 0.41608425974845886, + "embed_lm_head_max_spectral_norm": 0.20637241005897522, + 
"layer_1_update_fnorm": 0.4827719032764435, + "layer_1_max_l1_linf_norm": 0.6452490091323853, + "layer_1_max_spectral_norm": 0.08135103434324265, + "layer_2_update_fnorm": 0.4982633590698242, + "layer_2_max_l1_linf_norm": 0.6018134951591492, + "layer_2_max_spectral_norm": 0.07980848103761673, + "layer_3_update_fnorm": 0.49379563331604004, + "layer_3_max_l1_linf_norm": 0.606063723564148, + "layer_3_max_spectral_norm": 0.07984475791454315, + "layer_4_update_fnorm": 0.4977642297744751, + "layer_4_max_l1_linf_norm": 0.5871429443359375, + "layer_4_max_spectral_norm": 0.0867304801940918, + "layer_5_update_fnorm": 0.4917960464954376, + "layer_5_max_l1_linf_norm": 0.5169471502304077, + "layer_5_max_spectral_norm": 0.0700443685054779, + "layer_6_update_fnorm": 0.5023554563522339, + "layer_6_max_l1_linf_norm": 0.5830857753753662, + "layer_6_max_spectral_norm": 0.08386518061161041, + "layer_7_update_fnorm": 0.5093839764595032, + "layer_7_max_l1_linf_norm": 0.5760335922241211, + "layer_7_max_spectral_norm": 0.07676862925291061, + "layer_8_update_fnorm": 0.5048056244850159, + "layer_8_max_l1_linf_norm": 0.6084764003753662, + "layer_8_max_spectral_norm": 0.07234472036361694, + "layer_9_update_fnorm": 0.5181894898414612, + "layer_9_max_l1_linf_norm": 0.5457712411880493, + "layer_9_max_spectral_norm": 0.057687994092702866, + "layer_10_update_fnorm": 0.5319095849990845, + "layer_10_max_l1_linf_norm": 0.5554857850074768, + "layer_10_max_spectral_norm": 0.04619365558028221, + "layer_11_update_fnorm": 0.5371009111404419, + "layer_11_max_l1_linf_norm": 0.5699857473373413, + "layer_11_max_spectral_norm": 0.05639321357011795, + "layer_12_update_fnorm": 0.5285527110099792, + "layer_12_max_l1_linf_norm": 0.5745750665664673, + "layer_12_max_spectral_norm": 0.08578667044639587, + "block0_q_update_fnorm": 0.1478002667427063, + "block0_q_max_l1_linf_norm": 0.18205174803733826, + "block0_q_max_spectral_norm": 0.051844995468854904, + "block0_k_update_fnorm": 0.149379700422287, + "block0_k_max_l1_linf_norm": 0.25309649109840393, + "block0_k_max_spectral_norm": 0.07465298473834991, + "block0_v_update_fnorm": 0.14178988337516785, + "block0_v_max_l1_linf_norm": 0.19731827080249786, + "block0_v_max_spectral_norm": 0.04881884530186653, + "block0_o_update_fnorm": 0.1392344832420349, + "block0_o_max_l1_linf_norm": 0.17707540094852448, + "block0_o_max_spectral_norm": 0.0396113246679306, + "block0_mlp_win_update_fnorm": 0.2884034514427185, + "block0_mlp_win_max_l1_linf_norm": 0.1639961302280426, + "block0_mlp_win_max_spectral_norm": 0.052658561617136, + "block0_mlp_wout_update_fnorm": 0.257137268781662, + "block0_mlp_wout_max_l1_linf_norm": 0.6452490091323853, + "block0_mlp_wout_max_spectral_norm": 0.08135103434324265, + "block3_q_update_fnorm": 0.15126824378967285, + "block3_q_max_l1_linf_norm": 0.18105322122573853, + "block3_q_max_spectral_norm": 0.051080722361803055, + "block3_k_update_fnorm": 0.14660868048667908, + "block3_k_max_l1_linf_norm": 0.2356509268283844, + "block3_k_max_spectral_norm": 0.03718586266040802, + "block3_v_update_fnorm": 0.13008716702461243, + "block3_v_max_l1_linf_norm": 0.1631627082824707, + "block3_v_max_spectral_norm": 0.04395398870110512, + "block3_o_update_fnorm": 0.13467518985271454, + "block3_o_max_l1_linf_norm": 0.16279208660125732, + "block3_o_max_spectral_norm": 0.04777868837118149, + "block3_mlp_win_update_fnorm": 0.31249281764030457, + "block3_mlp_win_max_l1_linf_norm": 0.1928255558013916, + "block3_mlp_win_max_spectral_norm": 0.07202507555484772, + "block3_mlp_wout_update_fnorm": 
0.2655751705169678, + "block3_mlp_wout_max_l1_linf_norm": 0.5871429443359375, + "block3_mlp_wout_max_spectral_norm": 0.0867304801940918, + "block7_q_update_fnorm": 0.15837819874286652, + "block7_q_max_l1_linf_norm": 0.1675332486629486, + "block7_q_max_spectral_norm": 0.025724656879901886, + "block7_k_update_fnorm": 0.1527518779039383, + "block7_k_max_l1_linf_norm": 0.15836767852306366, + "block7_k_max_spectral_norm": 0.026904884725809097, + "block7_v_update_fnorm": 0.13436615467071533, + "block7_v_max_l1_linf_norm": 0.14621631801128387, + "block7_v_max_spectral_norm": 0.029505599290132523, + "block7_o_update_fnorm": 0.1442682445049286, + "block7_o_max_l1_linf_norm": 0.1355820596218109, + "block7_o_max_spectral_norm": 0.029700666666030884, + "block7_mlp_win_update_fnorm": 0.3050490617752075, + "block7_mlp_win_max_l1_linf_norm": 0.1609230488538742, + "block7_mlp_win_max_spectral_norm": 0.046024393290281296, + "block7_mlp_wout_update_fnorm": 0.27271145582199097, + "block7_mlp_wout_max_l1_linf_norm": 0.6084764003753662, + "block7_mlp_wout_max_spectral_norm": 0.07234472036361694, + "block11_q_update_fnorm": 0.16343456506729126, + "block11_q_max_l1_linf_norm": 0.18014487624168396, + "block11_q_max_spectral_norm": 0.02962450124323368, + "block11_k_update_fnorm": 0.1568993330001831, + "block11_k_max_l1_linf_norm": 0.16667680442333221, + "block11_k_max_spectral_norm": 0.02642657607793808, + "block11_v_update_fnorm": 0.13733433187007904, + "block11_v_max_l1_linf_norm": 0.1543741524219513, + "block11_v_max_spectral_norm": 0.03494903817772865, + "block11_o_update_fnorm": 0.15024013817310333, + "block11_o_max_l1_linf_norm": 0.15732735395431519, + "block11_o_max_spectral_norm": 0.03568999841809273, + "block11_mlp_win_update_fnorm": 0.32005971670150757, + "block11_mlp_win_max_l1_linf_norm": 0.17317312955856323, + "block11_mlp_win_max_spectral_norm": 0.05342823639512062, + "block11_mlp_wout_update_fnorm": 0.28988876938819885, + "block11_mlp_wout_max_l1_linf_norm": 0.5745750665664673, + "block11_mlp_wout_max_spectral_norm": 0.08578667044639587, + "total_sharpness": 0.0065831467509269714, + "block_total_sharpness": 0.009310091845691204, + "v_norm_block": 1.7608968019485474, + "v_T_H_v_block": 0.028868336230516434, + "v_norm": 2.2073557376861572, + "ip_v_neg_g_hvp": 0.055340368300676346, + "cos_v_neg_g_hvp": 0.06970350444316864, + "g_hvp_norm": 0.3596790134906769, + "ip_v_neg_g_t": 0.055752526968717575, + "cos_v_neg_g_t": 0.08458318561315536, + "g_t_norm": 0.29861265420913696, + "g_norm": 0.3596790134906769, + "hv_norm": 0.5058419704437256, + "cos_v_hv": 0.02872704714536667, + "hg_norm": 10.451447486877441, + "cos_g_hg": 0.3628765642642975, + "v_parallel_norm": 0.01022217608988285, + "v_perp_norm": 2.207332134246826, + "embed_lm_head_v_norm": 1.3310381174087524, + "embed_lm_head_cos_v_neg_g": 0.07842979580163956, + "layer_1_v_norm": 0.4827719032764435, + "layer_1_cos_v_neg_g": 0.13534021377563477, + "layer_2_v_norm": 0.4982633590698242, + "layer_2_cos_v_neg_g": 0.06880202144384384, + "layer_3_v_norm": 0.49379560351371765, + "layer_3_cos_v_neg_g": 0.06110220402479172, + "layer_4_v_norm": 0.4977642297744751, + "layer_4_cos_v_neg_g": 0.05868566036224365, + "layer_5_v_norm": 0.4917960464954376, + "layer_5_cos_v_neg_g": 0.04802071675658226, + "layer_6_v_norm": 0.5023554563522339, + "layer_6_cos_v_neg_g": 0.057725343853235245, + "layer_7_v_norm": 0.5093839764595032, + "layer_7_cos_v_neg_g": 0.06806423515081406, + "layer_8_v_norm": 0.5048056244850159, + "layer_8_cos_v_neg_g": 0.06612690538167953, + 
"layer_9_v_norm": 0.5181894898414612, + "layer_9_cos_v_neg_g": 0.062219709157943726, + "layer_10_v_norm": 0.5319095849990845, + "layer_10_cos_v_neg_g": 0.07642053812742233, + "layer_11_v_norm": 0.5371009111404419, + "layer_11_cos_v_neg_g": 0.09899809211492538, + "layer_12_v_norm": 0.5285527110099792, + "layer_12_cos_v_neg_g": 0.13409686088562012, + "block0_q_v_norm": 0.1478002667427063, + "block0_q_cos_v_neg_g": 0.1555309295654297, + "block0_k_v_norm": 0.149379700422287, + "block0_k_cos_v_neg_g": 0.17930978536605835, + "block0_v_v_norm": 0.14178988337516785, + "block0_v_cos_v_neg_g": 0.21768654882907867, + "block0_o_v_norm": 0.1392344832420349, + "block0_o_cos_v_neg_g": 0.16584746539592743, + "block0_mlp_win_v_norm": 0.2884034514427185, + "block0_mlp_win_cos_v_neg_g": 0.10047169029712677, + "block0_mlp_wout_v_norm": 0.257137268781662, + "block0_mlp_wout_cos_v_neg_g": 0.17009437084197998, + "block3_q_v_norm": 0.15126824378967285, + "block3_q_cos_v_neg_g": 0.06363333761692047, + "block3_k_v_norm": 0.14660868048667908, + "block3_k_cos_v_neg_g": 0.06421280652284622, + "block3_v_v_norm": 0.13008716702461243, + "block3_v_cos_v_neg_g": 0.041596390306949615, + "block3_o_v_norm": 0.13467518985271454, + "block3_o_cos_v_neg_g": 0.15977096557617188, + "block3_mlp_win_v_norm": 0.31249281764030457, + "block3_mlp_win_cos_v_neg_g": 0.06363549083471298, + "block3_mlp_wout_v_norm": 0.2655751705169678, + "block3_mlp_wout_cos_v_neg_g": 0.22481176257133484, + "block7_q_v_norm": 0.15837819874286652, + "block7_q_cos_v_neg_g": 0.07173877209424973, + "block7_k_v_norm": 0.1527518779039383, + "block7_k_cos_v_neg_g": 0.19651958346366882, + "block7_v_v_norm": 0.13436615467071533, + "block7_v_cos_v_neg_g": 0.055597350001335144, + "block7_o_v_norm": 0.1442682445049286, + "block7_o_cos_v_neg_g": 0.21969228982925415, + "block7_mlp_win_v_norm": 0.3050490617752075, + "block7_mlp_win_cos_v_neg_g": 0.08391277492046356, + "block7_mlp_wout_v_norm": 0.27271145582199097, + "block7_mlp_wout_cos_v_neg_g": 0.2273789644241333, + "block11_q_v_norm": 0.16343456506729126, + "block11_q_cos_v_neg_g": 0.11789894849061966, + "block11_k_v_norm": 0.1568993330001831, + "block11_k_cos_v_neg_g": 0.1758367419242859, + "block11_v_v_norm": 0.13733433187007904, + "block11_v_cos_v_neg_g": 0.07506921887397766, + "block11_o_v_norm": 0.15024013817310333, + "block11_o_cos_v_neg_g": 0.21007047593593597, + "block11_mlp_win_v_norm": 0.32005971670150757, + "block11_mlp_win_cos_v_neg_g": 0.13346797227859497, + "block11_mlp_wout_v_norm": 0.28988876938819885, + "block11_mlp_wout_cos_v_neg_g": 0.19080792367458344, + "embed_lm_head_sharpness": 0.00038340050377883017, + "layer_1_sharpness": 0.013996871188282967, + "layer_2_sharpness": 0.0005249023670330644, + "layer_3_sharpness": 0.0011080156546086073, + "layer_4_sharpness": 0.001455503050237894, + "layer_5_sharpness": 0.0009522587060928345, + "layer_6_sharpness": 0.0016433954006060958, + "layer_7_sharpness": 0.0015476841945201159, + "layer_8_sharpness": 0.002491771476343274, + "layer_9_sharpness": 0.0018237583572044969, + "layer_10_sharpness": 0.0009342028643004596, + "layer_11_sharpness": 0.0008647216018289328, + "layer_12_sharpness": 0.0032598688267171383, + "block0_q_sharpness": 0.005038361996412277, + "block0_k_sharpness": 0.007396867964416742, + "block0_v_sharpness": 0.012002907693386078, + "block0_o_sharpness": 0.006731199100613594, + "block0_mlp_win_sharpness": 0.0011351271532475948, + "block0_mlp_wout_sharpness": 0.0027455068193376064, + "block3_q_sharpness": 0.0009866030886769295, + "block3_k_sharpness": 
0.0004841239715460688, + "block3_v_sharpness": 0.0029522753320634365, + "block3_o_sharpness": 0.0008119004778563976, + "block3_mlp_win_sharpness": 0.0002037777449004352, + "block3_mlp_wout_sharpness": 0.00041671391227282584, + "block7_q_sharpness": 9.798559040063992e-05, + "block7_k_sharpness": 0.00028532560099847615, + "block7_v_sharpness": 0.005305747967213392, + "block7_o_sharpness": 0.0004252265498507768, + "block7_mlp_win_sharpness": 0.0007516557816416025, + "block7_mlp_wout_sharpness": 0.0007122356328181922, + "block11_q_sharpness": 8.675405115354806e-05, + "block11_k_sharpness": 0.00015687209088355303, + "block11_v_sharpness": 0.0006047615897841752, + "block11_o_sharpness": 0.00014527214807458222, + "block11_mlp_win_sharpness": 0.0007811074610799551, + "block11_mlp_wout_sharpness": 0.004446804989129305, + "sum_layer_numerators": 0.007619134045833608, + "block_diag_sharpness": 0.002457184713757588, + "cross_layer_sharpness": 0.006852907131933616 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..ba1bf5bb23fecce5e8c98c128a937f504d19530f --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_6500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.217879056930542, + "total_l1_linf_norm": 19716.80859375, + "total_spectral_norm": 2.217879295349121, + "embed_lm_head_update_fnorm": 1.3302679061889648, + "embed_lm_head_max_l1_linf_norm": 0.33512258529663086, + "embed_lm_head_max_spectral_norm": 0.21131694316864014, + "layer_1_update_fnorm": 0.49133530259132385, + "layer_1_max_l1_linf_norm": 0.6004868745803833, + "layer_1_max_spectral_norm": 0.08789868652820587, + "layer_2_update_fnorm": 0.5060881972312927, + "layer_2_max_l1_linf_norm": 0.5903525352478027, + "layer_2_max_spectral_norm": 0.08090823143720627, + "layer_3_update_fnorm": 0.495525598526001, + "layer_3_max_l1_linf_norm": 0.6999650001525879, + "layer_3_max_spectral_norm": 0.09075351059436798, + "layer_4_update_fnorm": 0.49688518047332764, + "layer_4_max_l1_linf_norm": 0.6409996151924133, + "layer_4_max_spectral_norm": 0.09296763688325882, + "layer_5_update_fnorm": 0.49899014830589294, + "layer_5_max_l1_linf_norm": 0.6023988723754883, + "layer_5_max_spectral_norm": 0.07740632444620132, + "layer_6_update_fnorm": 0.504794180393219, + "layer_6_max_l1_linf_norm": 0.6231706738471985, + "layer_6_max_spectral_norm": 0.08738438040018082, + "layer_7_update_fnorm": 0.5097715854644775, + "layer_7_max_l1_linf_norm": 0.6065211296081543, + "layer_7_max_spectral_norm": 0.07832854241132736, + "layer_8_update_fnorm": 0.5059956908226013, + "layer_8_max_l1_linf_norm": 0.5664266347885132, + "layer_8_max_spectral_norm": 0.07398835569620132, + "layer_9_update_fnorm": 0.5215620994567871, + "layer_9_max_l1_linf_norm": 0.5588952898979187, + "layer_9_max_spectral_norm": 0.06359152495861053, + "layer_10_update_fnorm": 0.5386348366737366, + "layer_10_max_l1_linf_norm": 0.5611462593078613, + "layer_10_max_spectral_norm": 0.04721781983971596, + "layer_11_update_fnorm": 0.5415374636650085, + "layer_11_max_l1_linf_norm": 0.5718190670013428, + "layer_11_max_spectral_norm": 0.056264784187078476, + "layer_12_update_fnorm": 0.5331743955612183, + "layer_12_max_l1_linf_norm": 
0.5973535776138306, + "layer_12_max_spectral_norm": 0.07217548787593842, + "block0_q_update_fnorm": 0.15292927622795105, + "block0_q_max_l1_linf_norm": 0.25998055934906006, + "block0_q_max_spectral_norm": 0.05906322970986366, + "block0_k_update_fnorm": 0.15240223705768585, + "block0_k_max_l1_linf_norm": 0.26154401898384094, + "block0_k_max_spectral_norm": 0.07742850482463837, + "block0_v_update_fnorm": 0.14453038573265076, + "block0_v_max_l1_linf_norm": 0.18250468373298645, + "block0_v_max_spectral_norm": 0.06591583043336868, + "block0_o_update_fnorm": 0.14466483891010284, + "block0_o_max_l1_linf_norm": 0.16491913795471191, + "block0_o_max_spectral_norm": 0.05233289301395416, + "block0_mlp_win_update_fnorm": 0.29374808073043823, + "block0_mlp_win_max_l1_linf_norm": 0.16574542224407196, + "block0_mlp_win_max_spectral_norm": 0.05439687520265579, + "block0_mlp_wout_update_fnorm": 0.25802305340766907, + "block0_mlp_wout_max_l1_linf_norm": 0.6004868745803833, + "block0_mlp_wout_max_spectral_norm": 0.08789868652820587, + "block3_q_update_fnorm": 0.14953453838825226, + "block3_q_max_l1_linf_norm": 0.1825026422739029, + "block3_q_max_spectral_norm": 0.05160193517804146, + "block3_k_update_fnorm": 0.1434752196073532, + "block3_k_max_l1_linf_norm": 0.2212052047252655, + "block3_k_max_spectral_norm": 0.03275546804070473, + "block3_v_update_fnorm": 0.13268131017684937, + "block3_v_max_l1_linf_norm": 0.1562539041042328, + "block3_v_max_spectral_norm": 0.042748961597681046, + "block3_o_update_fnorm": 0.13877004384994507, + "block3_o_max_l1_linf_norm": 0.16568370163440704, + "block3_o_max_spectral_norm": 0.051684677600860596, + "block3_mlp_win_update_fnorm": 0.3112194240093231, + "block3_mlp_win_max_l1_linf_norm": 0.18801316618919373, + "block3_mlp_win_max_spectral_norm": 0.07280980050563812, + "block3_mlp_wout_update_fnorm": 0.26473960280418396, + "block3_mlp_wout_max_l1_linf_norm": 0.6409996151924133, + "block3_mlp_wout_max_spectral_norm": 0.09296763688325882, + "block7_q_update_fnorm": 0.16198372840881348, + "block7_q_max_l1_linf_norm": 0.16769225895404816, + "block7_q_max_spectral_norm": 0.024503715336322784, + "block7_k_update_fnorm": 0.15386554598808289, + "block7_k_max_l1_linf_norm": 0.1582259237766266, + "block7_k_max_spectral_norm": 0.028286149725317955, + "block7_v_update_fnorm": 0.13763493299484253, + "block7_v_max_l1_linf_norm": 0.14311347901821136, + "block7_v_max_spectral_norm": 0.03322276845574379, + "block7_o_update_fnorm": 0.14615458250045776, + "block7_o_max_l1_linf_norm": 0.14253520965576172, + "block7_o_max_spectral_norm": 0.0304484311491251, + "block7_mlp_win_update_fnorm": 0.3046170771121979, + "block7_mlp_win_max_l1_linf_norm": 0.18770617246627808, + "block7_mlp_win_max_spectral_norm": 0.043432530015707016, + "block7_mlp_wout_update_fnorm": 0.27000418305397034, + "block7_mlp_wout_max_l1_linf_norm": 0.5664266347885132, + "block7_mlp_wout_max_spectral_norm": 0.07398835569620132, + "block11_q_update_fnorm": 0.1623387634754181, + "block11_q_max_l1_linf_norm": 0.1537422388792038, + "block11_q_max_spectral_norm": 0.02672048844397068, + "block11_k_update_fnorm": 0.15794450044631958, + "block11_k_max_l1_linf_norm": 0.19406355917453766, + "block11_k_max_spectral_norm": 0.026549912989139557, + "block11_v_update_fnorm": 0.14457839727401733, + "block11_v_max_l1_linf_norm": 0.1520993411540985, + "block11_v_max_spectral_norm": 0.03639821708202362, + "block11_o_update_fnorm": 0.15697817504405975, + "block11_o_max_l1_linf_norm": 0.15543198585510254, + "block11_o_max_spectral_norm": 
0.0357915535569191, + "block11_mlp_win_update_fnorm": 0.322070837020874, + "block11_mlp_win_max_l1_linf_norm": 0.16868332028388977, + "block11_mlp_win_max_spectral_norm": 0.04953081160783768, + "block11_mlp_wout_update_fnorm": 0.2890752851963043, + "block11_mlp_wout_max_l1_linf_norm": 0.5973535776138306, + "block11_mlp_wout_max_spectral_norm": 0.07217548787593842, + "total_sharpness": 0.006418891716748476, + "block_total_sharpness": 0.00918511301279068, + "v_norm_block": 1.77464759349823, + "v_T_H_v_block": 0.028927359730005264, + "v_norm": 2.217879056930542, + "ip_v_neg_g_hvp": 0.05743962153792381, + "cos_v_neg_g_hvp": 0.06219375133514404, + "g_hvp_norm": 0.41641557216644287, + "ip_v_neg_g_t": 0.057876668870449066, + "cos_v_neg_g_t": 0.07297223061323166, + "g_t_norm": 0.3576086759567261, + "g_norm": 0.41641557216644287, + "hv_norm": 0.4619430601596832, + "cos_v_hv": 0.030818352475762367, + "hg_norm": 7.117341041564941, + "cos_g_hg": 0.661406397819519, + "v_parallel_norm": 0.0075242891907691956, + "v_perp_norm": 2.2178661823272705, + "embed_lm_head_v_norm": 1.3302679061889648, + "embed_lm_head_cos_v_neg_g": 0.07789233326911926, + "layer_1_v_norm": 0.49133530259132385, + "layer_1_cos_v_neg_g": 0.12286323308944702, + "layer_2_v_norm": 0.5060881972312927, + "layer_2_cos_v_neg_g": 0.058509599417448044, + "layer_3_v_norm": 0.4955255687236786, + "layer_3_cos_v_neg_g": 0.05213601142168045, + "layer_4_v_norm": 0.49688518047332764, + "layer_4_cos_v_neg_g": 0.05516828969120979, + "layer_5_v_norm": 0.49899014830589294, + "layer_5_cos_v_neg_g": 0.04411156475543976, + "layer_6_v_norm": 0.504794180393219, + "layer_6_cos_v_neg_g": 0.054430048912763596, + "layer_7_v_norm": 0.5097715854644775, + "layer_7_cos_v_neg_g": 0.06291068345308304, + "layer_8_v_norm": 0.5059956908226013, + "layer_8_cos_v_neg_g": 0.05640784278512001, + "layer_9_v_norm": 0.5215620994567871, + "layer_9_cos_v_neg_g": 0.05675449222326279, + "layer_10_v_norm": 0.5386348366737366, + "layer_10_cos_v_neg_g": 0.06651879101991653, + "layer_11_v_norm": 0.5415374636650085, + "layer_11_cos_v_neg_g": 0.08927957713603973, + "layer_12_v_norm": 0.5331743955612183, + "layer_12_cos_v_neg_g": 0.11972399801015854, + "block0_q_v_norm": 0.15292927622795105, + "block0_q_cos_v_neg_g": 0.15320415794849396, + "block0_k_v_norm": 0.15240223705768585, + "block0_k_cos_v_neg_g": 0.2008572220802307, + "block0_v_v_norm": 0.14453038573265076, + "block0_v_cos_v_neg_g": 0.1784391552209854, + "block0_o_v_norm": 0.14466483891010284, + "block0_o_cos_v_neg_g": 0.14063189923763275, + "block0_mlp_win_v_norm": 0.29374808073043823, + "block0_mlp_win_cos_v_neg_g": 0.09138255566358566, + "block0_mlp_wout_v_norm": 0.25802305340766907, + "block0_mlp_wout_cos_v_neg_g": 0.1581358015537262, + "block3_q_v_norm": 0.14953453838825226, + "block3_q_cos_v_neg_g": 0.07936352491378784, + "block3_k_v_norm": 0.1434752196073532, + "block3_k_cos_v_neg_g": 0.06411104649305344, + "block3_v_v_norm": 0.13268131017684937, + "block3_v_cos_v_neg_g": 0.036898981779813766, + "block3_o_v_norm": 0.13877004384994507, + "block3_o_cos_v_neg_g": 0.17475609481334686, + "block3_mlp_win_v_norm": 0.3112194240093231, + "block3_mlp_win_cos_v_neg_g": 0.05942194163799286, + "block3_mlp_wout_v_norm": 0.26473960280418396, + "block3_mlp_wout_cos_v_neg_g": 0.22569824755191803, + "block7_q_v_norm": 0.16198372840881348, + "block7_q_cos_v_neg_g": 0.07350216805934906, + "block7_k_v_norm": 0.15386554598808289, + "block7_k_cos_v_neg_g": 0.2111142873764038, + "block7_v_v_norm": 0.13763493299484253, + "block7_v_cos_v_neg_g": 
0.045749861747026443, + "block7_o_v_norm": 0.14615458250045776, + "block7_o_cos_v_neg_g": 0.2236151546239853, + "block7_mlp_win_v_norm": 0.3046170771121979, + "block7_mlp_win_cos_v_neg_g": 0.07777617871761322, + "block7_mlp_wout_v_norm": 0.27000418305397034, + "block7_mlp_wout_cos_v_neg_g": 0.23151880502700806, + "block11_q_v_norm": 0.1623387634754181, + "block11_q_cos_v_neg_g": 0.114716075360775, + "block11_k_v_norm": 0.15794450044631958, + "block11_k_cos_v_neg_g": 0.18693850934505463, + "block11_v_v_norm": 0.14457839727401733, + "block11_v_cos_v_neg_g": 0.06853329390287399, + "block11_o_v_norm": 0.15697817504405975, + "block11_o_cos_v_neg_g": 0.21968834102153778, + "block11_mlp_win_v_norm": 0.322070837020874, + "block11_mlp_win_cos_v_neg_g": 0.11616744846105576, + "block11_mlp_wout_v_norm": 0.2890752851963043, + "block11_mlp_wout_cos_v_neg_g": 0.18278726935386658, + "embed_lm_head_sharpness": 0.0003107522497884929, + "layer_1_sharpness": 0.007599147502332926, + "layer_2_sharpness": 0.000542927416972816, + "layer_3_sharpness": 0.0012626685202121735, + "layer_4_sharpness": 0.001527991727925837, + "layer_5_sharpness": 0.0010294943349435925, + "layer_6_sharpness": 0.0013714063679799438, + "layer_7_sharpness": 0.001258857548236847, + "layer_8_sharpness": 0.0022936903405934572, + "layer_9_sharpness": 0.0020265281200408936, + "layer_10_sharpness": 0.0013528406852856278, + "layer_11_sharpness": 0.00106300157494843, + "layer_12_sharpness": 0.002490994054824114, + "block0_q_sharpness": 0.0008922139531932771, + "block0_k_sharpness": 0.003945039119571447, + "block0_v_sharpness": 0.006166176404803991, + "block0_o_sharpness": 0.005309625528752804, + "block0_mlp_win_sharpness": 0.0014485325664281845, + "block0_mlp_wout_sharpness": 0.0024784381967037916, + "block3_q_sharpness": 0.0012957599246874452, + "block3_k_sharpness": 0.0006383113795891404, + "block3_v_sharpness": 0.002278817817568779, + "block3_o_sharpness": 0.0011227852664887905, + "block3_mlp_win_sharpness": 0.0001942430972121656, + "block3_mlp_wout_sharpness": 0.00043271604226902127, + "block7_q_sharpness": 0.00012270716251805425, + "block7_k_sharpness": 0.00022709231416229159, + "block7_v_sharpness": 0.004467345774173737, + "block7_o_sharpness": 0.0004355615237727761, + "block7_mlp_win_sharpness": 0.0006345699075609446, + "block7_mlp_wout_sharpness": 0.0007991988677531481, + "block11_q_sharpness": 8.942050772020593e-05, + "block11_k_sharpness": 0.00020211172522976995, + "block11_v_sharpness": 0.0007180750253610313, + "block11_o_sharpness": 0.00017685387865640223, + "block11_mlp_win_sharpness": 0.0007592678884975612, + "block11_mlp_wout_sharpness": 0.0027171997353434563, + "sum_layer_numerators": 0.0061446850347318255, + "block_diag_sharpness": 0.0019510813502878518, + "cross_layer_sharpness": 0.007234031662502828 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..363a8d048a706baf973b9402230ac50792cfd7bc --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2359697818756104, + "total_l1_linf_norm": 19886.640625, + "total_spectral_norm": 2.2359697818756104, + "embed_lm_head_update_fnorm": 1.329874873161316, 
+ "embed_lm_head_max_l1_linf_norm": 0.35980069637298584, + "embed_lm_head_max_spectral_norm": 0.21005882322788239, + "layer_1_update_fnorm": 0.5077475309371948, + "layer_1_max_l1_linf_norm": 0.6615148782730103, + "layer_1_max_spectral_norm": 0.08768025785684586, + "layer_2_update_fnorm": 0.5136144161224365, + "layer_2_max_l1_linf_norm": 0.6013402938842773, + "layer_2_max_spectral_norm": 0.08189399540424347, + "layer_3_update_fnorm": 0.5055730938911438, + "layer_3_max_l1_linf_norm": 0.6829129457473755, + "layer_3_max_spectral_norm": 0.08910589665174484, + "layer_4_update_fnorm": 0.506813645362854, + "layer_4_max_l1_linf_norm": 0.6161026954650879, + "layer_4_max_spectral_norm": 0.09064669162034988, + "layer_5_update_fnorm": 0.5005315542221069, + "layer_5_max_l1_linf_norm": 0.5689353942871094, + "layer_5_max_spectral_norm": 0.07034949213266373, + "layer_6_update_fnorm": 0.5055644512176514, + "layer_6_max_l1_linf_norm": 0.6056473255157471, + "layer_6_max_spectral_norm": 0.08289938420057297, + "layer_7_update_fnorm": 0.5144784450531006, + "layer_7_max_l1_linf_norm": 0.5980411171913147, + "layer_7_max_spectral_norm": 0.07735170423984528, + "layer_8_update_fnorm": 0.512957751750946, + "layer_8_max_l1_linf_norm": 0.5635507106781006, + "layer_8_max_spectral_norm": 0.07109608501195908, + "layer_9_update_fnorm": 0.5265750885009766, + "layer_9_max_l1_linf_norm": 0.5403259992599487, + "layer_9_max_spectral_norm": 0.0539439432322979, + "layer_10_update_fnorm": 0.5399253964424133, + "layer_10_max_l1_linf_norm": 0.5616840720176697, + "layer_10_max_spectral_norm": 0.04430318996310234, + "layer_11_update_fnorm": 0.5496845841407776, + "layer_11_max_l1_linf_norm": 0.5906344652175903, + "layer_11_max_spectral_norm": 0.05469932779669762, + "layer_12_update_fnorm": 0.5403950214385986, + "layer_12_max_l1_linf_norm": 0.5714649558067322, + "layer_12_max_spectral_norm": 0.0697651281952858, + "block0_q_update_fnorm": 0.1551523655653, + "block0_q_max_l1_linf_norm": 0.22791190445423126, + "block0_q_max_spectral_norm": 0.05853546783328056, + "block0_k_update_fnorm": 0.15581852197647095, + "block0_k_max_l1_linf_norm": 0.2289276123046875, + "block0_k_max_spectral_norm": 0.07688611000776291, + "block0_v_update_fnorm": 0.15017324686050415, + "block0_v_max_l1_linf_norm": 0.1859802007675171, + "block0_v_max_spectral_norm": 0.05080191418528557, + "block0_o_update_fnorm": 0.15388740599155426, + "block0_o_max_l1_linf_norm": 0.16098365187644958, + "block0_o_max_spectral_norm": 0.04297390207648277, + "block0_mlp_win_update_fnorm": 0.3010120987892151, + "block0_mlp_win_max_l1_linf_norm": 0.1546737253665924, + "block0_mlp_win_max_spectral_norm": 0.043726034462451935, + "block0_mlp_wout_update_fnorm": 0.2692466080188751, + "block0_mlp_wout_max_l1_linf_norm": 0.6615148782730103, + "block0_mlp_wout_max_spectral_norm": 0.08768025785684586, + "block3_q_update_fnorm": 0.15613244473934174, + "block3_q_max_l1_linf_norm": 0.1808406263589859, + "block3_q_max_spectral_norm": 0.053034815937280655, + "block3_k_update_fnorm": 0.1508885622024536, + "block3_k_max_l1_linf_norm": 0.19605384767055511, + "block3_k_max_spectral_norm": 0.036557212471961975, + "block3_v_update_fnorm": 0.1342429369688034, + "block3_v_max_l1_linf_norm": 0.174219012260437, + "block3_v_max_spectral_norm": 0.04413440823554993, + "block3_o_update_fnorm": 0.13696633279323578, + "block3_o_max_l1_linf_norm": 0.15319432318210602, + "block3_o_max_spectral_norm": 0.043532636016607285, + "block3_mlp_win_update_fnorm": 0.3160778880119324, + "block3_mlp_win_max_l1_linf_norm": 
0.2647939622402191, + "block3_mlp_win_max_spectral_norm": 0.06971454620361328, + "block3_mlp_wout_update_fnorm": 0.2699738144874573, + "block3_mlp_wout_max_l1_linf_norm": 0.6161026954650879, + "block3_mlp_wout_max_spectral_norm": 0.09064669162034988, + "block7_q_update_fnorm": 0.16312359273433685, + "block7_q_max_l1_linf_norm": 0.17770424485206604, + "block7_q_max_spectral_norm": 0.02448262646794319, + "block7_k_update_fnorm": 0.1566481590270996, + "block7_k_max_l1_linf_norm": 0.16740506887435913, + "block7_k_max_spectral_norm": 0.027020353823900223, + "block7_v_update_fnorm": 0.13909758627414703, + "block7_v_max_l1_linf_norm": 0.14356589317321777, + "block7_v_max_spectral_norm": 0.03031381405889988, + "block7_o_update_fnorm": 0.14820700883865356, + "block7_o_max_l1_linf_norm": 0.14008235931396484, + "block7_o_max_spectral_norm": 0.029574232175946236, + "block7_mlp_win_update_fnorm": 0.30983033776283264, + "block7_mlp_win_max_l1_linf_norm": 0.16684973239898682, + "block7_mlp_win_max_spectral_norm": 0.044106077402830124, + "block7_mlp_wout_update_fnorm": 0.2730385661125183, + "block7_mlp_wout_max_l1_linf_norm": 0.5635507106781006, + "block7_mlp_wout_max_spectral_norm": 0.07109608501195908, + "block11_q_update_fnorm": 0.16620223224163055, + "block11_q_max_l1_linf_norm": 0.18955664336681366, + "block11_q_max_spectral_norm": 0.026828764006495476, + "block11_k_update_fnorm": 0.16072623431682587, + "block11_k_max_l1_linf_norm": 0.177049919962883, + "block11_k_max_spectral_norm": 0.025274615734815598, + "block11_v_update_fnorm": 0.1491231769323349, + "block11_v_max_l1_linf_norm": 0.15501338243484497, + "block11_v_max_spectral_norm": 0.03473736718297005, + "block11_o_update_fnorm": 0.16050778329372406, + "block11_o_max_l1_linf_norm": 0.1638239324092865, + "block11_o_max_spectral_norm": 0.034257423132658005, + "block11_mlp_win_update_fnorm": 0.32509225606918335, + "block11_mlp_win_max_l1_linf_norm": 0.17080913484096527, + "block11_mlp_win_max_spectral_norm": 0.04906576871871948, + "block11_mlp_wout_update_fnorm": 0.2911278307437897, + "block11_mlp_wout_max_l1_linf_norm": 0.5714649558067322, + "block11_mlp_wout_max_spectral_norm": 0.0697651281952858, + "total_sharpness": 0.004970862064510584, + "block_total_sharpness": 0.006938896141946316, + "v_norm_block": 1.7974964380264282, + "v_T_H_v_block": 0.022419527173042297, + "v_norm": 2.2359697818756104, + "ip_v_neg_g_hvp": 0.05276186019182205, + "cos_v_neg_g_hvp": 0.06153726205229759, + "g_hvp_norm": 0.38345640897750854, + "ip_v_neg_g_t": 0.05302559584379196, + "cos_v_neg_g_t": 0.07401452213525772, + "g_t_norm": 0.320407509803772, + "g_norm": 0.38345640897750854, + "hv_norm": 0.4292432963848114, + "cos_v_hv": 0.025893699377775192, + "hg_norm": 6.231695175170898, + "cos_g_hg": 0.6752647161483765, + "v_parallel_norm": 0.008603210560977459, + "v_perp_norm": 2.2359533309936523, + "embed_lm_head_v_norm": 1.329874873161316, + "embed_lm_head_cos_v_neg_g": 0.07072842121124268, + "layer_1_v_norm": 0.5077475309371948, + "layer_1_cos_v_neg_g": 0.11965351551771164, + "layer_2_v_norm": 0.5136144161224365, + "layer_2_cos_v_neg_g": 0.05482623726129532, + "layer_3_v_norm": 0.5055730938911438, + "layer_3_cos_v_neg_g": 0.05279235169291496, + "layer_4_v_norm": 0.506813645362854, + "layer_4_cos_v_neg_g": 0.05163959786295891, + "layer_5_v_norm": 0.5005315542221069, + "layer_5_cos_v_neg_g": 0.04254821315407753, + "layer_6_v_norm": 0.5055644512176514, + "layer_6_cos_v_neg_g": 0.054642580449581146, + "layer_7_v_norm": 0.5144784450531006, + "layer_7_cos_v_neg_g": 
0.061266135424375534, + "layer_8_v_norm": 0.512957751750946, + "layer_8_cos_v_neg_g": 0.057006675750017166, + "layer_9_v_norm": 0.5265750885009766, + "layer_9_cos_v_neg_g": 0.055029015988111496, + "layer_10_v_norm": 0.5399253964424133, + "layer_10_cos_v_neg_g": 0.0653902068734169, + "layer_11_v_norm": 0.5496845245361328, + "layer_11_cos_v_neg_g": 0.08698676526546478, + "layer_12_v_norm": 0.5403950214385986, + "layer_12_cos_v_neg_g": 0.11656725406646729, + "block0_q_v_norm": 0.1551523655653, + "block0_q_cos_v_neg_g": 0.14600013196468353, + "block0_k_v_norm": 0.15581852197647095, + "block0_k_cos_v_neg_g": 0.13197612762451172, + "block0_v_v_norm": 0.15017324686050415, + "block0_v_cos_v_neg_g": 0.205953449010849, + "block0_o_v_norm": 0.15388740599155426, + "block0_o_cos_v_neg_g": 0.14629188179969788, + "block0_mlp_win_v_norm": 0.3010120987892151, + "block0_mlp_win_cos_v_neg_g": 0.08741475641727448, + "block0_mlp_wout_v_norm": 0.2692466080188751, + "block0_mlp_wout_cos_v_neg_g": 0.1569950133562088, + "block3_q_v_norm": 0.15613244473934174, + "block3_q_cos_v_neg_g": 0.07469531893730164, + "block3_k_v_norm": 0.1508885622024536, + "block3_k_cos_v_neg_g": 0.05591095611453056, + "block3_v_v_norm": 0.1342429369688034, + "block3_v_cos_v_neg_g": 0.03631002828478813, + "block3_o_v_norm": 0.13696633279323578, + "block3_o_cos_v_neg_g": 0.15502998232841492, + "block3_mlp_win_v_norm": 0.3160778880119324, + "block3_mlp_win_cos_v_neg_g": 0.05369480326771736, + "block3_mlp_wout_v_norm": 0.2699738144874573, + "block3_mlp_wout_cos_v_neg_g": 0.20909276604652405, + "block7_q_v_norm": 0.16312359273433685, + "block7_q_cos_v_neg_g": 0.07026228308677673, + "block7_k_v_norm": 0.1566481590270996, + "block7_k_cos_v_neg_g": 0.19673877954483032, + "block7_v_v_norm": 0.13909758627414703, + "block7_v_cos_v_neg_g": 0.046991389244794846, + "block7_o_v_norm": 0.14820700883865356, + "block7_o_cos_v_neg_g": 0.2179422229528427, + "block7_mlp_win_v_norm": 0.30983033776283264, + "block7_mlp_win_cos_v_neg_g": 0.07326433062553406, + "block7_mlp_wout_v_norm": 0.2730385661125183, + "block7_mlp_wout_cos_v_neg_g": 0.21560537815093994, + "block11_q_v_norm": 0.16620223224163055, + "block11_q_cos_v_neg_g": 0.11851173639297485, + "block11_k_v_norm": 0.16072623431682587, + "block11_k_cos_v_neg_g": 0.1940336376428604, + "block11_v_v_norm": 0.1491231769323349, + "block11_v_cos_v_neg_g": 0.06403020024299622, + "block11_o_v_norm": 0.16050778329372406, + "block11_o_cos_v_neg_g": 0.2130797952413559, + "block11_mlp_win_v_norm": 0.32509225606918335, + "block11_mlp_win_cos_v_neg_g": 0.11051242798566818, + "block11_mlp_wout_v_norm": 0.2911278307437897, + "block11_mlp_wout_cos_v_neg_g": 0.17859160900115967, + "embed_lm_head_sharpness": 0.00030045685707591474, + "layer_1_sharpness": 0.0055066682398319244, + "layer_2_sharpness": 0.0005288136308081448, + "layer_3_sharpness": 0.0011814843164756894, + "layer_4_sharpness": 0.001152588869445026, + "layer_5_sharpness": 0.0007120922673493624, + "layer_6_sharpness": 0.0010621732799336314, + "layer_7_sharpness": 0.0011661580065265298, + "layer_8_sharpness": 0.001924517098814249, + "layer_9_sharpness": 0.0015653701266273856, + "layer_10_sharpness": 0.0007884154911153018, + "layer_11_sharpness": 0.0007524400716647506, + "layer_12_sharpness": 0.002077731303870678, + "block0_q_sharpness": 0.001274125650525093, + "block0_k_sharpness": 0.002196912420913577, + "block0_v_sharpness": 0.0031373645178973675, + "block0_o_sharpness": 0.003317645052447915, + "block0_mlp_win_sharpness": 0.0009425298776477575, + 
"block0_mlp_wout_sharpness": 0.002429517451673746, + "block3_q_sharpness": 0.0008703807834535837, + "block3_k_sharpness": 0.0004495579923968762, + "block3_v_sharpness": 0.002036791993305087, + "block3_o_sharpness": 0.0007360833114944398, + "block3_mlp_win_sharpness": 0.00016014932771213353, + "block3_mlp_wout_sharpness": 0.000393768772482872, + "block7_q_sharpness": 8.000968227861449e-05, + "block7_k_sharpness": 0.00021124417253304273, + "block7_v_sharpness": 0.003936944529414177, + "block7_o_sharpness": 0.0003698538348544389, + "block7_mlp_win_sharpness": 0.0006179800839163363, + "block7_mlp_wout_sharpness": 0.0005659025628119707, + "block11_q_sharpness": 0.0001247973123099655, + "block11_k_sharpness": 0.00018631317652761936, + "block11_v_sharpness": 0.0005773901357315481, + "block11_o_sharpness": 0.00013351016968954355, + "block11_mlp_win_sharpness": 0.0006055083940736949, + "block11_mlp_wout_sharpness": 0.002348557347431779, + "sum_layer_numerators": 0.004920145886635645, + "block_diag_sharpness": 0.001522796616836075, + "cross_layer_sharpness": 0.005416099525110241 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..90f349e986359fd761f25eccd51924a5b1014d97 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_7500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.247102975845337, + "total_l1_linf_norm": 19990.125, + "total_spectral_norm": 2.247103452682495, + "embed_lm_head_update_fnorm": 1.331178069114685, + "embed_lm_head_max_l1_linf_norm": 0.4083956480026245, + "embed_lm_head_max_spectral_norm": 0.20292377471923828, + "layer_1_update_fnorm": 0.5111977458000183, + "layer_1_max_l1_linf_norm": 0.585112988948822, + "layer_1_max_spectral_norm": 0.08738110959529877, + "layer_2_update_fnorm": 0.5157666206359863, + "layer_2_max_l1_linf_norm": 0.5884043574333191, + "layer_2_max_spectral_norm": 0.08109819144010544, + "layer_3_update_fnorm": 0.5049395561218262, + "layer_3_max_l1_linf_norm": 0.5913490056991577, + "layer_3_max_spectral_norm": 0.08367212861776352, + "layer_4_update_fnorm": 0.5161705017089844, + "layer_4_max_l1_linf_norm": 0.6230329275131226, + "layer_4_max_spectral_norm": 0.09381922334432602, + "layer_5_update_fnorm": 0.5108900666236877, + "layer_5_max_l1_linf_norm": 0.5685228705406189, + "layer_5_max_spectral_norm": 0.07798685133457184, + "layer_6_update_fnorm": 0.5141477584838867, + "layer_6_max_l1_linf_norm": 0.635857105255127, + "layer_6_max_spectral_norm": 0.08433589339256287, + "layer_7_update_fnorm": 0.5195400714874268, + "layer_7_max_l1_linf_norm": 0.5816580057144165, + "layer_7_max_spectral_norm": 0.08287211507558823, + "layer_8_update_fnorm": 0.5157589912414551, + "layer_8_max_l1_linf_norm": 0.6049346923828125, + "layer_8_max_spectral_norm": 0.08400147408246994, + "layer_9_update_fnorm": 0.5251339077949524, + "layer_9_max_l1_linf_norm": 0.5853673219680786, + "layer_9_max_spectral_norm": 0.06791727244853973, + "layer_10_update_fnorm": 0.5416540503501892, + "layer_10_max_l1_linf_norm": 0.551868200302124, + "layer_10_max_spectral_norm": 0.047492727637290955, + "layer_11_update_fnorm": 0.551548182964325, + "layer_11_max_l1_linf_norm": 0.571404218673706, + "layer_11_max_spectral_norm": 
0.056538455188274384, + "layer_12_update_fnorm": 0.5422712564468384, + "layer_12_max_l1_linf_norm": 0.5614223480224609, + "layer_12_max_spectral_norm": 0.07831072062253952, + "block0_q_update_fnorm": 0.1533506214618683, + "block0_q_max_l1_linf_norm": 0.18219752609729767, + "block0_q_max_spectral_norm": 0.05185661464929581, + "block0_k_update_fnorm": 0.15527959167957306, + "block0_k_max_l1_linf_norm": 0.23798176646232605, + "block0_k_max_spectral_norm": 0.07316957414150238, + "block0_v_update_fnorm": 0.1535879522562027, + "block0_v_max_l1_linf_norm": 0.1809787154197693, + "block0_v_max_spectral_norm": 0.04756595194339752, + "block0_o_update_fnorm": 0.15573211014270782, + "block0_o_max_l1_linf_norm": 0.1720362901687622, + "block0_o_max_spectral_norm": 0.04392936825752258, + "block0_mlp_win_update_fnorm": 0.303272545337677, + "block0_mlp_win_max_l1_linf_norm": 0.17918050289154053, + "block0_mlp_win_max_spectral_norm": 0.046625327318906784, + "block0_mlp_wout_update_fnorm": 0.27156898379325867, + "block0_mlp_wout_max_l1_linf_norm": 0.585112988948822, + "block0_mlp_wout_max_spectral_norm": 0.08738110959529877, + "block3_q_update_fnorm": 0.1612909883260727, + "block3_q_max_l1_linf_norm": 0.21235391497612, + "block3_q_max_spectral_norm": 0.055472809821367264, + "block3_k_update_fnorm": 0.15550529956817627, + "block3_k_max_l1_linf_norm": 0.26258575916290283, + "block3_k_max_spectral_norm": 0.03799697011709213, + "block3_v_update_fnorm": 0.1350482553243637, + "block3_v_max_l1_linf_norm": 0.1733497977256775, + "block3_v_max_spectral_norm": 0.04656325280666351, + "block3_o_update_fnorm": 0.1416700929403305, + "block3_o_max_l1_linf_norm": 0.16730190813541412, + "block3_o_max_spectral_norm": 0.05121736600995064, + "block3_mlp_win_update_fnorm": 0.3211885690689087, + "block3_mlp_win_max_l1_linf_norm": 0.1964009404182434, + "block3_mlp_win_max_spectral_norm": 0.07180776447057724, + "block3_mlp_wout_update_fnorm": 0.2731145918369293, + "block3_mlp_wout_max_l1_linf_norm": 0.6230329275131226, + "block3_mlp_wout_max_spectral_norm": 0.09381922334432602, + "block7_q_update_fnorm": 0.16557300090789795, + "block7_q_max_l1_linf_norm": 0.19874180853366852, + "block7_q_max_spectral_norm": 0.03193614259362221, + "block7_k_update_fnorm": 0.15750469267368317, + "block7_k_max_l1_linf_norm": 0.1942163109779358, + "block7_k_max_spectral_norm": 0.0299705658107996, + "block7_v_update_fnorm": 0.1405961513519287, + "block7_v_max_l1_linf_norm": 0.14491716027259827, + "block7_v_max_spectral_norm": 0.031029582023620605, + "block7_o_update_fnorm": 0.1485753208398819, + "block7_o_max_l1_linf_norm": 0.14272594451904297, + "block7_o_max_spectral_norm": 0.029595565050840378, + "block7_mlp_win_update_fnorm": 0.3110537528991699, + "block7_mlp_win_max_l1_linf_norm": 0.1801367849111557, + "block7_mlp_win_max_spectral_norm": 0.04350021481513977, + "block7_mlp_wout_update_fnorm": 0.27396953105926514, + "block7_mlp_wout_max_l1_linf_norm": 0.6049346923828125, + "block7_mlp_wout_max_spectral_norm": 0.08400147408246994, + "block11_q_update_fnorm": 0.16534686088562012, + "block11_q_max_l1_linf_norm": 0.1581498682498932, + "block11_q_max_spectral_norm": 0.02762250229716301, + "block11_k_update_fnorm": 0.15926723182201385, + "block11_k_max_l1_linf_norm": 0.1712644100189209, + "block11_k_max_spectral_norm": 0.026737455278635025, + "block11_v_update_fnorm": 0.15090732276439667, + "block11_v_max_l1_linf_norm": 0.1710573434829712, + "block11_v_max_spectral_norm": 0.03492860496044159, + "block11_o_update_fnorm": 0.16210363805294037, + 
"block11_o_max_l1_linf_norm": 0.16331464052200317, + "block11_o_max_spectral_norm": 0.03711094334721565, + "block11_mlp_win_update_fnorm": 0.326657235622406, + "block11_mlp_win_max_l1_linf_norm": 0.18872645497322083, + "block11_mlp_win_max_spectral_norm": 0.05171508714556694, + "block11_mlp_wout_update_fnorm": 0.2923242747783661, + "block11_mlp_wout_max_l1_linf_norm": 0.5614223480224609, + "block11_mlp_wout_max_spectral_norm": 0.07831072062253952, + "total_sharpness": 0.007201227359473705, + "block_total_sharpness": 0.010357936844229698, + "v_norm_block": 1.810369849205017, + "v_T_H_v_block": 0.03394750505685806, + "v_norm": 2.247102975845337, + "ip_v_neg_g_hvp": 0.05694734677672386, + "cos_v_neg_g_hvp": 0.06218694522976875, + "g_hvp_norm": 0.4075222313404083, + "ip_v_neg_g_t": 0.057753756642341614, + "cos_v_neg_g_t": 0.07143168896436691, + "g_t_norm": 0.35980427265167236, + "g_norm": 0.4075222313404083, + "hv_norm": 0.6425034999847412, + "cos_v_hv": 0.02518569864332676, + "hg_norm": 7.056056022644043, + "cos_g_hg": 0.6807644367218018, + "v_parallel_norm": 0.007344823330640793, + "v_perp_norm": 2.247091054916382, + "embed_lm_head_v_norm": 1.331178069114685, + "embed_lm_head_cos_v_neg_g": 0.0772554948925972, + "layer_1_v_norm": 0.5111977458000183, + "layer_1_cos_v_neg_g": 0.1243722215294838, + "layer_2_v_norm": 0.5157666206359863, + "layer_2_cos_v_neg_g": 0.062397368252277374, + "layer_3_v_norm": 0.504939615726471, + "layer_3_cos_v_neg_g": 0.05829824134707451, + "layer_4_v_norm": 0.5161705017089844, + "layer_4_cos_v_neg_g": 0.059755243360996246, + "layer_5_v_norm": 0.5108900666236877, + "layer_5_cos_v_neg_g": 0.04533512890338898, + "layer_6_v_norm": 0.5141477584838867, + "layer_6_cos_v_neg_g": 0.05510079488158226, + "layer_7_v_norm": 0.5195400714874268, + "layer_7_cos_v_neg_g": 0.06243918091058731, + "layer_8_v_norm": 0.5157589912414551, + "layer_8_cos_v_neg_g": 0.05978073179721832, + "layer_9_v_norm": 0.5251339077949524, + "layer_9_cos_v_neg_g": 0.05489484965801239, + "layer_10_v_norm": 0.5416540503501892, + "layer_10_cos_v_neg_g": 0.06221849471330643, + "layer_11_v_norm": 0.551548182964325, + "layer_11_cos_v_neg_g": 0.08521264046430588, + "layer_12_v_norm": 0.5422712564468384, + "layer_12_cos_v_neg_g": 0.12203409522771835, + "block0_q_v_norm": 0.1533506214618683, + "block0_q_cos_v_neg_g": 0.15269982814788818, + "block0_k_v_norm": 0.15527959167957306, + "block0_k_cos_v_neg_g": 0.16228769719600677, + "block0_v_v_norm": 0.1535879522562027, + "block0_v_cos_v_neg_g": 0.2093571275472641, + "block0_o_v_norm": 0.15573211014270782, + "block0_o_cos_v_neg_g": 0.14714428782463074, + "block0_mlp_win_v_norm": 0.303272545337677, + "block0_mlp_win_cos_v_neg_g": 0.09098750352859497, + "block0_mlp_wout_v_norm": 0.27156898379325867, + "block0_mlp_wout_cos_v_neg_g": 0.16327689588069916, + "block3_q_v_norm": 0.1612909883260727, + "block3_q_cos_v_neg_g": 0.08280196785926819, + "block3_k_v_norm": 0.15550529956817627, + "block3_k_cos_v_neg_g": 0.06094689294695854, + "block3_v_v_norm": 0.1350482553243637, + "block3_v_cos_v_neg_g": 0.03932401165366173, + "block3_o_v_norm": 0.1416700929403305, + "block3_o_cos_v_neg_g": 0.16655205190181732, + "block3_mlp_win_v_norm": 0.3211885690689087, + "block3_mlp_win_cos_v_neg_g": 0.05474962294101715, + "block3_mlp_wout_v_norm": 0.2731145918369293, + "block3_mlp_wout_cos_v_neg_g": 0.2402353584766388, + "block7_q_v_norm": 0.16557300090789795, + "block7_q_cos_v_neg_g": 0.0792582780122757, + "block7_k_v_norm": 0.15750469267368317, + "block7_k_cos_v_neg_g": 0.21781282126903534, + 
"block7_v_v_norm": 0.1405961513519287, + "block7_v_cos_v_neg_g": 0.05138937756419182, + "block7_o_v_norm": 0.1485753208398819, + "block7_o_cos_v_neg_g": 0.23323622345924377, + "block7_mlp_win_v_norm": 0.3110537528991699, + "block7_mlp_win_cos_v_neg_g": 0.07177947461605072, + "block7_mlp_wout_v_norm": 0.27396953105926514, + "block7_mlp_wout_cos_v_neg_g": 0.24387650191783905, + "block11_q_v_norm": 0.16534686088562012, + "block11_q_cos_v_neg_g": 0.11397559195756912, + "block11_k_v_norm": 0.15926723182201385, + "block11_k_cos_v_neg_g": 0.19695287942886353, + "block11_v_v_norm": 0.15090732276439667, + "block11_v_cos_v_neg_g": 0.06962116807699203, + "block11_o_v_norm": 0.16210363805294037, + "block11_o_cos_v_neg_g": 0.23853471875190735, + "block11_mlp_win_v_norm": 0.326657235622406, + "block11_mlp_win_cos_v_neg_g": 0.11489035189151764, + "block11_mlp_wout_v_norm": 0.2923242747783661, + "block11_mlp_wout_cos_v_neg_g": 0.1957259327173233, + "embed_lm_head_sharpness": 0.0003003430610988289, + "layer_1_sharpness": 0.005300643388181925, + "layer_2_sharpness": 0.0008995844982564449, + "layer_3_sharpness": 0.0011924764839932323, + "layer_4_sharpness": 0.0018175969598814845, + "layer_5_sharpness": 0.0009796351660043001, + "layer_6_sharpness": 0.0016753426752984524, + "layer_7_sharpness": 0.0017199540743604302, + "layer_8_sharpness": 0.0034996618051081896, + "layer_9_sharpness": 0.0024425373412668705, + "layer_10_sharpness": 0.001085244701243937, + "layer_11_sharpness": 0.0008579790010116994, + "layer_12_sharpness": 0.002999626798555255, + "block0_q_sharpness": 0.00073387281736359, + "block0_k_sharpness": 0.0009672348969615996, + "block0_v_sharpness": 0.003102756105363369, + "block0_o_sharpness": 0.0034233820624649525, + "block0_mlp_win_sharpness": 0.0008647580980323255, + "block0_mlp_wout_sharpness": 0.002265207003802061, + "block3_q_sharpness": 0.0012561528710648417, + "block3_k_sharpness": 0.0008058834355324507, + "block3_v_sharpness": 0.0026153582148253918, + "block3_o_sharpness": 0.000870028103236109, + "block3_mlp_win_sharpness": 0.00023528520250692964, + "block3_mlp_wout_sharpness": 0.0005354339955374599, + "block7_q_sharpness": 0.0001773914700606838, + "block7_k_sharpness": 0.0001603852433618158, + "block7_v_sharpness": 0.005678940564393997, + "block7_o_sharpness": 0.0004139192751608789, + "block7_mlp_win_sharpness": 0.0011187009513378143, + "block7_mlp_wout_sharpness": 0.0010587825672701001, + "block11_q_sharpness": 0.00012035960389766842, + "block11_k_sharpness": 0.00017701202887110412, + "block11_v_sharpness": 0.0006586391828022897, + "block11_o_sharpness": 0.00014653736434411258, + "block11_mlp_win_sharpness": 0.0007636822410859168, + "block11_mlp_wout_sharpness": 0.0038375521544367075, + "sum_layer_numerators": 0.006641577293891587, + "block_diag_sharpness": 0.0020264533717670534, + "cross_layer_sharpness": 0.008331483472462644 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..147695e3b852891b010fab5d9513385577648879 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2178537845611572, + "total_l1_linf_norm": 19718.962890625, + "total_spectral_norm": 
2.217853307723999, + "embed_lm_head_update_fnorm": 1.322728157043457, + "embed_lm_head_max_l1_linf_norm": 0.33891215920448303, + "embed_lm_head_max_spectral_norm": 0.19714878499507904, + "layer_1_update_fnorm": 0.4914834499359131, + "layer_1_max_l1_linf_norm": 0.5753586888313293, + "layer_1_max_spectral_norm": 0.08356557041406631, + "layer_2_update_fnorm": 0.5090928077697754, + "layer_2_max_l1_linf_norm": 0.5825939178466797, + "layer_2_max_spectral_norm": 0.08372332155704498, + "layer_3_update_fnorm": 0.5030264258384705, + "layer_3_max_l1_linf_norm": 0.6749606728553772, + "layer_3_max_spectral_norm": 0.08970329910516739, + "layer_4_update_fnorm": 0.5028313398361206, + "layer_4_max_l1_linf_norm": 0.6398512125015259, + "layer_4_max_spectral_norm": 0.09423420578241348, + "layer_5_update_fnorm": 0.49612703919410706, + "layer_5_max_l1_linf_norm": 0.5779134035110474, + "layer_5_max_spectral_norm": 0.07466881722211838, + "layer_6_update_fnorm": 0.5053552985191345, + "layer_6_max_l1_linf_norm": 0.6340459585189819, + "layer_6_max_spectral_norm": 0.08207202702760696, + "layer_7_update_fnorm": 0.5127381086349487, + "layer_7_max_l1_linf_norm": 0.5579586625099182, + "layer_7_max_spectral_norm": 0.07282872498035431, + "layer_8_update_fnorm": 0.5080756545066833, + "layer_8_max_l1_linf_norm": 0.5494349002838135, + "layer_8_max_spectral_norm": 0.07044634968042374, + "layer_9_update_fnorm": 0.5190827250480652, + "layer_9_max_l1_linf_norm": 0.536116361618042, + "layer_9_max_spectral_norm": 0.053566448390483856, + "layer_10_update_fnorm": 0.5343239903450012, + "layer_10_max_l1_linf_norm": 0.5446904897689819, + "layer_10_max_spectral_norm": 0.04493233188986778, + "layer_11_update_fnorm": 0.5438159108161926, + "layer_11_max_l1_linf_norm": 0.5544961094856262, + "layer_11_max_spectral_norm": 0.05464132875204086, + "layer_12_update_fnorm": 0.5379640460014343, + "layer_12_max_l1_linf_norm": 0.5724872350692749, + "layer_12_max_spectral_norm": 0.07559246569871902, + "block0_q_update_fnorm": 0.14433789253234863, + "block0_q_max_l1_linf_norm": 0.16372621059417725, + "block0_q_max_spectral_norm": 0.042284686118364334, + "block0_k_update_fnorm": 0.1427609771490097, + "block0_k_max_l1_linf_norm": 0.1816890686750412, + "block0_k_max_spectral_norm": 0.05462173745036125, + "block0_v_update_fnorm": 0.1399664729833603, + "block0_v_max_l1_linf_norm": 0.16477380692958832, + "block0_v_max_spectral_norm": 0.03883003816008568, + "block0_o_update_fnorm": 0.14462117850780487, + "block0_o_max_l1_linf_norm": 0.1516318917274475, + "block0_o_max_spectral_norm": 0.03219885006546974, + "block0_mlp_win_update_fnorm": 0.2968218922615051, + "block0_mlp_win_max_l1_linf_norm": 0.15797606110572815, + "block0_mlp_win_max_spectral_norm": 0.043332841247320175, + "block0_mlp_wout_update_fnorm": 0.2676020562648773, + "block0_mlp_wout_max_l1_linf_norm": 0.5753586888313293, + "block0_mlp_wout_max_spectral_norm": 0.08356557041406631, + "block3_q_update_fnorm": 0.15184395015239716, + "block3_q_max_l1_linf_norm": 0.18356741964817047, + "block3_q_max_spectral_norm": 0.05258437991142273, + "block3_k_update_fnorm": 0.14251495897769928, + "block3_k_max_l1_linf_norm": 0.21136926114559174, + "block3_k_max_spectral_norm": 0.031904593110084534, + "block3_v_update_fnorm": 0.13060225546360016, + "block3_v_max_l1_linf_norm": 0.15090632438659668, + "block3_v_max_spectral_norm": 0.04163322597742081, + "block3_o_update_fnorm": 0.13626715540885925, + "block3_o_max_l1_linf_norm": 0.1559108942747116, + "block3_o_max_spectral_norm": 0.047054167836904526, + 
"block3_mlp_win_update_fnorm": 0.31553521752357483, + "block3_mlp_win_max_l1_linf_norm": 0.18955570459365845, + "block3_mlp_win_max_spectral_norm": 0.07025223970413208, + "block3_mlp_wout_update_fnorm": 0.2722563147544861, + "block3_mlp_wout_max_l1_linf_norm": 0.6398512125015259, + "block3_mlp_wout_max_spectral_norm": 0.09423420578241348, + "block7_q_update_fnorm": 0.16023941338062286, + "block7_q_max_l1_linf_norm": 0.15131710469722748, + "block7_q_max_spectral_norm": 0.02307220548391342, + "block7_k_update_fnorm": 0.15417270362377167, + "block7_k_max_l1_linf_norm": 0.15375393629074097, + "block7_k_max_spectral_norm": 0.02681919001042843, + "block7_v_update_fnorm": 0.13813500106334686, + "block7_v_max_l1_linf_norm": 0.14028750360012054, + "block7_v_max_spectral_norm": 0.027451058849692345, + "block7_o_update_fnorm": 0.1456691175699234, + "block7_o_max_l1_linf_norm": 0.14028123021125793, + "block7_o_max_spectral_norm": 0.028694676235318184, + "block7_mlp_win_update_fnorm": 0.30821889638900757, + "block7_mlp_win_max_l1_linf_norm": 0.17061728239059448, + "block7_mlp_win_max_spectral_norm": 0.041379451751708984, + "block7_mlp_wout_update_fnorm": 0.2706950902938843, + "block7_mlp_wout_max_l1_linf_norm": 0.5494349002838135, + "block7_mlp_wout_max_spectral_norm": 0.07044634968042374, + "block11_q_update_fnorm": 0.16343100368976593, + "block11_q_max_l1_linf_norm": 0.15539082884788513, + "block11_q_max_spectral_norm": 0.024123981595039368, + "block11_k_update_fnorm": 0.15820401906967163, + "block11_k_max_l1_linf_norm": 0.17215988039970398, + "block11_k_max_spectral_norm": 0.023819461464881897, + "block11_v_update_fnorm": 0.14731864631175995, + "block11_v_max_l1_linf_norm": 0.16294457018375397, + "block11_v_max_spectral_norm": 0.033221133053302765, + "block11_o_update_fnorm": 0.15892450511455536, + "block11_o_max_l1_linf_norm": 0.15692463517189026, + "block11_o_max_spectral_norm": 0.039496008306741714, + "block11_mlp_win_update_fnorm": 0.3242853283882141, + "block11_mlp_win_max_l1_linf_norm": 0.1737685203552246, + "block11_mlp_win_max_spectral_norm": 0.05015919730067253, + "block11_mlp_wout_update_fnorm": 0.29223912954330444, + "block11_mlp_wout_max_l1_linf_norm": 0.5724872350692749, + "block11_mlp_wout_max_spectral_norm": 0.07559246569871902, + "total_sharpness": 0.004969785455614328, + "block_total_sharpness": 0.007029735948890448, + "v_norm_block": 1.780242681503296, + "v_T_H_v_block": 0.02227908745408058, + "v_norm": 2.2178537845611572, + "ip_v_neg_g_hvp": 0.050231657922267914, + "cos_v_neg_g_hvp": 0.06161994859576225, + "g_hvp_norm": 0.36755579710006714, + "ip_v_neg_g_t": 0.05040296912193298, + "cos_v_neg_g_t": 0.0767245814204216, + "g_t_norm": 0.29620248079299927, + "g_norm": 0.36755579710006714, + "hv_norm": 0.4643981158733368, + "cos_v_hv": 0.023734500631690025, + "hg_norm": 6.5778584480285645, + "cos_g_hg": 0.6406418681144714, + "v_parallel_norm": 0.007472290191799402, + "v_perp_norm": 2.217841148376465, + "embed_lm_head_v_norm": 1.322728157043457, + "embed_lm_head_cos_v_neg_g": 0.08619722723960876, + "layer_1_v_norm": 0.4914834499359131, + "layer_1_cos_v_neg_g": 0.11219263076782227, + "layer_2_v_norm": 0.5090928077697754, + "layer_2_cos_v_neg_g": 0.056465860456228256, + "layer_3_v_norm": 0.5030264258384705, + "layer_3_cos_v_neg_g": 0.0520532950758934, + "layer_4_v_norm": 0.5028313398361206, + "layer_4_cos_v_neg_g": 0.05926665663719177, + "layer_5_v_norm": 0.49612703919410706, + "layer_5_cos_v_neg_g": 0.04297104477882385, + "layer_6_v_norm": 0.5053552985191345, + "layer_6_cos_v_neg_g": 
0.05242384597659111, + "layer_7_v_norm": 0.5127381086349487, + "layer_7_cos_v_neg_g": 0.05935901403427124, + "layer_8_v_norm": 0.5080755949020386, + "layer_8_cos_v_neg_g": 0.05629905313253403, + "layer_9_v_norm": 0.5190827250480652, + "layer_9_cos_v_neg_g": 0.054057929664850235, + "layer_10_v_norm": 0.5343239903450012, + "layer_10_cos_v_neg_g": 0.0644037127494812, + "layer_11_v_norm": 0.5438159108161926, + "layer_11_cos_v_neg_g": 0.08555920422077179, + "layer_12_v_norm": 0.5379640460014343, + "layer_12_cos_v_neg_g": 0.12185163050889969, + "block0_q_v_norm": 0.14433789253234863, + "block0_q_cos_v_neg_g": 0.13937823474407196, + "block0_k_v_norm": 0.1427609771490097, + "block0_k_cos_v_neg_g": 0.13045307993888855, + "block0_v_v_norm": 0.1399664729833603, + "block0_v_cos_v_neg_g": 0.2036721110343933, + "block0_o_v_norm": 0.14462117850780487, + "block0_o_cos_v_neg_g": 0.13432031869888306, + "block0_mlp_win_v_norm": 0.2968218922615051, + "block0_mlp_win_cos_v_neg_g": 0.08031412959098816, + "block0_mlp_wout_v_norm": 0.2676020562648773, + "block0_mlp_wout_cos_v_neg_g": 0.15347090363502502, + "block3_q_v_norm": 0.15184395015239716, + "block3_q_cos_v_neg_g": 0.0773157998919487, + "block3_k_v_norm": 0.14251495897769928, + "block3_k_cos_v_neg_g": 0.06749702244997025, + "block3_v_v_norm": 0.13060225546360016, + "block3_v_cos_v_neg_g": 0.03692319244146347, + "block3_o_v_norm": 0.13626715540885925, + "block3_o_cos_v_neg_g": 0.1682867407798767, + "block3_mlp_win_v_norm": 0.31553521752357483, + "block3_mlp_win_cos_v_neg_g": 0.05857923626899719, + "block3_mlp_wout_v_norm": 0.2722563147544861, + "block3_mlp_wout_cos_v_neg_g": 0.22745294868946075, + "block7_q_v_norm": 0.16023941338062286, + "block7_q_cos_v_neg_g": 0.06927802413702011, + "block7_k_v_norm": 0.15417270362377167, + "block7_k_cos_v_neg_g": 0.20246340334415436, + "block7_v_v_norm": 0.13813500106334686, + "block7_v_cos_v_neg_g": 0.04381556063890457, + "block7_o_v_norm": 0.1456691175699234, + "block7_o_cos_v_neg_g": 0.21569445729255676, + "block7_mlp_win_v_norm": 0.30821889638900757, + "block7_mlp_win_cos_v_neg_g": 0.07317249476909637, + "block7_mlp_wout_v_norm": 0.2706950902938843, + "block7_mlp_wout_cos_v_neg_g": 0.20688718557357788, + "block11_q_v_norm": 0.16343100368976593, + "block11_q_cos_v_neg_g": 0.11852621287107468, + "block11_k_v_norm": 0.15820401906967163, + "block11_k_cos_v_neg_g": 0.1893547922372818, + "block11_v_v_norm": 0.14731864631175995, + "block11_v_cos_v_neg_g": 0.06738144904375076, + "block11_o_v_norm": 0.15892450511455536, + "block11_o_cos_v_neg_g": 0.22781778872013092, + "block11_mlp_win_v_norm": 0.3242853283882141, + "block11_mlp_win_cos_v_neg_g": 0.11915307492017746, + "block11_mlp_wout_v_norm": 0.29223912954330444, + "block11_mlp_wout_cos_v_neg_g": 0.1855194866657257, + "embed_lm_head_sharpness": 0.0002578867133706808, + "layer_1_sharpness": 0.003644851967692375, + "layer_2_sharpness": 0.0003508617519401014, + "layer_3_sharpness": 0.001173549098894, + "layer_4_sharpness": 0.001176809542812407, + "layer_5_sharpness": 0.0008443673141300678, + "layer_6_sharpness": 0.0011896553914994001, + "layer_7_sharpness": 0.001192708034068346, + "layer_8_sharpness": 0.0020607803016901016, + "layer_9_sharpness": 0.0015363596612587571, + "layer_10_sharpness": 0.0008307351963594556, + "layer_11_sharpness": 0.0007330342195928097, + "layer_12_sharpness": 0.0022186783608049154, + "block0_q_sharpness": -4.022650682600215e-05, + "block0_k_sharpness": 8.395424811169505e-05, + "block0_v_sharpness": 0.001761081744916737, + "block0_o_sharpness": 
0.002064526081085205, + "block0_mlp_win_sharpness": 0.0013563843676820397, + "block0_mlp_wout_sharpness": 0.0017664816696196795, + "block3_q_sharpness": 0.0009126659133471549, + "block3_k_sharpness": 0.00043095892760902643, + "block3_v_sharpness": 0.0017074979841709137, + "block3_o_sharpness": 0.0006038590800017118, + "block3_mlp_win_sharpness": 0.00017880159430205822, + "block3_mlp_wout_sharpness": 0.00048040004912763834, + "block7_q_sharpness": 7.9810903116595e-05, + "block7_k_sharpness": 0.00017811472935136408, + "block7_v_sharpness": 0.003673213766887784, + "block7_o_sharpness": 0.0003355188819114119, + "block7_mlp_win_sharpness": 0.0006526710349135101, + "block7_mlp_wout_sharpness": 0.0006994957802817225, + "block11_q_sharpness": 8.854511543177068e-05, + "block11_k_sharpness": 0.0001455463352613151, + "block11_v_sharpness": 0.0006172315333969891, + "block11_o_sharpness": 0.00014363310765475035, + "block11_mlp_win_sharpness": 0.0006651167059317231, + "block11_mlp_wout_sharpness": 0.002480566967278719, + "sum_layer_numerators": 0.004433077812310942, + "block_diag_sharpness": 0.0013987720193876796, + "cross_layer_sharpness": 0.005630963929502768 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..ba0bd4e00c1ed6cfe8737d17ed1d09f1900f34ee --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_8500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.709477424621582, + "total_l1_linf_norm": 15236.673828125, + "total_spectral_norm": 1.7094773054122925, + "embed_lm_head_update_fnorm": 1.0044893026351929, + "embed_lm_head_max_l1_linf_norm": 0.26370254158973694, + "embed_lm_head_max_spectral_norm": 0.15431742370128632, + "layer_1_update_fnorm": 0.39061209559440613, + "layer_1_max_l1_linf_norm": 0.49942004680633545, + "layer_1_max_spectral_norm": 0.06932101398706436, + "layer_2_update_fnorm": 0.39296337962150574, + "layer_2_max_l1_linf_norm": 0.4510742127895355, + "layer_2_max_spectral_norm": 0.0620206855237484, + "layer_3_update_fnorm": 0.3887344300746918, + "layer_3_max_l1_linf_norm": 0.5534698963165283, + "layer_3_max_spectral_norm": 0.06616140902042389, + "layer_4_update_fnorm": 0.39335042238235474, + "layer_4_max_l1_linf_norm": 0.6265084743499756, + "layer_4_max_spectral_norm": 0.06893342733383179, + "layer_5_update_fnorm": 0.3860902488231659, + "layer_5_max_l1_linf_norm": 0.48974621295928955, + "layer_5_max_spectral_norm": 0.059774454683065414, + "layer_6_update_fnorm": 0.3927508592605591, + "layer_6_max_l1_linf_norm": 0.5059581398963928, + "layer_6_max_spectral_norm": 0.06352037936449051, + "layer_7_update_fnorm": 0.39813047647476196, + "layer_7_max_l1_linf_norm": 0.476579874753952, + "layer_7_max_spectral_norm": 0.058915674686431885, + "layer_8_update_fnorm": 0.39702504873275757, + "layer_8_max_l1_linf_norm": 0.4215151369571686, + "layer_8_max_spectral_norm": 0.05492870509624481, + "layer_9_update_fnorm": 0.4045889973640442, + "layer_9_max_l1_linf_norm": 0.45270347595214844, + "layer_9_max_spectral_norm": 0.049110542982816696, + "layer_10_update_fnorm": 0.4131793975830078, + "layer_10_max_l1_linf_norm": 0.4290243983268738, + "layer_10_max_spectral_norm": 0.037059418857097626, + "layer_11_update_fnorm": 
0.4181429147720337, + "layer_11_max_l1_linf_norm": 0.4431498050689697, + "layer_11_max_spectral_norm": 0.04212496429681778, + "layer_12_update_fnorm": 0.41444912552833557, + "layer_12_max_l1_linf_norm": 0.4635826349258423, + "layer_12_max_spectral_norm": 0.05891934782266617, + "block0_q_update_fnorm": 0.11705382913351059, + "block0_q_max_l1_linf_norm": 0.1385180652141571, + "block0_q_max_spectral_norm": 0.03677193820476532, + "block0_k_update_fnorm": 0.11566992104053497, + "block0_k_max_l1_linf_norm": 0.16261228919029236, + "block0_k_max_spectral_norm": 0.04571432247757912, + "block0_v_update_fnorm": 0.11521083116531372, + "block0_v_max_l1_linf_norm": 0.13933297991752625, + "block0_v_max_spectral_norm": 0.03282058984041214, + "block0_o_update_fnorm": 0.11632376909255981, + "block0_o_max_l1_linf_norm": 0.11424589157104492, + "block0_o_max_spectral_norm": 0.026805050671100616, + "block0_mlp_win_update_fnorm": 0.23137201368808746, + "block0_mlp_win_max_l1_linf_norm": 0.11983216553926468, + "block0_mlp_win_max_spectral_norm": 0.0337720550596714, + "block0_mlp_wout_update_fnorm": 0.21231834590435028, + "block0_mlp_wout_max_l1_linf_norm": 0.49942004680633545, + "block0_mlp_wout_max_spectral_norm": 0.06932101398706436, + "block3_q_update_fnorm": 0.12050290405750275, + "block3_q_max_l1_linf_norm": 0.1463107168674469, + "block3_q_max_spectral_norm": 0.039296187460422516, + "block3_k_update_fnorm": 0.11702679097652435, + "block3_k_max_l1_linf_norm": 0.20700129866600037, + "block3_k_max_spectral_norm": 0.03322291001677513, + "block3_v_update_fnorm": 0.10613133013248444, + "block3_v_max_l1_linf_norm": 0.15187141299247742, + "block3_v_max_spectral_norm": 0.03188745677471161, + "block3_o_update_fnorm": 0.11082682758569717, + "block3_o_max_l1_linf_norm": 0.1353497952222824, + "block3_o_max_spectral_norm": 0.03582628816366196, + "block3_mlp_win_update_fnorm": 0.24215221405029297, + "block3_mlp_win_max_l1_linf_norm": 0.15031391382217407, + "block3_mlp_win_max_spectral_norm": 0.05307016521692276, + "block3_mlp_wout_update_fnorm": 0.21028144657611847, + "block3_mlp_wout_max_l1_linf_norm": 0.6265084743499756, + "block3_mlp_wout_max_spectral_norm": 0.06893342733383179, + "block7_q_update_fnorm": 0.12435606867074966, + "block7_q_max_l1_linf_norm": 0.126082181930542, + "block7_q_max_spectral_norm": 0.018759524449706078, + "block7_k_update_fnorm": 0.12025361508131027, + "block7_k_max_l1_linf_norm": 0.1301349550485611, + "block7_k_max_spectral_norm": 0.019976604729890823, + "block7_v_update_fnorm": 0.11010176688432693, + "block7_v_max_l1_linf_norm": 0.11383870244026184, + "block7_v_max_spectral_norm": 0.019486699253320694, + "block7_o_update_fnorm": 0.11519153416156769, + "block7_o_max_l1_linf_norm": 0.10958976298570633, + "block7_o_max_spectral_norm": 0.019719455391168594, + "block7_mlp_win_update_fnorm": 0.23876895010471344, + "block7_mlp_win_max_l1_linf_norm": 0.13261282444000244, + "block7_mlp_win_max_spectral_norm": 0.03374241292476654, + "block7_mlp_wout_update_fnorm": 0.21266111731529236, + "block7_mlp_wout_max_l1_linf_norm": 0.4215151369571686, + "block7_mlp_wout_max_spectral_norm": 0.05492870509624481, + "block11_q_update_fnorm": 0.12522836029529572, + "block11_q_max_l1_linf_norm": 0.11914069950580597, + "block11_q_max_spectral_norm": 0.020034413784742355, + "block11_k_update_fnorm": 0.12164080888032913, + "block11_k_max_l1_linf_norm": 0.1411360502243042, + "block11_k_max_spectral_norm": 0.018007678911089897, + "block11_v_update_fnorm": 0.11546380817890167, + "block11_v_max_l1_linf_norm": 
0.12549494206905365, + "block11_v_max_spectral_norm": 0.023720722645521164, + "block11_o_update_fnorm": 0.12189405411481857, + "block11_o_max_l1_linf_norm": 0.1211271807551384, + "block11_o_max_spectral_norm": 0.026408202946186066, + "block11_mlp_win_update_fnorm": 0.24822773039340973, + "block11_mlp_win_max_l1_linf_norm": 0.13046211004257202, + "block11_mlp_win_max_spectral_norm": 0.03596791252493858, + "block11_mlp_wout_update_fnorm": 0.22669675946235657, + "block11_mlp_wout_max_l1_linf_norm": 0.4635826349258423, + "block11_mlp_wout_max_spectral_norm": 0.05891934782266617, + "total_sharpness": 0.004451566841453314, + "block_total_sharpness": 0.006122348364442587, + "v_norm_block": 1.3832257986068726, + "v_T_H_v_block": 0.011713972315192223, + "v_norm": 1.709477424621582, + "ip_v_neg_g_hvp": 0.03697413206100464, + "cos_v_neg_g_hvp": 0.05961567535996437, + "g_hvp_norm": 0.36280572414398193, + "ip_v_neg_g_t": 0.03724788501858711, + "cos_v_neg_g_t": 0.0754331722855568, + "g_t_norm": 0.288852334022522, + "g_norm": 0.36280572414398193, + "hv_norm": 0.32331687211990356, + "cos_v_hv": 0.02353682555258274, + "hg_norm": 8.077611923217773, + "cos_g_hg": 0.5522642731666565, + "v_parallel_norm": 0.005910384934395552, + "v_perp_norm": 1.7094671726226807, + "embed_lm_head_v_norm": 1.0044893026351929, + "embed_lm_head_cos_v_neg_g": 0.08533385396003723, + "layer_1_v_norm": 0.39061209559440613, + "layer_1_cos_v_neg_g": 0.11140799522399902, + "layer_2_v_norm": 0.39296337962150574, + "layer_2_cos_v_neg_g": 0.0572139210999012, + "layer_3_v_norm": 0.3887344300746918, + "layer_3_cos_v_neg_g": 0.04494676738977432, + "layer_4_v_norm": 0.39335042238235474, + "layer_4_cos_v_neg_g": 0.04982791095972061, + "layer_5_v_norm": 0.3860902488231659, + "layer_5_cos_v_neg_g": 0.038536835461854935, + "layer_6_v_norm": 0.3927508592605591, + "layer_6_cos_v_neg_g": 0.049023497849702835, + "layer_7_v_norm": 0.39813047647476196, + "layer_7_cos_v_neg_g": 0.05504680052399635, + "layer_8_v_norm": 0.39702504873275757, + "layer_8_cos_v_neg_g": 0.05463612452149391, + "layer_9_v_norm": 0.4045889973640442, + "layer_9_cos_v_neg_g": 0.05425798520445824, + "layer_10_v_norm": 0.4131793975830078, + "layer_10_cos_v_neg_g": 0.0617358423769474, + "layer_11_v_norm": 0.4181429445743561, + "layer_11_cos_v_neg_g": 0.08231375366449356, + "layer_12_v_norm": 0.41444912552833557, + "layer_12_cos_v_neg_g": 0.11314310133457184, + "block0_q_v_norm": 0.11705382913351059, + "block0_q_cos_v_neg_g": 0.13219688832759857, + "block0_k_v_norm": 0.11566992104053497, + "block0_k_cos_v_neg_g": 0.13389518857002258, + "block0_v_v_norm": 0.11521083116531372, + "block0_v_cos_v_neg_g": 0.20012643933296204, + "block0_o_v_norm": 0.11632376909255981, + "block0_o_cos_v_neg_g": 0.13322781026363373, + "block0_mlp_win_v_norm": 0.23137201368808746, + "block0_mlp_win_cos_v_neg_g": 0.07987986505031586, + "block0_mlp_wout_v_norm": 0.21231834590435028, + "block0_mlp_wout_cos_v_neg_g": 0.1500939577817917, + "block3_q_v_norm": 0.12050290405750275, + "block3_q_cos_v_neg_g": 0.06782832741737366, + "block3_k_v_norm": 0.11702679097652435, + "block3_k_cos_v_neg_g": 0.056588273495435715, + "block3_v_v_norm": 0.10613133013248444, + "block3_v_cos_v_neg_g": 0.02850467897951603, + "block3_o_v_norm": 0.11082682758569717, + "block3_o_cos_v_neg_g": 0.1495208442211151, + "block3_mlp_win_v_norm": 0.24215221405029297, + "block3_mlp_win_cos_v_neg_g": 0.0564558282494545, + "block3_mlp_wout_v_norm": 0.21028144657611847, + "block3_mlp_wout_cos_v_neg_g": 0.20784704387187958, + "block7_q_v_norm": 
0.12435606867074966, + "block7_q_cos_v_neg_g": 0.0711314007639885, + "block7_k_v_norm": 0.12025361508131027, + "block7_k_cos_v_neg_g": 0.20117247104644775, + "block7_v_v_norm": 0.11010176688432693, + "block7_v_cos_v_neg_g": 0.04073739051818848, + "block7_o_v_norm": 0.11519153416156769, + "block7_o_cos_v_neg_g": 0.20925118029117584, + "block7_mlp_win_v_norm": 0.23876895010471344, + "block7_mlp_win_cos_v_neg_g": 0.06957372277975082, + "block7_mlp_wout_v_norm": 0.21266111731529236, + "block7_mlp_wout_cos_v_neg_g": 0.19625619053840637, + "block11_q_v_norm": 0.12522836029529572, + "block11_q_cos_v_neg_g": 0.11188676208257675, + "block11_k_v_norm": 0.12164080888032913, + "block11_k_cos_v_neg_g": 0.18165621161460876, + "block11_v_v_norm": 0.11546380817890167, + "block11_v_cos_v_neg_g": 0.058714669197797775, + "block11_o_v_norm": 0.12189405411481857, + "block11_o_cos_v_neg_g": 0.215713769197464, + "block11_mlp_win_v_norm": 0.24822773039340973, + "block11_mlp_win_cos_v_neg_g": 0.11153071373701096, + "block11_mlp_wout_v_norm": 0.22669675946235657, + "block11_mlp_wout_cos_v_neg_g": 0.16904710233211517, + "embed_lm_head_sharpness": 0.0003043402684852481, + "layer_1_sharpness": 0.0029876301996409893, + "layer_2_sharpness": 0.0003651826991699636, + "layer_3_sharpness": 0.0009499869192950428, + "layer_4_sharpness": 0.001033055130392313, + "layer_5_sharpness": 0.0006809777696616948, + "layer_6_sharpness": 0.0011719155590981245, + "layer_7_sharpness": 0.0011069453321397305, + "layer_8_sharpness": 0.0018262865487486124, + "layer_9_sharpness": 0.0015331920003518462, + "layer_10_sharpness": 0.0008175967377610505, + "layer_11_sharpness": 0.000707299739588052, + "layer_12_sharpness": 0.0025324781890958548, + "block0_q_sharpness": -0.000372523907572031, + "block0_k_sharpness": -0.00025813336833380163, + "block0_v_sharpness": 0.0022214369382709265, + "block0_o_sharpness": 0.0027340876404196024, + "block0_mlp_win_sharpness": 0.0006274354527704418, + "block0_mlp_wout_sharpness": 0.0017644459148868918, + "block3_q_sharpness": 0.000994947156868875, + "block3_k_sharpness": 0.0005459547392092645, + "block3_v_sharpness": 0.0012423356529325247, + "block3_o_sharpness": 0.0005991264479234815, + "block3_mlp_win_sharpness": 0.00017767120152711868, + "block3_mlp_wout_sharpness": 0.000324591645039618, + "block7_q_sharpness": 0.00015488533244933933, + "block7_k_sharpness": 0.0002903050044551492, + "block7_v_sharpness": 0.0031481480691581964, + "block7_o_sharpness": 0.00030764401890337467, + "block7_mlp_win_sharpness": 0.0006250090664252639, + "block7_mlp_wout_sharpness": 0.0006574101280421019, + "block11_q_sharpness": 9.987973317038268e-05, + "block11_k_sharpness": 0.00015088320651557297, + "block11_v_sharpness": 0.0005441331304609776, + "block11_o_sharpness": 0.00011080265539931133, + "block11_mlp_win_sharpness": 0.0005969949415884912, + "block11_mlp_wout_sharpness": 0.0033441553823649883, + "sum_layer_numerators": 0.0025104658569987093, + "block_diag_sharpness": 0.0013121036948503337, + "cross_layer_sharpness": 0.004810244669592253 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9000.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..64e1b2bad067a59136768655b339e8fbf68026dd --- /dev/null +++ 
b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.1861917972564697, + "total_l1_linf_norm": 10616.384765625, + "total_spectral_norm": 1.1861917972564697, + "embed_lm_head_update_fnorm": 0.6727346777915955, + "embed_lm_head_max_l1_linf_norm": 0.19430068135261536, + "embed_lm_head_max_spectral_norm": 0.11103832721710205, + "layer_1_update_fnorm": 0.2779086232185364, + "layer_1_max_l1_linf_norm": 0.38743311166763306, + "layer_1_max_spectral_norm": 0.05279107764363289, + "layer_2_update_fnorm": 0.2805851101875305, + "layer_2_max_l1_linf_norm": 0.33474498987197876, + "layer_2_max_spectral_norm": 0.044534068554639816, + "layer_3_update_fnorm": 0.27840638160705566, + "layer_3_max_l1_linf_norm": 0.3747895658016205, + "layer_3_max_spectral_norm": 0.05461161583662033, + "layer_4_update_fnorm": 0.27790260314941406, + "layer_4_max_l1_linf_norm": 0.3596121072769165, + "layer_4_max_spectral_norm": 0.0529799722135067, + "layer_5_update_fnorm": 0.2751838266849518, + "layer_5_max_l1_linf_norm": 0.3163667917251587, + "layer_5_max_spectral_norm": 0.043507907539606094, + "layer_6_update_fnorm": 0.2769117057323456, + "layer_6_max_l1_linf_norm": 0.35404640436172485, + "layer_6_max_spectral_norm": 0.04684983193874359, + "layer_7_update_fnorm": 0.27923429012298584, + "layer_7_max_l1_linf_norm": 0.3304109573364258, + "layer_7_max_spectral_norm": 0.04204203933477402, + "layer_8_update_fnorm": 0.27858275175094604, + "layer_8_max_l1_linf_norm": 0.30862292647361755, + "layer_8_max_spectral_norm": 0.043040014803409576, + "layer_9_update_fnorm": 0.2827110290527344, + "layer_9_max_l1_linf_norm": 0.2959446907043457, + "layer_9_max_spectral_norm": 0.03585997596383095, + "layer_10_update_fnorm": 0.29176342487335205, + "layer_10_max_l1_linf_norm": 0.3077080249786377, + "layer_10_max_spectral_norm": 0.029279794543981552, + "layer_11_update_fnorm": 0.29389551281929016, + "layer_11_max_l1_linf_norm": 0.31129664182662964, + "layer_11_max_spectral_norm": 0.03630388528108597, + "layer_12_update_fnorm": 0.2904575765132904, + "layer_12_max_l1_linf_norm": 0.33661559224128723, + "layer_12_max_spectral_norm": 0.04041178897023201, + "block0_q_update_fnorm": 0.08190722018480301, + "block0_q_max_l1_linf_norm": 0.0925031304359436, + "block0_q_max_spectral_norm": 0.024459926411509514, + "block0_k_update_fnorm": 0.08080362528562546, + "block0_k_max_l1_linf_norm": 0.10059453547000885, + "block0_k_max_spectral_norm": 0.027510955929756165, + "block0_v_update_fnorm": 0.08294052630662918, + "block0_v_max_l1_linf_norm": 0.09048649668693542, + "block0_v_max_spectral_norm": 0.024864530190825462, + "block0_o_update_fnorm": 0.0843081995844841, + "block0_o_max_l1_linf_norm": 0.08988557755947113, + "block0_o_max_spectral_norm": 0.0230035912245512, + "block0_mlp_win_update_fnorm": 0.16244572401046753, + "block0_mlp_win_max_l1_linf_norm": 0.11663369834423065, + "block0_mlp_win_max_spectral_norm": 0.02619280107319355, + "block0_mlp_wout_update_fnorm": 0.15354537963867188, + "block0_mlp_wout_max_l1_linf_norm": 0.38743311166763306, + "block0_mlp_wout_max_spectral_norm": 0.05279107764363289, + "block3_q_update_fnorm": 0.08234526962041855, + "block3_q_max_l1_linf_norm": 0.09668727219104767, + "block3_q_max_spectral_norm": 0.024727147072553635, + "block3_k_update_fnorm": 0.0806298702955246, + "block3_k_max_l1_linf_norm": 0.10398785769939423, + "block3_k_max_spectral_norm": 0.02337001822888851, + "block3_v_update_fnorm": 0.07842489331960678, 
+ "block3_v_max_l1_linf_norm": 0.09281091392040253, + "block3_v_max_spectral_norm": 0.026418305933475494, + "block3_o_update_fnorm": 0.07869718968868256, + "block3_o_max_l1_linf_norm": 0.0872444361448288, + "block3_o_max_spectral_norm": 0.026529038324952126, + "block3_mlp_win_update_fnorm": 0.16898861527442932, + "block3_mlp_win_max_l1_linf_norm": 0.15143480896949768, + "block3_mlp_win_max_spectral_norm": 0.03740786015987396, + "block3_mlp_wout_update_fnorm": 0.15164126455783844, + "block3_mlp_wout_max_l1_linf_norm": 0.3596121072769165, + "block3_mlp_wout_max_spectral_norm": 0.0529799722135067, + "block7_q_update_fnorm": 0.0864829421043396, + "block7_q_max_l1_linf_norm": 0.11947746574878693, + "block7_q_max_spectral_norm": 0.018936458975076675, + "block7_k_update_fnorm": 0.08477316796779633, + "block7_k_max_l1_linf_norm": 0.09711450338363647, + "block7_k_max_spectral_norm": 0.017956480383872986, + "block7_v_update_fnorm": 0.07918737083673477, + "block7_v_max_l1_linf_norm": 0.08408576995134354, + "block7_v_max_spectral_norm": 0.020406652241945267, + "block7_o_update_fnorm": 0.08200342953205109, + "block7_o_max_l1_linf_norm": 0.07874958217144012, + "block7_o_max_spectral_norm": 0.020922783762216568, + "block7_mlp_win_update_fnorm": 0.16582559049129486, + "block7_mlp_win_max_l1_linf_norm": 0.09639346599578857, + "block7_mlp_win_max_spectral_norm": 0.02273804508149624, + "block7_mlp_wout_update_fnorm": 0.1496926099061966, + "block7_mlp_wout_max_l1_linf_norm": 0.30862292647361755, + "block7_mlp_wout_max_spectral_norm": 0.043040014803409576, + "block11_q_update_fnorm": 0.08684699237346649, + "block11_q_max_l1_linf_norm": 0.09028741717338562, + "block11_q_max_spectral_norm": 0.017728442326188087, + "block11_k_update_fnorm": 0.0848826915025711, + "block11_k_max_l1_linf_norm": 0.09116975963115692, + "block11_k_max_spectral_norm": 0.013771941885352135, + "block11_v_update_fnorm": 0.08389176428318024, + "block11_v_max_l1_linf_norm": 0.10047294199466705, + "block11_v_max_spectral_norm": 0.029860427603125572, + "block11_o_update_fnorm": 0.08614756166934967, + "block11_o_max_l1_linf_norm": 0.08893586695194244, + "block11_o_max_spectral_norm": 0.024900343269109726, + "block11_mlp_win_update_fnorm": 0.17227287590503693, + "block11_mlp_win_max_l1_linf_norm": 0.11589940637350082, + "block11_mlp_win_max_spectral_norm": 0.03756217658519745, + "block11_mlp_wout_update_fnorm": 0.15948495268821716, + "block11_mlp_wout_max_l1_linf_norm": 0.33661559224128723, + "block11_mlp_wout_max_spectral_norm": 0.04041178897023201, + "total_sharpness": 0.010530171915888786, + "block_total_sharpness": 0.014344132505357265, + "v_norm_block": 0.9769745469093323, + "v_T_H_v_block": 0.013691176660358906, + "v_norm": 1.1861917972564697, + "ip_v_neg_g_hvp": 0.03310517221689224, + "cos_v_neg_g_hvp": 0.07084152102470398, + "g_hvp_norm": 0.3939608335494995, + "ip_v_neg_g_t": 0.033159177750349045, + "cos_v_neg_g_t": 0.08990988880395889, + "g_t_norm": 0.3109147846698761, + "g_norm": 0.3939608335494995, + "hv_norm": 0.44801488518714905, + "cos_v_hv": 0.027880333364009857, + "hg_norm": 15.245104789733887, + "cos_g_hg": 0.579650342464447, + "v_parallel_norm": 0.003935809712857008, + "v_perp_norm": 1.1861852407455444, + "embed_lm_head_v_norm": 0.6727346777915955, + "embed_lm_head_cos_v_neg_g": 0.09885610640048981, + "layer_1_v_norm": 0.2779086232185364, + "layer_1_cos_v_neg_g": 0.13310682773590088, + "layer_2_v_norm": 0.2805851101875305, + "layer_2_cos_v_neg_g": 0.052250269800424576, + "layer_3_v_norm": 0.27840638160705566, + 
"layer_3_cos_v_neg_g": 0.05349886044859886, + "layer_4_v_norm": 0.27790260314941406, + "layer_4_cos_v_neg_g": 0.05442966893315315, + "layer_5_v_norm": 0.2751838266849518, + "layer_5_cos_v_neg_g": 0.04763838276267052, + "layer_6_v_norm": 0.2769117057323456, + "layer_6_cos_v_neg_g": 0.06007509306073189, + "layer_7_v_norm": 0.27923429012298584, + "layer_7_cos_v_neg_g": 0.0662858858704567, + "layer_8_v_norm": 0.27858275175094604, + "layer_8_cos_v_neg_g": 0.06580707430839539, + "layer_9_v_norm": 0.2827110290527344, + "layer_9_cos_v_neg_g": 0.06711257249116898, + "layer_10_v_norm": 0.29176342487335205, + "layer_10_cos_v_neg_g": 0.08251621574163437, + "layer_11_v_norm": 0.2938954532146454, + "layer_11_cos_v_neg_g": 0.10874477028846741, + "layer_12_v_norm": 0.2904575765132904, + "layer_12_cos_v_neg_g": 0.14090755581855774, + "block0_q_v_norm": 0.08190722018480301, + "block0_q_cos_v_neg_g": 0.162115678191185, + "block0_k_v_norm": 0.08080362528562546, + "block0_k_cos_v_neg_g": 0.1571468561887741, + "block0_v_v_norm": 0.08294052630662918, + "block0_v_cos_v_neg_g": 0.23901815712451935, + "block0_o_v_norm": 0.0843081995844841, + "block0_o_cos_v_neg_g": 0.1528182476758957, + "block0_mlp_win_v_norm": 0.16244572401046753, + "block0_mlp_win_cos_v_neg_g": 0.08970164507627487, + "block0_mlp_wout_v_norm": 0.15354537963867188, + "block0_mlp_wout_cos_v_neg_g": 0.18009185791015625, + "block3_q_v_norm": 0.08234526962041855, + "block3_q_cos_v_neg_g": 0.0763913244009018, + "block3_k_v_norm": 0.0806298702955246, + "block3_k_cos_v_neg_g": 0.03218967095017433, + "block3_v_v_norm": 0.07842489331960678, + "block3_v_cos_v_neg_g": 0.04267812520265579, + "block3_o_v_norm": 0.07869718968868256, + "block3_o_cos_v_neg_g": 0.1932317465543747, + "block3_mlp_win_v_norm": 0.16898861527442932, + "block3_mlp_win_cos_v_neg_g": 0.0668429359793663, + "block3_mlp_wout_v_norm": 0.15164126455783844, + "block3_mlp_wout_cos_v_neg_g": 0.25272631645202637, + "block7_q_v_norm": 0.0864829421043396, + "block7_q_cos_v_neg_g": 0.07793930172920227, + "block7_k_v_norm": 0.08477316796779633, + "block7_k_cos_v_neg_g": 0.2419213205575943, + "block7_v_v_norm": 0.07918737083673477, + "block7_v_cos_v_neg_g": 0.04991546645760536, + "block7_o_v_norm": 0.08200342953205109, + "block7_o_cos_v_neg_g": 0.2549186646938324, + "block7_mlp_win_v_norm": 0.16582559049129486, + "block7_mlp_win_cos_v_neg_g": 0.08237706869840622, + "block7_mlp_wout_v_norm": 0.1496926099061966, + "block7_mlp_wout_cos_v_neg_g": 0.22132983803749084, + "block11_q_v_norm": 0.08684699237346649, + "block11_q_cos_v_neg_g": 0.13835689425468445, + "block11_k_v_norm": 0.0848826915025711, + "block11_k_cos_v_neg_g": 0.21539875864982605, + "block11_v_v_norm": 0.08389176428318024, + "block11_v_cos_v_neg_g": 0.08621235191822052, + "block11_o_v_norm": 0.08614756166934967, + "block11_o_cos_v_neg_g": 0.27694830298423767, + "block11_mlp_win_v_norm": 0.17227287590503693, + "block11_mlp_win_cos_v_neg_g": 0.13633579015731812, + "block11_mlp_wout_v_norm": 0.15948495268821716, + "block11_mlp_wout_cos_v_neg_g": 0.19596484303474426, + "embed_lm_head_sharpness": 0.0004962910315953195, + "layer_1_sharpness": 0.010448403656482697, + "layer_2_sharpness": 0.0038236118853092194, + "layer_3_sharpness": 0.004272351507097483, + "layer_4_sharpness": 0.0024261700455099344, + "layer_5_sharpness": 0.0010991535382345319, + "layer_6_sharpness": 0.0021722649689763784, + "layer_7_sharpness": 0.0015248989220708609, + "layer_8_sharpness": 0.002809360157698393, + "layer_9_sharpness": 0.0023667828645557165, + "layer_10_sharpness": 
0.0018245120299980044, + "layer_11_sharpness": 0.0016270980704575777, + "layer_12_sharpness": 0.0028494105208665133, + "block0_q_sharpness": 0.0032919077202677727, + "block0_k_sharpness": 0.004427594132721424, + "block0_v_sharpness": 0.005559502635151148, + "block0_o_sharpness": 0.004439906217157841, + "block0_mlp_win_sharpness": 0.0013631958281621337, + "block0_mlp_wout_sharpness": 0.002777897287160158, + "block3_q_sharpness": 0.000776222615968436, + "block3_k_sharpness": 0.0012396853417158127, + "block3_v_sharpness": 0.004456543829292059, + "block3_o_sharpness": 0.0012100638123229146, + "block3_mlp_win_sharpness": 0.00027432944625616074, + "block3_mlp_wout_sharpness": 0.0006152747082524002, + "block7_q_sharpness": 0.00017149309860542417, + "block7_k_sharpness": 0.00028931183624081314, + "block7_v_sharpness": 0.004067935980856419, + "block7_o_sharpness": 0.0006027516210451722, + "block7_mlp_win_sharpness": 0.0007506847032345831, + "block7_mlp_wout_sharpness": 0.000905725930351764, + "block11_q_sharpness": 0.0002314581215614453, + "block11_k_sharpness": 0.00024367638980038464, + "block11_v_sharpness": 0.0010174023918807507, + "block11_o_sharpness": 0.0002639148442540318, + "block11_mlp_win_sharpness": 0.0009367129532620311, + "block11_mlp_wout_sharpness": 0.0025389916263520718, + "sum_layer_numerators": 0.0029386569290065015, + "block_diag_sharpness": 0.003078806461087543, + "cross_layer_sharpness": 0.011265326044269722 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9500.json b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..a34eaf66ac8228934984923f0581e03f39b5f59e --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/sharpness_step_9500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.5957697629928589, + "total_l1_linf_norm": 5336.6923828125, + "total_spectral_norm": 0.5957698822021484, + "embed_lm_head_update_fnorm": 0.33634060621261597, + "embed_lm_head_max_l1_linf_norm": 0.08789154887199402, + "embed_lm_head_max_spectral_norm": 0.05925030633807182, + "layer_1_update_fnorm": 0.14251062273979187, + "layer_1_max_l1_linf_norm": 0.18231439590454102, + "layer_1_max_spectral_norm": 0.025319213047623634, + "layer_2_update_fnorm": 0.14323127269744873, + "layer_2_max_l1_linf_norm": 0.15859870612621307, + "layer_2_max_spectral_norm": 0.02220260351896286, + "layer_3_update_fnorm": 0.13952453434467316, + "layer_3_max_l1_linf_norm": 0.1614806354045868, + "layer_3_max_spectral_norm": 0.02443283051252365, + "layer_4_update_fnorm": 0.13973818719387054, + "layer_4_max_l1_linf_norm": 0.17695528268814087, + "layer_4_max_spectral_norm": 0.027071814984083176, + "layer_5_update_fnorm": 0.13832013309001923, + "layer_5_max_l1_linf_norm": 0.16862022876739502, + "layer_5_max_spectral_norm": 0.024969488382339478, + "layer_6_update_fnorm": 0.13976606726646423, + "layer_6_max_l1_linf_norm": 0.19760459661483765, + "layer_6_max_spectral_norm": 0.02843998186290264, + "layer_7_update_fnorm": 0.14121191203594208, + "layer_7_max_l1_linf_norm": 0.19705970585346222, + "layer_7_max_spectral_norm": 0.026152653619647026, + "layer_8_update_fnorm": 0.14096328616142273, + "layer_8_max_l1_linf_norm": 0.18466442823410034, + "layer_8_max_spectral_norm": 0.025718100368976593, + "layer_9_update_fnorm": 
0.1425647735595703, + "layer_9_max_l1_linf_norm": 0.15707524120807648, + "layer_9_max_spectral_norm": 0.01842482015490532, + "layer_10_update_fnorm": 0.14460872113704681, + "layer_10_max_l1_linf_norm": 0.1506590098142624, + "layer_10_max_spectral_norm": 0.013527142815291882, + "layer_11_update_fnorm": 0.14619210362434387, + "layer_11_max_l1_linf_norm": 0.15407949686050415, + "layer_11_max_spectral_norm": 0.015050284564495087, + "layer_12_update_fnorm": 0.14460796117782593, + "layer_12_max_l1_linf_norm": 0.15997835993766785, + "layer_12_max_spectral_norm": 0.019738126546144485, + "block0_q_update_fnorm": 0.042372316122055054, + "block0_q_max_l1_linf_norm": 0.04734749346971512, + "block0_q_max_spectral_norm": 0.012597575783729553, + "block0_k_update_fnorm": 0.04209234565496445, + "block0_k_max_l1_linf_norm": 0.052966825664043427, + "block0_k_max_spectral_norm": 0.013977395370602608, + "block0_v_update_fnorm": 0.042037483304739, + "block0_v_max_l1_linf_norm": 0.05168541520833969, + "block0_v_max_spectral_norm": 0.012951529584825039, + "block0_o_update_fnorm": 0.04266460984945297, + "block0_o_max_l1_linf_norm": 0.04089886695146561, + "block0_o_max_spectral_norm": 0.009528388269245625, + "block0_mlp_win_update_fnorm": 0.08289950340986252, + "block0_mlp_win_max_l1_linf_norm": 0.05761025846004486, + "block0_mlp_win_max_spectral_norm": 0.012868307530879974, + "block0_mlp_wout_update_fnorm": 0.07918142527341843, + "block0_mlp_wout_max_l1_linf_norm": 0.18231439590454102, + "block0_mlp_wout_max_spectral_norm": 0.025319213047623634, + "block3_q_update_fnorm": 0.041917409747838974, + "block3_q_max_l1_linf_norm": 0.047230709344148636, + "block3_q_max_spectral_norm": 0.012311993166804314, + "block3_k_update_fnorm": 0.041124723851680756, + "block3_k_max_l1_linf_norm": 0.05278825759887695, + "block3_k_max_spectral_norm": 0.012346138246357441, + "block3_v_update_fnorm": 0.038392674177885056, + "block3_v_max_l1_linf_norm": 0.04462940990924835, + "block3_v_max_spectral_norm": 0.00921112671494484, + "block3_o_update_fnorm": 0.03961130604147911, + "block3_o_max_l1_linf_norm": 0.045903280377388, + "block3_o_max_spectral_norm": 0.012489832937717438, + "block3_mlp_win_update_fnorm": 0.08394917845726013, + "block3_mlp_win_max_l1_linf_norm": 0.06052950397133827, + "block3_mlp_win_max_spectral_norm": 0.017865771427750587, + "block3_mlp_wout_update_fnorm": 0.07729000598192215, + "block3_mlp_wout_max_l1_linf_norm": 0.17695528268814087, + "block3_mlp_wout_max_spectral_norm": 0.027071814984083176, + "block7_q_update_fnorm": 0.04223141446709633, + "block7_q_max_l1_linf_norm": 0.04052164778113365, + "block7_q_max_spectral_norm": 0.00604340760037303, + "block7_k_update_fnorm": 0.04166630655527115, + "block7_k_max_l1_linf_norm": 0.04347403347492218, + "block7_k_max_spectral_norm": 0.007184877526015043, + "block7_v_update_fnorm": 0.04004717990756035, + "block7_v_max_l1_linf_norm": 0.04161662235856056, + "block7_v_max_spectral_norm": 0.006741262506693602, + "block7_o_update_fnorm": 0.04102736711502075, + "block7_o_max_l1_linf_norm": 0.039802491664886475, + "block7_o_max_spectral_norm": 0.00717569375410676, + "block7_mlp_win_update_fnorm": 0.08388018608093262, + "block7_mlp_win_max_l1_linf_norm": 0.045030612498521805, + "block7_mlp_win_max_spectral_norm": 0.01094347145408392, + "block7_mlp_wout_update_fnorm": 0.07756434381008148, + "block7_mlp_wout_max_l1_linf_norm": 0.18466442823410034, + "block7_mlp_wout_max_spectral_norm": 0.025718100368976593, + "block11_q_update_fnorm": 0.04242873191833496, + "block11_q_max_l1_linf_norm": 
0.04039379209280014, + "block11_q_max_spectral_norm": 0.006324506364762783, + "block11_k_update_fnorm": 0.04189883917570114, + "block11_k_max_l1_linf_norm": 0.04434067755937576, + "block11_k_max_spectral_norm": 0.006670913193374872, + "block11_v_update_fnorm": 0.04099098592996597, + "block11_v_max_l1_linf_norm": 0.041220180690288544, + "block11_v_max_spectral_norm": 0.008032162673771381, + "block11_o_update_fnorm": 0.04247819259762764, + "block11_o_max_l1_linf_norm": 0.041230931878089905, + "block11_o_max_spectral_norm": 0.009965108707547188, + "block11_mlp_win_update_fnorm": 0.0852568969130516, + "block11_mlp_win_max_l1_linf_norm": 0.0437755212187767, + "block11_mlp_win_max_spectral_norm": 0.01212221384048462, + "block11_mlp_wout_update_fnorm": 0.08117149025201797, + "block11_mlp_wout_max_l1_linf_norm": 0.15997835993766785, + "block11_mlp_wout_max_spectral_norm": 0.019738126546144485, + "total_sharpness": 0.008056272752583027, + "block_total_sharpness": 0.01084946934133768, + "v_norm_block": 0.49174872040748596, + "v_T_H_v_block": 0.0026235838886350393, + "v_norm": 0.5957697629928589, + "ip_v_neg_g_hvp": 0.014149151742458344, + "cos_v_neg_g_hvp": 0.06603246182203293, + "g_hvp_norm": 0.35966193675994873, + "ip_v_neg_g_t": 0.014262601733207703, + "cos_v_neg_g_t": 0.09168603271245956, + "g_t_norm": 0.2611061632633209, + "g_norm": 0.35966193675994873, + "hv_norm": 0.19903992116451263, + "cos_v_hv": 0.024114178493618965, + "hg_norm": 11.507922172546387, + "cos_g_hg": 0.5389235019683838, + "v_parallel_norm": 0.0020753967110067606, + "v_perp_norm": 0.5957661867141724, + "embed_lm_head_v_norm": 0.33634060621261597, + "embed_lm_head_cos_v_neg_g": 0.09798939526081085, + "layer_1_v_norm": 0.14251062273979187, + "layer_1_cos_v_neg_g": 0.12203361839056015, + "layer_2_v_norm": 0.14323127269744873, + "layer_2_cos_v_neg_g": 0.05795705318450928, + "layer_3_v_norm": 0.13952453434467316, + "layer_3_cos_v_neg_g": 0.05103868246078491, + "layer_4_v_norm": 0.13973818719387054, + "layer_4_cos_v_neg_g": 0.055234815925359726, + "layer_5_v_norm": 0.13832013309001923, + "layer_5_cos_v_neg_g": 0.04379188269376755, + "layer_6_v_norm": 0.13976605236530304, + "layer_6_cos_v_neg_g": 0.053720779716968536, + "layer_7_v_norm": 0.14121191203594208, + "layer_7_cos_v_neg_g": 0.061170466244220734, + "layer_8_v_norm": 0.14096328616142273, + "layer_8_cos_v_neg_g": 0.059312768280506134, + "layer_9_v_norm": 0.1425647735595703, + "layer_9_cos_v_neg_g": 0.057636212557554245, + "layer_10_v_norm": 0.14460872113704681, + "layer_10_cos_v_neg_g": 0.07035857439041138, + "layer_11_v_norm": 0.14619211852550507, + "layer_11_cos_v_neg_g": 0.09295081347227097, + "layer_12_v_norm": 0.14460796117782593, + "layer_12_cos_v_neg_g": 0.12740401923656464, + "block0_q_v_norm": 0.042372316122055054, + "block0_q_cos_v_neg_g": 0.15799401700496674, + "block0_k_v_norm": 0.04209234565496445, + "block0_k_cos_v_neg_g": 0.16673678159713745, + "block0_v_v_norm": 0.042037483304739, + "block0_v_cos_v_neg_g": 0.23944541811943054, + "block0_o_v_norm": 0.04266460984945297, + "block0_o_cos_v_neg_g": 0.14266884326934814, + "block0_mlp_win_v_norm": 0.08289950340986252, + "block0_mlp_win_cos_v_neg_g": 0.08331748098134995, + "block0_mlp_wout_v_norm": 0.07918142527341843, + "block0_mlp_wout_cos_v_neg_g": 0.16164439916610718, + "block3_q_v_norm": 0.041917409747838974, + "block3_q_cos_v_neg_g": 0.07279745489358902, + "block3_k_v_norm": 0.041124723851680756, + "block3_k_cos_v_neg_g": 0.047212082892656326, + "block3_v_v_norm": 0.038392674177885056, + "block3_v_cos_v_neg_g": 
0.03329124674201012, + "block3_o_v_norm": 0.03961130604147911, + "block3_o_cos_v_neg_g": 0.15023693442344666, + "block3_mlp_win_v_norm": 0.08394917845726013, + "block3_mlp_win_cos_v_neg_g": 0.05952930450439453, + "block3_mlp_wout_v_norm": 0.07729000598192215, + "block3_mlp_wout_cos_v_neg_g": 0.2199818193912506, + "block7_q_v_norm": 0.04223141446709633, + "block7_q_cos_v_neg_g": 0.06958942115306854, + "block7_k_v_norm": 0.04166630655527115, + "block7_k_cos_v_neg_g": 0.1987675428390503, + "block7_v_v_norm": 0.04004717990756035, + "block7_v_cos_v_neg_g": 0.04516010358929634, + "block7_o_v_norm": 0.04102736711502075, + "block7_o_cos_v_neg_g": 0.21428965032100677, + "block7_mlp_win_v_norm": 0.08388018608093262, + "block7_mlp_win_cos_v_neg_g": 0.07407069951295853, + "block7_mlp_wout_v_norm": 0.07756434381008148, + "block7_mlp_wout_cos_v_neg_g": 0.20201349258422852, + "block11_q_v_norm": 0.04242873191833496, + "block11_q_cos_v_neg_g": 0.1215704083442688, + "block11_k_v_norm": 0.04189883917570114, + "block11_k_cos_v_neg_g": 0.1935325413942337, + "block11_v_v_norm": 0.04099098592996597, + "block11_v_cos_v_neg_g": 0.06886659562587738, + "block11_o_v_norm": 0.04247819259762764, + "block11_o_cos_v_neg_g": 0.23697955906391144, + "block11_mlp_win_v_norm": 0.0852568969130516, + "block11_mlp_win_cos_v_neg_g": 0.1262386590242386, + "block11_mlp_wout_v_norm": 0.08117149025201797, + "block11_mlp_wout_cos_v_neg_g": 0.17697255313396454, + "embed_lm_head_sharpness": 0.00041777544538490474, + "layer_1_sharpness": 0.008150517009198666, + "layer_2_sharpness": 0.0012088149087503552, + "layer_3_sharpness": 0.0013440011534839869, + "layer_4_sharpness": 0.0017372873844578862, + "layer_5_sharpness": 0.0012066661147400737, + "layer_6_sharpness": 0.0025746996980160475, + "layer_7_sharpness": 0.0020788589026778936, + "layer_8_sharpness": 0.004197297617793083, + "layer_9_sharpness": 0.002621781313791871, + "layer_10_sharpness": 0.0012729053851217031, + "layer_11_sharpness": 0.000983205740340054, + "layer_12_sharpness": 0.0023012515157461166, + "block0_q_sharpness": 0.0014576853718608618, + "block0_k_sharpness": 0.0018306864658370614, + "block0_v_sharpness": 0.007201907690614462, + "block0_o_sharpness": 0.0033554069232195616, + "block0_mlp_win_sharpness": 0.0013336921110749245, + "block0_mlp_wout_sharpness": 0.00255579617805779, + "block3_q_sharpness": 0.0009581726626493037, + "block3_k_sharpness": 0.00042394158663228154, + "block3_v_sharpness": 0.003042213385924697, + "block3_o_sharpness": 0.0008672147523611784, + "block3_mlp_win_sharpness": 0.0002588255447335541, + "block3_mlp_wout_sharpness": 0.0005741543718613684, + "block7_q_sharpness": 0.00012749673624057323, + "block7_k_sharpness": 0.00018272774468641728, + "block7_v_sharpness": 0.006750826723873615, + "block7_o_sharpness": 0.00039572513196617365, + "block7_mlp_win_sharpness": 0.0011574032250791788, + "block7_mlp_wout_sharpness": 0.0016341105801984668, + "block11_q_sharpness": 0.00010839918104466051, + "block11_k_sharpness": 0.00017846051196102053, + "block11_v_sharpness": 0.0006173451547510922, + "block11_o_sharpness": 0.00017232594836968929, + "block11_mlp_win_sharpness": 0.000655069132335484, + "block11_mlp_wout_sharpness": 0.0026250940281897783, + "sum_layer_numerators": 0.0005976981050395832, + "block_diag_sharpness": 0.0024716979758950815, + "cross_layer_sharpness": 0.008377771365442599 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/training_log.txt 
b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..8a06f044a6574103c0b5d252df36f2dd6c53cda9 --- /dev/null +++ b/layer_wise_new_code_rand/opt_adam_alr_0.001_mlr_0.01_seed_45_b308fe66-961b-4633-be8c-c20f705665a1/training_log.txt @@ -0,0 +1,11788 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +import nano_GPT_qkvonorm_pure +from nano_GPT_qkvonorm_pure import GPT, GPTConfig + +# Import debug utilities +# from debug_utils import setup_debugpy + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] 
== 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes, + shuffle_files=False, random_seed=None): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + self.shuffle_files = shuffle_files + self.random_seed = random_seed + self._rng = random.Random(random_seed) if shuffle_files and random_seed is not None else None + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + if self.shuffle_files: + self._shuffle_files() + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + next_shard = (self.current_shard + 1) % len(self.files) + if next_shard == 0 and self.shuffle_files: + self._shuffle_files() + self.current_shard = next_shard + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + + def _shuffle_files(self): + if self._rng is not None: + self._rng.shuffle(self.files) + else: + random.shuffle(self.files) + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + 
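+ # note on the block below: the serialization is purely positional (no tensor names are written),
+ # so the C-side reader must consume tensors in exactly this sequence: wte, wpe, then each
+ # parameter type for all L transformer layers in turn, and finally the final layernorm weight/bias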
write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + + all_param_groups["embed_lm_head"] = list(model.lm_head.parameters()) + + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # Add fine-grained params for selected layers (0, 3, 7, 11) + selected_layers = [0, 3, 7, 11] + for layer_idx in selected_layers: + block = blocks[layer_idx] + prefix = f"block{layer_idx}" + # Attention: Q, K, V, O + all_param_groups[f"{prefix}_q"] = [block.attn.q_w.weight] + all_param_groups[f"{prefix}_k"] = [block.attn.k_w.weight] + all_param_groups[f"{prefix}_v"] = [block.attn.v_w.weight] + all_param_groups[f"{prefix}_o"] = [block.attn.c_proj.weight] + # MLP: c_fc (win) and c_proj (wout) + all_param_groups[f"{prefix}_mlp_win"] = [block.mlp.c_fc.weight] + all_param_groups[f"{prefix}_mlp_wout"] = [block.mlp.c_proj.weight] + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + original_flash = nano_GPT_qkvonorm_pure.FLASH + nano_GPT_qkvonorm_pure.FLASH = 0 + print0(f"[Enhanced Sharpness @ Step {step}] Disabled FLASH attention for HVP (was {original_flash})") + + # Get block parameter indices for cross-layer analysis (need this before loop) + block_param_indices = set() + for group_name, param_group in all_param_groups.items(): + if group_name.startswith("layer_"): + for p in param_group: + if id(p) in param_to_idx: + block_param_indices.add(param_to_idx[id(p)]) + + # Initialize accumulators for all quantities we need + grads_hvp = None + hvp_v_total = None + hvp_v_block = None + hvp_g_accum = None + layer_hvp_accum = {} + + + group_names_to_process = [gn for gn, pg in all_param_groups.items() + if pg and any(id(p) in param_to_idx for p in pg)] + + if last_training_batches is not None and len(last_training_batches) > 0: + + batch_iterator = [(x, y) for x, y in last_training_batches] + n_batches = len(batch_iterator) + print0(f"[Enhanced Sharpness @ Step {step}] Using {n_batches} microbatches for HVP (out of {grad_accum_steps} training microbatches)") + restore_loader = False + else: + # Fallback: use new batches from train_loader (should rarely happen) + print0(f"[Enhanced Sharpness @ Step {step}] WARNING: last_training_batches is None/empty, using {grad_accum_steps} new batches (inconsistent)") + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + n_batches = grad_accum_steps # Use same number as training for consistency + batch_iterator = [] + shard_was_changed = False + for _ in range(n_batches): + x_hvp, y_hvp = 
train_loader.next_batch() + batch_iterator.append((x_hvp, y_hvp)) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + restore_loader = True + + + print0(f"[Enhanced Sharpness @ Step {step}] Computing HVPs for {n_batches} microbatches") + for mb_idx, (x_hvp, y_hvp) in enumerate(batch_iterator): + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + + + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + grads_mb = torch.autograd.grad(loss_mb, model.parameters(), create_graph=True, allow_unused=True) + + # Compute H·v (total sharpness) + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_mb, update_direction_v) if g is not None) + + if not isinstance(v_dot_g_total, torch.Tensor): + v_dot_g_total = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_total_mb = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + # Compute H·v_block (block-only sharpness) + if block_param_indices: + v_dot_g_block = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in block_param_indices if grads_mb[i] is not None) + if not isinstance(v_dot_g_block, torch.Tensor): + v_dot_g_block = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_block_mb = torch.autograd.grad(v_dot_g_block, model.parameters(), retain_graph=True, allow_unused=True) + else: + + hvp_v_block_mb = [None] * len(list(model.parameters())) + + + g_dot_g = sum(torch.sum(g * g) for g in grads_mb if g is not None) + if not isinstance(g_dot_g, torch.Tensor): + g_dot_g = torch.tensor(0.0, device=device, requires_grad=True) + + + hvp_g_mb_raw = torch.autograd.grad(g_dot_g, model.parameters(), + retain_graph=True, allow_unused=True) + hvp_g_mb = [h / 2.0 if h is not None else None for h in hvp_g_mb_raw] + + # Compute per-layer H_kk·v_k (for layer-wise sharpness) + for group_idx, group_name in enumerate(group_names_to_process): + param_group = all_param_groups[group_name] + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + is_last_layer = (group_idx == len(group_names_to_process) - 1) + is_last_microbatch = (mb_idx == n_batches - 1) + need_retain = not (is_last_layer and is_last_microbatch) + + try: + v_dot_g_layer = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in indices if grads_mb[i] is not None) + + if not isinstance(v_dot_g_layer, torch.Tensor): + v_dot_g_layer = torch.tensor(0.0, device=device, requires_grad=True) + + hvp_layer_mb = torch.autograd.grad(v_dot_g_layer, model.parameters(), + retain_graph=need_retain, + allow_unused=True) + + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_layer_mb] + else: + layer_hvp_accum[group_name] = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + ] + + # Accumulate layer HVP + # if group_name not in layer_hvp_accum: + # layer_hvp_accum[group_name] = [h.detach() / n_batches if h is not None else None for h in hvp_layer_mb] + # else: + # layer_hvp_accum[group_name] = [ + # (h_acc + h.detach() / n_batches) if (h is not None and h_acc is not None) + # else (h.detach() / n_batches if h is not None else h_acc) + # for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + # ] + # del hvp_layer_mb, v_dot_g_layer + # torch.cuda.empty_cache() + except 
Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error computing layer HVP for '{group_name}' in microbatch {mb_idx}: {e}") + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = None + + # 6. Accumulate all quantities + if grads_hvp is None: + grads_hvp = [(g.detach() / n_batches).cpu() if g is not None else None for g in grads_mb] + hvp_v_total = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_total_mb] + hvp_v_block = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_block_mb] + hvp_g_accum = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_g_mb] + else: + grads_hvp = [ + (g_acc + (g.detach() / n_batches).cpu()) if (g is not None and g_acc is not None) + else ((g.detach() / n_batches).cpu() if g is not None else g_acc) + for g_acc, g in zip(grads_hvp, grads_mb) + ] + hvp_v_total = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_total, hvp_v_total_mb) + ] + hvp_v_block = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_block, hvp_v_block_mb) + ] + hvp_g_accum = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_g_accum, hvp_g_mb) + ] + + + + if mb_idx % max(1, n_batches // 4) == 0: + print0(f"[Enhanced Sharpness @ Step {step}] Processed microbatch {mb_idx + 1}/{n_batches}") + + + if restore_loader: + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + print0(f"[Enhanced Sharpness @ Step {step}] Finished computing all HVPs for {n_batches} microbatches") + grads_hvp = [g.to(device) if g is not None else None for g in grads_hvp] + hvp_v_total = [h.to(device) if h is not None else None for h in hvp_v_total] + hvp_v_block = [h.to(device) if h is not None else None for h in hvp_v_block] + hvp_g_accum = [h.to(device) if h is not None else None for h in hvp_g_accum] + for group_name in layer_hvp_accum: + if layer_hvp_accum[group_name] is not None: + layer_hvp_accum[group_name] = [h.to(device) if h is not None else None for h in layer_hvp_accum[group_name]] + # --- Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + # hvp_v_total is already computed in the loop above + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_v_total, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_total, torch.Tensor): + vhp_dot_v_total = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_total, torch.Tensor): + v_norm_sq_total = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + + print0(f"[Enhanced Sharpness @ Step {step}] Calculating BLOCK-ONLY total sharpness...") + # hvp_v_block is 
already computed in the loop above + if block_param_indices: # Only compute if there are block parameters + # Compute v_block^T H v_block (only sum over block indices) + vhp_dot_v_block = sum(torch.sum(hvp_v_block[i] * update_direction_v[i]) + for i in block_param_indices if hvp_v_block[i] is not None) + + v_norm_sq_block = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in block_param_indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_block, torch.Tensor): + vhp_dot_v_block = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_block, torch.Tensor): + v_norm_sq_block = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_block, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_block, op=dist.ReduceOp.AVG) + + if v_norm_sq_block.item() > 1e-12: + analysis_results["block_total_sharpness"] = (vhp_dot_v_block / v_norm_sq_block).item() + else: + analysis_results["block_total_sharpness"] = 0.0 + + analysis_results["v_norm_block"] = torch.sqrt(v_norm_sq_block).item() + analysis_results["v_T_H_v_block"] = vhp_dot_v_block.item() + else: + # No block parameters + analysis_results["block_total_sharpness"] = 0.0 + analysis_results["v_norm_block"] = 0.0 + analysis_results["v_T_H_v_block"] = 0.0 + + torch.cuda.empty_cache() + + # ---- Alignment metrics between update v and (negative) gradient g ---- + eps = 1e-12 + v_norm = torch.sqrt(v_norm_sq_total + eps) + analysis_results["v_norm"] = v_norm.item() + + # --- Version 1: g_hvp --- + ip_v_neg_g_hvp = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + g_hvp_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + + if not isinstance(ip_v_neg_g_hvp, torch.Tensor): + ip_v_neg_g_hvp = torch.tensor(0.0, device=device) + if not isinstance(g_hvp_norm_sq, torch.Tensor): + g_hvp_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_v_neg_g_hvp, op=dist.ReduceOp.AVG) + dist.all_reduce(g_hvp_norm_sq, op=dist.ReduceOp.AVG) + g_hvp_norm = torch.sqrt(g_hvp_norm_sq + eps) + analysis_results["ip_v_neg_g_hvp"] = ip_v_neg_g_hvp.item() + analysis_results["cos_v_neg_g_hvp"] = (ip_v_neg_g_hvp / (v_norm * g_hvp_norm + eps)).item() + analysis_results["g_hvp_norm"] = g_hvp_norm.item() + + # --- Version 2: g_t (original gradient that produced v) --- + # last_training_gradient is the actual gradient from training that led to the update v + if last_training_gradient is not None: + ip_v_neg_g_t = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, last_training_gradient) if g is not None) + g_t_norm_sq = sum(torch.sum(g * g) for g in last_training_gradient if g is not None) + dist.all_reduce(ip_v_neg_g_t, op=dist.ReduceOp.AVG) + dist.all_reduce(g_t_norm_sq, op=dist.ReduceOp.AVG) + g_t_norm = torch.sqrt(g_t_norm_sq + eps) + analysis_results["ip_v_neg_g_t"] = ip_v_neg_g_t.item() + analysis_results["cos_v_neg_g_t"] = (ip_v_neg_g_t / (v_norm * g_t_norm + eps)).item() + analysis_results["g_t_norm"] = g_t_norm.item() + else: + print0(f"[Enhanced Sharpness @ Step {step}] Warning: last_training_gradient is None, skipping g_t metrics") + + # Keep backward compatibility aliases (g_norm uses g_hvp for now) + g_norm_sq = g_hvp_norm_sq + g_norm = g_hvp_norm + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_v_total if hvp is not None) + if not isinstance(hv_norm_sq, torch.Tensor): + hv_norm_sq = torch.tensor(0.0, device=device) + 
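+ # note: total_sharpness above is the Rayleigh quotient v^T H v / ||v||^2 along the update
+ # direction v, built from the same averaged Hessian-vector product hvp_v_total reused here;
+ # cos_v_hv below measures how much of Hv points back along v (curvature pull along the step)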
dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg ---- + # hvp_g_accum is already computed in the loop above + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_accum) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_accum if hg is not None) + if not isinstance(ip_g_hg, torch.Tensor): + ip_g_hg = torch.tensor(0.0, device=device) + if not isinstance(hg_norm_sq, torch.Tensor): + hg_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + if not isinstance(v_parallel_norm_sq, torch.Tensor): + v_parallel_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(torch.clamp(v_norm_sq_total - v_parallel_norm_sq, min=0.0) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + # Ensure they are tensors + if not isinstance(v_norm_sq_layer, torch.Tensor): + v_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(g_norm_sq_layer, torch.Tensor): + g_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(ip_v_neg_g_layer, torch.Tensor): + ip_v_neg_g_layer = torch.tensor(0.0, device=device) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + if group_name not in layer_hvp_accum or layer_hvp_accum[group_name] is None: + print0(f"[Enhanced Sharpness @ Step {step}] No HVP data for '{group_name}', skipping") + analysis_results[f"{group_name}_sharpness"] = 0.0 + continue + + hvp_group_result = layer_hvp_accum[group_name] + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_group, torch.Tensor): + vhp_dot_v_group = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_group, torch.Tensor): + v_norm_sq_group = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- Calculate block-diagonal approximation and cross-layer interaction --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating block-diagonal and cross-layer sharpness...") + + sum_layer_numerators = 0.0 + for layer in range(1, NUM_LAYERS + 1): + layer_name = f"layer_{layer}" + if f"{layer_name}_sharpness" in analysis_results and f"{layer_name}_v_norm" in analysis_results: + s_k = analysis_results[f"{layer_name}_sharpness"] + v_k_norm = analysis_results[f"{layer_name}_v_norm"] + sum_layer_numerators += s_k * (v_k_norm ** 2) + + analysis_results["sum_layer_numerators"] = sum_layer_numerators + + # Block-diagonal sharpness (using block ||v||²) + v_norm_block = analysis_results.get("v_norm_block", 0) + v_norm_sq_block_val = v_norm_block ** 2 if v_norm_block else 1e-12 + + if v_norm_sq_block_val > 1e-12: + analysis_results["block_diag_sharpness"] = sum_layer_numerators / v_norm_sq_block_val + else: + analysis_results["block_diag_sharpness"] = 0.0 + + # Cross-layer interaction = block_total - block_diag + block_total = analysis_results.get("block_total_sharpness", 0) + block_diag = analysis_results.get("block_diag_sharpness", 0) + analysis_results["cross_layer_sharpness"] = block_total - block_diag + + print0(f"[Enhanced Sharpness @ Step {step}] block_total={block_total:.6f}, block_diag={block_diag:.6f}, cross_layer={block_total - block_diag:.6f}") + + # --- 8. 
Cleanup --- + nano_GPT_qkvonorm_pure.FLASH = original_flash + print0(f"[Enhanced Sharpness @ Step {step}] Restored FLASH attention to {original_flash}") + + print0(f"[Enhanced Sharpness @ Step {step}] Restoring parameters back to θ_{{t+1}}...") + with torch.no_grad(): + for p, v in zip(model.parameters(), update_direction_v): + p.data.add_(v) + + if prev_training_mode: + model.train() + else: + model.eval() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del hvp_v_total, hvp_v_block, hvp_g_accum, layer_hvp_accum + del vhp_dot_v_total, v_norm_sq_total + del vhp_dot_v_block, v_norm_sq_block + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. + """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + + # Version 1: g_hvp (new batch, computed at θ_t during HVP calculation) + if 'cos_v_neg_g_hvp' in results: + misc_parts.append(f"cos_v_-g_hvp:{results['cos_v_neg_g_hvp']:.4e}") + if 'g_hvp_norm' in results: + misc_parts.append(f"g_hvp_norm:{results['g_hvp_norm']:.4e}") + + # 
Version 2: g_t (original gradient that produced v) + if 'cos_v_neg_g_t' in results: + misc_parts.append(f"cos_v_-g_t:{results['cos_v_neg_g_t']:.4e}") + if 'g_t_norm' in results: + misc_parts.append(f"g_t_norm:{results['g_t_norm']:.4e}") + + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d8|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") 
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + parser.add_argument("--shuffle_files", action="store_true") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d8", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # Setup debugpy for remote debugging (only activates if DEBUGPY env var is set) + # setup_debugpy(rank=ddp_rank, force=True) + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + nano_GPT_qkvonorm_pure.FLASH = args.flash # Set module-level FLASH for training + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d8": GPTConfig(block_size=1024, vocab_size=50257, n_layer=8, n_head=8, n_embd=512), + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader( + args.input_bin, B, T, ddp_rank, ddp_world_size, + shuffle_files=args.shuffle_files, random_seed=args.seed + ) + val_loader = None + if 
args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests + if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + base_module = model.module if ddp else model + # If compiled, unwrap to get the original module + if hasattr(base_module, "_orig_mod"): + base_module = base_module._orig_mod + + raw_params = list(raw_model_uncompiled.parameters()) + train_params = list(base_module.parameters()) + + assert len(raw_params) == len(train_params), \ + f"Parameter count mismatch: raw_model_uncompiled has {len(raw_params)}, training model has {len(train_params)}" + for i, (rp, tp) in enumerate(zip(raw_params, train_params)): + assert rp.data_ptr() == tp.data_ptr(), \ + f"Parameter {i} has different data_ptr: raw_model_uncompiled and training model do not share parameters!" + print0(f"[Verified] raw_model_uncompiled and training model share the same {len(raw_params)} Parameter objects") + + last_training_update = None + last_training_gradient = None # Store the original gradient that produced the update + last_training_batches = None # Store ALL microbatches (x, y) for consistent HVP calculation + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
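+        # Hedged illustration of the dim-based split used just below (not part of the original logic):
+        # a Linear weight has dim() == 2 and lands in the decay group, while its bias (dim() == 1) does not, e.g.
+        #   >>> lin = torch.nn.Linear(8, 8)
+        #   >>> lin.weight.dim() >= 2, lin.bias.dim() >= 2
+        #   (True, False)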
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it, base_lr): + min_lr = base_lr * args.lr_decay_frac + cooldown_iters = int(args.num_iterations * 0.2) + # 1) Warmup: linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it + 1) / args.warmup_iters + # 3) Decay: linear decay from base_lr to min_lr in the last 
cooldown_iters steps + cooldown_start = args.num_iterations - cooldown_iters + if it >= cooldown_start: + decay_ratio = (it - cooldown_start) / cooldown_iters + return base_lr - decay_ratio * (base_lr - min_lr) + # 2) Stable: constant learning rate at base_lr + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + for optimizer in optimizers: + if 
isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + last_training_update=last_training_update, # Pass the real update captured from training + last_training_gradient=last_training_gradient, # Pass the original gradient g_t + last_training_batches=last_training_batches # Pass ALL microbatches for consistent HVP + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
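+        # Hedged walk-through of the loop bounds (illustration only): with num_iterations=3 the loop
+        # visits steps 0, 1, 2, 3; steps 0-2 each run a full training step, while step 3 is last_step
+        # and only performs the validation/sampling above before breaking below.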
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + + # Pre-check if we need to collect microbatches for sharpness analysis + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + + microbatches_this_step = [] if will_analyze_sharpness_next else None + + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + + # Store ALL microbatches for memory-efficient HVP calculation + if will_analyze_sharpness_next: + microbatches_this_step.append((x.detach().clone(), y.detach().clone())) + + if ddp: + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + + #no clipping + # norm = torch.nn.utils.clip_grad_norm_(raw_model_uncompiled.parameters(), float('inf')) + + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
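+            # Hedged sketch of what the capture below produces (same names as in this script):
+            #   g_t     = last_training_gradient            (gradients accumulated this step)
+            #   theta_t = params_before_optimizer_step      (cloned before optimizer.step())
+            #   v       = last_training_update = theta_{t+1} - theta_t, taken after optimizer.step(),
+            # so v is the realized update that the sharpness analysis compares against g_t
+            # (e.g. the cos_v_-g_t entries in the log lines further down).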
+ print(raw_model_uncompiled.transformer.h[0].attn.q_w.weight[:5,:5]) + params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + # Save the original gradient g_t that will produce the update v + last_training_gradient = [ + p.grad.detach().clone() if p.grad is not None else torch.zeros_like(p) + for p in raw_model_uncompiled.parameters() + ] + # Capture ALL microbatches for consistent HVP calculation + # This ensures H is computed on the exact same objective as g_t and v + last_training_batches = microbatches_this_step # Already cloned above + else: + params_before_optimizer_step = None + last_training_batches = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p.detach() - p_before + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group()step:0 validation loss:11.020914 +step:0 train loss:11.022923 +step:1 train loss:10.940911 +step:2 train loss:10.782557 +step:3 train loss:10.574799 +step:4 train loss:10.394936 +step:5 train loss:10.227157 +step:6 train loss:10.089303 +step:7 train loss:10.002908 +step:8 train loss:9.888704 +step:9 train loss:9.825450 +step:10 train loss:9.760768 +step:11 train loss:9.722158 +step:12 train loss:9.695316 +step:13 train loss:9.635995 +step:14 train loss:9.633037 +step:15 train loss:9.602772 +step:16 train loss:9.584594 +step:17 train loss:9.559498 +step:18 train loss:9.539545 +step:19 train loss:9.496760 +step:20 train loss:9.486456 +step:21 train loss:9.435962 +step:22 train loss:9.394182 +step:23 train 
loss:9.372840 +step:24 train loss:9.298489 +step:25 train loss:9.279387 +step:26 train loss:9.255435 +step:27 train loss:9.202706 +step:28 train loss:9.114538 +step:29 train loss:9.153698 +step:30 train loss:9.040162 +step:31 train loss:9.001989 +step:32 train loss:8.926846 +step:33 train loss:8.913911 +step:34 train loss:8.897972 +step:35 train loss:8.837861 +step:36 train loss:8.782014 +step:37 train loss:8.719945 +step:38 train loss:8.734854 +step:39 train loss:8.636807 +step:40 train loss:8.561672 +step:41 train loss:8.541317 +step:42 train loss:8.505679 +step:43 train loss:8.492922 +step:44 train loss:8.422474 +step:45 train loss:8.370419 +step:46 train loss:8.342919 +step:47 train loss:8.327437 +step:48 train loss:8.230953 +step:49 train loss:8.177289 +step:50 train loss:8.102407 +step:51 train loss:8.098219 +step:52 train loss:8.087238 +step:53 train loss:8.037401 +step:54 train loss:8.026382 +step:55 train loss:7.974658 +step:56 train loss:7.908828 +step:57 train loss:7.864098 +step:58 train loss:7.845258 +step:59 train loss:7.782445 +step:60 train loss:7.753599 +step:61 train loss:7.703329 +step:62 train loss:7.633887 +step:63 train loss:7.660844 +step:64 train loss:7.616975 +step:65 train loss:7.511135 +step:66 train loss:7.519092 +step:67 train loss:7.449054 +step:68 train loss:7.452879 +step:69 train loss:7.432315 +step:70 train loss:7.386244 +step:71 train loss:7.302228 +step:72 train loss:7.317793 +step:73 train loss:7.324233 +step:74 train loss:7.242147 +step:75 train loss:7.270607 +step:76 train loss:7.190233 +step:77 train loss:7.223390 +step:78 train loss:7.224227 +step:79 train loss:7.168193 +step:80 train loss:7.135220 +step:81 train loss:7.200396 +step:82 train loss:7.148789 +step:83 train loss:7.092612 +step:84 train loss:7.065236 +step:85 train loss:7.044433 +step:86 train loss:6.996178 +step:87 train loss:7.044689 +step:88 train loss:7.060214 +step:89 train loss:7.016804 +step:90 train loss:7.034381 +step:91 train loss:6.972364 +step:92 train loss:7.008636 +step:93 train loss:6.952287 +step:94 train loss:6.974030 +step:95 train loss:6.940743 +step:96 train loss:6.913542 +step:97 train loss:6.893144 +step:98 train loss:6.883737 +step:99 train loss:6.899306 +step:100 train loss:6.750422 +step:101 train loss:6.776146 +step:102 train loss:6.824894 +step:103 train loss:6.820837 +step:104 train loss:6.787557 +step:105 train loss:6.806168 +step:106 train loss:6.762933 +step:107 train loss:6.872108 +step:108 train loss:6.790085 +step:109 train loss:6.723820 +step:110 train loss:6.686577 +step:111 train loss:6.749054 +step:112 train loss:6.815083 +step:113 train loss:6.687571 +step:114 train loss:6.712815 +step:115 train loss:6.725850 +step:116 train loss:6.751313 +step:117 train loss:6.699966 +step:118 train loss:6.660129 +step:119 train loss:6.693299 +step:120 train loss:6.686337 +step:121 train loss:6.544958 +step:122 train loss:6.479625 +step:123 train loss:6.667299 +step:124 train loss:6.543469 +step:125 train loss:6.535884 +step:126 train loss:6.606625 +step:127 train loss:6.516934 +step:128 train loss:6.521770 +step:129 train loss:6.549417 +step:130 train loss:6.508154 +step:131 train loss:6.460354 +step:132 train loss:6.515322 +step:133 train loss:6.460588 +step:134 train loss:6.513415 +step:135 train loss:6.481494 +step:136 train loss:6.529848 +step:137 train loss:6.529160 +step:138 train loss:6.459960 +step:139 train loss:6.413277 +step:140 train loss:6.402904 +step:141 train loss:6.409011 +step:142 train loss:6.417142 +step:143 train loss:6.473841 +step:144 train 
loss:6.415424 +step:145 train loss:6.477232 +step:146 train loss:6.460371 +step:147 train loss:6.405383 +step:148 train loss:6.413648 +step:149 train loss:6.351867 +step:150 train loss:6.430499 +step:151 train loss:6.371810 +step:152 train loss:6.359980 +step:153 train loss:6.352904 +step:154 train loss:6.412978 +step:155 train loss:6.341211 +step:156 train loss:6.341303 +step:157 train loss:6.334659 +step:158 train loss:6.372337 +step:159 train loss:6.383482 +step:160 train loss:6.324106 +step:161 train loss:6.347528 +step:162 train loss:6.324076 +step:163 train loss:6.318801 +step:164 train loss:6.320951 +step:165 train loss:6.334013 +step:166 train loss:6.233709 +step:167 train loss:6.310631 +step:168 train loss:6.288994 +step:169 train loss:6.279792 +step:170 train loss:6.276671 +step:171 train loss:6.278581 +step:172 train loss:6.305984 +step:173 train loss:6.304229 +step:174 train loss:6.273316 +step:175 train loss:6.268032 +step:176 train loss:6.217716 +step:177 train loss:6.204908 +step:178 train loss:6.274304 +step:179 train loss:6.175117 +step:180 train loss:6.263783 +step:181 train loss:6.281218 +step:182 train loss:6.253207 +step:183 train loss:6.216519 +step:184 train loss:6.217050 +step:185 train loss:6.264518 +step:186 train loss:6.224843 +step:187 train loss:6.260315 +step:188 train loss:6.252692 +step:189 train loss:6.274634 +step:190 train loss:6.252544 +step:191 train loss:6.231743 +step:192 train loss:6.117159 +step:193 train loss:6.229366 +step:194 train loss:6.121339 +step:195 train loss:6.089854 +step:196 train loss:6.144424 +step:197 train loss:6.205705 +step:198 train loss:6.193326 +step:199 train loss:6.194337 +step:200 train loss:6.160938 +step:201 train loss:6.183838 +step:202 train loss:6.122082 +step:203 train loss:6.165639 +step:204 train loss:6.142175 +step:205 train loss:6.146211 +step:206 train loss:6.159826 +step:207 train loss:6.130290 +step:208 train loss:6.153118 +step:209 train loss:6.120560 +step:210 train loss:6.082479 +step:211 train loss:6.116026 +step:212 train loss:6.115414 +step:213 train loss:6.105695 +step:214 train loss:6.069640 +step:215 train loss:6.078578 +step:216 train loss:6.181309 +step:217 train loss:6.071005 +step:218 train loss:6.072511 +step:219 train loss:6.117615 +step:220 train loss:6.077429 +step:221 train loss:6.036514 +step:222 train loss:6.081440 +step:223 train loss:6.079333 +step:224 train loss:6.075272 +step:225 train loss:6.004979 +step:226 train loss:6.084621 +step:227 train loss:6.048774 +step:228 train loss:6.086451 +step:229 train loss:6.087188 +step:230 train loss:6.035034 +step:231 train loss:6.021000 +step:232 train loss:6.071829 +step:233 train loss:6.038372 +step:234 train loss:6.042390 +step:235 train loss:6.018773 +step:236 train loss:6.039710 +step:237 train loss:6.058770 +step:238 train loss:6.005401 +step:239 train loss:6.005595 +step:240 train loss:5.934138 +step:241 train loss:6.028712 +step:242 train loss:6.004340 +step:243 train loss:6.024318 +step:244 train loss:5.999624 +step:245 train loss:6.002347 +step:246 train loss:5.935881 +step:247 train loss:5.976734 +step:248 train loss:5.944094 +step:249 train loss:5.970355 +step:250 validation loss:6.007081 +step:250 train loss:5.986445 +step:251 train loss:5.988804 +step:252 train loss:5.976268 +step:253 train loss:5.991528 +step:254 train loss:5.916417 +step:255 train loss:5.940211 +step:256 train loss:5.872519 +step:257 train loss:5.970458 +step:258 train loss:6.017273 +step:259 train loss:5.924368 +step:260 train loss:5.958876 +step:261 train 
loss:5.921111 +step:262 train loss:5.938430 +step:263 train loss:5.869564 +step:264 train loss:5.913233 +step:265 train loss:5.957623 +step:266 train loss:5.923253 +step:267 train loss:5.980265 +step:268 train loss:5.926081 +step:269 train loss:5.914771 +step:270 train loss:5.884322 +step:271 train loss:5.942665 +step:272 train loss:5.891598 +step:273 train loss:5.889032 +step:274 train loss:5.911115 +step:275 train loss:5.951732 +step:276 train loss:5.941729 +step:277 train loss:5.891877 +step:278 train loss:5.821993 +step:279 train loss:5.876267 +step:280 train loss:5.856813 +step:281 train loss:5.874530 +step:282 train loss:5.776052 +step:283 train loss:5.875964 +step:284 train loss:5.909265 +step:285 train loss:5.874382 +step:286 train loss:5.920499 +step:287 train loss:5.876130 +step:288 train loss:5.933496 +step:289 train loss:5.859081 +step:290 train loss:5.844134 +step:291 train loss:5.931844 +step:292 train loss:5.899637 +step:293 train loss:5.881370 +step:294 train loss:5.870905 +step:295 train loss:5.875544 +step:296 train loss:5.806860 +step:297 train loss:5.831172 +step:298 train loss:5.849290 +step:299 train loss:5.909649 +step:300 train loss:5.824463 +step:301 train loss:5.792143 +step:302 train loss:5.829454 +step:303 train loss:5.826611 +step:304 train loss:5.790762 +step:305 train loss:5.784919 +step:306 train loss:5.833849 +step:307 train loss:5.818426 +step:308 train loss:5.807770 +step:309 train loss:5.843327 +step:310 train loss:5.882392 +step:311 train loss:5.796086 +step:312 train loss:5.848765 +step:313 train loss:5.820163 +step:314 train loss:5.817934 +step:315 train loss:5.833298 +step:316 train loss:5.829249 +step:317 train loss:5.806681 +step:318 train loss:5.841486 +step:319 train loss:5.785355 +step:320 train loss:5.845935 +step:321 train loss:5.804415 +step:322 train loss:5.788249 +step:323 train loss:5.801457 +step:324 train loss:5.772490 +step:325 train loss:5.797523 +step:326 train loss:5.782838 +step:327 train loss:5.746254 +step:328 train loss:5.736540 +step:329 train loss:5.788116 +step:330 train loss:5.844589 +step:331 train loss:5.849187 +step:332 train loss:5.716542 +step:333 train loss:5.776114 +step:334 train loss:5.717112 +step:335 train loss:5.759642 +step:336 train loss:5.771858 +step:337 train loss:5.815908 +step:338 train loss:5.725073 +step:339 train loss:5.770765 +step:340 train loss:5.769457 +step:341 train loss:5.732840 +step:342 train loss:5.748930 +step:343 train loss:5.743362 +step:344 train loss:5.740058 +step:345 train loss:5.772359 +step:346 train loss:5.714961 +step:347 train loss:5.732242 +step:348 train loss:5.641799 +step:349 train loss:5.741906 +step:350 train loss:5.669247 +step:351 train loss:5.743448 +step:352 train loss:5.671403 +step:353 train loss:5.641610 +step:354 train loss:5.738551 +step:355 train loss:5.677132 +step:356 train loss:5.686129 +step:357 train loss:5.713307 +step:358 train loss:5.661626 +step:359 train loss:5.683300 +step:360 train loss:5.676486 +step:361 train loss:5.683086 +step:362 train loss:5.710306 +step:363 train loss:5.732545 +step:364 train loss:5.807077 +step:365 train loss:5.749321 +step:366 train loss:5.706432 +step:367 train loss:5.720984 +step:368 train loss:5.670223 +step:369 train loss:5.728469 +step:370 train loss:5.709250 +step:371 train loss:5.668459 +step:372 train loss:5.716699 +step:373 train loss:5.694426 +step:374 train loss:5.698766 +step:375 train loss:5.673959 +step:376 train loss:5.668785 +step:377 train loss:5.685427 +step:378 train loss:5.675014 +step:379 train loss:5.668880 
+step:380 train loss:5.666991 +step:381 train loss:5.655082 +step:382 train loss:5.654219 +step:383 train loss:5.671744 +step:384 train loss:5.641021 +step:385 train loss:5.645590 +step:386 train loss:5.599587 +step:387 train loss:5.676223 +step:388 train loss:5.634662 +step:389 train loss:5.601382 +step:390 train loss:5.595348 +step:391 train loss:5.627402 +step:392 train loss:5.645447 +step:393 train loss:5.553495 +step:394 train loss:5.596231 +step:395 train loss:5.580050 +step:396 train loss:5.614195 +step:397 train loss:5.513305 +step:398 train loss:5.648664 +step:399 train loss:5.625460 +step:400 train loss:5.653508 +step:401 train loss:5.557540 +step:402 train loss:5.600239 +step:403 train loss:5.561351 +step:404 train loss:5.636475 +step:405 train loss:5.586010 +step:406 train loss:5.556811 +step:407 train loss:5.576928 +step:408 train loss:5.543480 +step:409 train loss:5.598422 +step:410 train loss:5.563380 +step:411 train loss:5.617928 +step:412 train loss:5.586744 +step:413 train loss:5.517964 +step:414 train loss:5.522964 +step:415 train loss:5.607810 +step:416 train loss:5.583215 +step:417 train loss:5.590734 +step:418 train loss:5.587310 +step:419 train loss:5.592172 +step:420 train loss:5.580529 +step:421 train loss:5.588320 +step:422 train loss:5.544404 +step:423 train loss:5.557934 +step:424 train loss:5.583052 +step:425 train loss:5.507345 +step:426 train loss:5.604358 +step:427 train loss:5.459242 +step:428 train loss:5.559739 +step:429 train loss:5.546385 +step:430 train loss:5.601068 +step:431 train loss:5.487722 +step:432 train loss:5.569877 +step:433 train loss:5.457901 +step:434 train loss:5.544853 +step:435 train loss:5.473037 +step:436 train loss:5.558007 +step:437 train loss:5.494479 +step:438 train loss:5.601243 +step:439 train loss:5.521643 +step:440 train loss:5.640078 +step:441 train loss:5.519680 +step:442 train loss:5.535760 +step:443 train loss:5.565726 +step:444 train loss:5.434361 +step:445 train loss:5.503775 +step:446 train loss:5.522635 +step:447 train loss:5.516638 +step:448 train loss:5.491282 +step:449 train loss:5.569002 +step:450 train loss:5.463281 +step:451 train loss:5.548919 +step:452 train loss:5.474891 +step:453 train loss:5.504493 +step:454 train loss:5.458753 +step:455 train loss:5.497141 +step:456 train loss:5.489549 +step:457 train loss:5.474687 +step:458 train loss:5.462746 +step:459 train loss:5.472116 +step:460 train loss:5.471699 +step:461 train loss:5.495523 +step:462 train loss:5.404348 +step:463 train loss:5.425218 +step:464 train loss:5.580759 +step:465 train loss:5.470654 +step:466 train loss:5.514356 +step:467 train loss:5.357575 +step:468 train loss:5.488995 +step:469 train loss:5.451249 +step:470 train loss:5.447095 +step:471 train loss:5.404829 +step:472 train loss:5.490647 +step:473 train loss:5.461050 +step:474 train loss:5.525259 +step:475 train loss:5.373921 +step:476 train loss:5.434029 +step:477 train loss:5.341230 +step:478 train loss:5.386981 +step:479 train loss:5.354163 +step:480 train loss:5.462024 +step:481 train loss:5.455630 +step:482 train loss:5.448833 +step:483 train loss:5.408997 +step:484 train loss:5.337455 +step:485 train loss:5.473812 +step:486 train loss:5.364227 +step:487 train loss:5.446902 +step:488 train loss:5.438700 +step:489 train loss:5.488753 +step:490 train loss:5.416672 +step:491 train loss:5.419174 +step:492 train loss:5.361409 +step:493 train loss:5.455046 +step:494 train loss:5.429006 +step:495 train loss:5.409595 +step:496 train loss:5.416568 +step:497 train loss:5.427306 +step:498 
train loss:5.457312 +step:499 train loss:5.454523 +step:500 validation loss:5.464614 total_sharp:1.8090e-01 L1_sharp:5.0976e-01 L2_sharp:5.1436e-02 L3_sharp:4.1004e-02 L4_sharp:2.6740e-02 L5_sharp:2.7437e-02 L6_sharp:2.2577e-02 L7_sharp:2.4750e-02 L8_sharp:1.6811e-02 L9_sharp:1.3779e-02 L10_sharp:7.4205e-03 L11_sharp:7.8046e-03 L12_sharp:2.0047e-02 total_fnorm:1.4061e+00 total_l1_linf:1.2254e+04 total_spectral:1.4061e+00 L1_fnorm:2.6890e-01 L2_fnorm:2.7155e-01 L3_fnorm:2.8082e-01 L4_fnorm:2.8364e-01 L5_fnorm:2.8431e-01 L6_fnorm:2.9105e-01 L7_fnorm:3.0651e-01 L8_fnorm:3.1799e-01 L9_fnorm:3.2133e-01 L10_fnorm:3.1536e-01 L11_fnorm:3.1131e-01 L12_fnorm:2.9763e-01 L1_l1linf:4.5082e-01 L2_l1linf:3.3650e-01 L3_l1linf:3.6241e-01 L4_l1linf:3.6427e-01 L5_l1linf:3.4558e-01 L6_l1linf:3.9614e-01 L7_l1linf:3.8845e-01 L8_l1linf:3.9740e-01 L9_l1linf:3.8512e-01 L10_l1linf:3.7557e-01 L11_l1linf:4.1650e-01 L12_l1linf:3.8137e-01 L1_spectral:6.8640e-02 L2_spectral:5.8609e-02 L3_spectral:5.8545e-02 L4_spectral:6.7208e-02 L5_spectral:7.6583e-02 L6_spectral:7.5607e-02 L7_spectral:7.1978e-02 L8_spectral:6.8227e-02 L9_spectral:6.4166e-02 L10_spectral:6.5225e-02 L11_spectral:7.2922e-02 L12_spectral:9.1340e-02 v_norm:1.4061e+00 cos_v_-g_hvp:1.6245e-01 g_hvp_norm:7.8196e-01 cos_v_-g_t:1.6939e-01 g_t_norm:7.4946e-01 hv_norm:1.6826e+00 cos_v_hv:1.5118e-01 hg_norm:1.0556e+01 cos_g_hg:8.2226e-01 v_par:1.3170e-02 v_perp:1.4060e+00 L1_cos_v_neg_g:2.8774e-01 L1_v_norm:2.6890e-01 L2_cos_v_neg_g:3.0483e-01 L2_v_norm:2.7155e-01 L3_cos_v_neg_g:2.9814e-01 L3_v_norm:2.8082e-01 L4_cos_v_neg_g:3.0135e-01 L4_v_norm:2.8364e-01 L5_cos_v_neg_g:2.9431e-01 L5_v_norm:2.8431e-01 L6_cos_v_neg_g:2.8886e-01 L6_v_norm:2.9105e-01 L7_cos_v_neg_g:3.1050e-01 L7_v_norm:3.0651e-01 L8_cos_v_neg_g:3.3071e-01 L8_v_norm:3.1799e-01 L9_cos_v_neg_g:2.7126e-01 L9_v_norm:3.2133e-01 L10_cos_v_neg_g:2.8403e-01 L10_v_norm:3.1536e-01 L11_cos_v_neg_g:2.6810e-01 L11_v_norm:3.1131e-01 L12_cos_v_neg_g:2.5293e-01 L12_v_norm:2.9763e-01 +step:500 train loss:5.435794 +step:501 train loss:5.431766 +step:502 train loss:5.433019 +step:503 train loss:5.447366 +step:504 train loss:5.411199 +step:505 train loss:5.457380 +step:506 train loss:5.411667 +step:507 train loss:5.414048 +step:508 train loss:5.371136 +step:509 train loss:5.423559 +step:510 train loss:5.350664 +step:511 train loss:5.368139 +step:512 train loss:5.331748 +step:513 train loss:5.341966 +step:514 train loss:5.364935 +step:515 train loss:5.355782 +step:516 train loss:5.283457 +step:517 train loss:5.360837 +step:518 train loss:5.393634 +step:519 train loss:5.416598 +step:520 train loss:5.361151 +step:521 train loss:5.324719 +step:522 train loss:5.316842 +step:523 train loss:5.372781 +step:524 train loss:5.348797 +step:525 train loss:5.394373 +step:526 train loss:5.286778 +step:527 train loss:5.317671 +step:528 train loss:5.303278 +step:529 train loss:5.368252 +step:530 train loss:5.334764 +step:531 train loss:5.345864 +step:532 train loss:5.374488 +step:533 train loss:5.283498 +step:534 train loss:5.360626 +step:535 train loss:5.258132 +step:536 train loss:5.315641 +step:537 train loss:5.236885 +step:538 train loss:5.317672 +step:539 train loss:5.258889 +step:540 train loss:5.300973 +step:541 train loss:5.284562 +step:542 train loss:5.397739 +step:543 train loss:5.274705 +step:544 train loss:5.356853 +step:545 train loss:5.316624 +step:546 train loss:5.308156 +step:547 train loss:5.224744 +step:548 train loss:5.274626 +step:549 train loss:5.214034 +step:550 train loss:5.276945 +step:551 train loss:5.179437 
+step:552 train loss:5.302115 +step:553 train loss:5.165274 +step:554 train loss:5.364296 +step:555 train loss:5.300357 +step:556 train loss:5.254099 +step:557 train loss:5.265605 +step:558 train loss:5.232100 +step:559 train loss:5.213682 +step:560 train loss:5.201107 +step:561 train loss:5.181767 +step:562 train loss:5.220174 +step:563 train loss:5.186643 +step:564 train loss:5.232949 +step:565 train loss:5.242298 +step:566 train loss:5.256658 +step:567 train loss:5.343660 +step:568 train loss:5.264514 +step:569 train loss:5.287045 +step:570 train loss:5.303216 +step:571 train loss:5.255345 +step:572 train loss:5.232234 +step:573 train loss:5.268086 +step:574 train loss:5.471708 +step:575 train loss:5.201785 +step:576 train loss:5.173609 +step:577 train loss:5.158874 +step:578 train loss:5.251344 +step:579 train loss:5.224458 +step:580 train loss:5.258063 +step:581 train loss:5.291742 +step:582 train loss:5.213470 +step:583 train loss:5.221584 +step:584 train loss:5.220088 +step:585 train loss:5.221473 +step:586 train loss:5.148751 +step:587 train loss:5.186863 +step:588 train loss:5.193380 +step:589 train loss:5.152471 +step:590 train loss:5.230997 +step:591 train loss:5.179323 +step:592 train loss:5.182834 +step:593 train loss:5.158670 +step:594 train loss:5.130988 +step:595 train loss:5.176217 +step:596 train loss:5.145827 +step:597 train loss:5.097722 +step:598 train loss:5.081898 +step:599 train loss:5.091213 +step:600 train loss:5.161621 +step:601 train loss:5.140455 +step:602 train loss:5.156354 +step:603 train loss:5.173818 +step:604 train loss:5.226220 +step:605 train loss:5.134812 +step:606 train loss:5.140422 +step:607 train loss:5.146393 +step:608 train loss:5.138957 +step:609 train loss:5.054525 +step:610 train loss:5.118967 +step:611 train loss:5.229623 +step:612 train loss:5.056790 +step:613 train loss:5.147021 +step:614 train loss:5.047384 +step:615 train loss:5.140151 +step:616 train loss:5.075365 +step:617 train loss:5.025138 +step:618 train loss:5.112253 +step:619 train loss:5.074352 +step:620 train loss:5.135319 +step:621 train loss:5.058384 +step:622 train loss:5.053076 +step:623 train loss:5.010789 +step:624 train loss:5.080357 +step:625 train loss:5.059249 +step:626 train loss:5.051764 +step:627 train loss:4.990253 +step:628 train loss:5.050312 +step:629 train loss:5.080527 +step:630 train loss:5.010857 +step:631 train loss:5.057000 +step:632 train loss:5.070423 +step:633 train loss:5.073502 +step:634 train loss:5.037304 +step:635 train loss:5.094211 +step:636 train loss:5.030246 +step:637 train loss:5.074115 +step:638 train loss:5.088521 +step:639 train loss:5.076292 +step:640 train loss:5.034944 +step:641 train loss:5.039873 +step:642 train loss:5.059819 +step:643 train loss:5.183814 +step:644 train loss:5.046855 +step:645 train loss:5.004965 +step:646 train loss:5.035377 +step:647 train loss:5.029189 +step:648 train loss:5.130746 +step:649 train loss:4.982181 +step:650 train loss:4.990232 +step:651 train loss:5.049085 +step:652 train loss:5.073815 +step:653 train loss:4.999611 +step:654 train loss:5.025856 +step:655 train loss:4.999592 +step:656 train loss:4.996766 +step:657 train loss:4.935292 +step:658 train loss:5.047809 +step:659 train loss:4.982710 +step:660 train loss:4.922521 +step:661 train loss:4.916289 +step:662 train loss:4.962731 +step:663 train loss:4.961935 +step:664 train loss:4.920562 +step:665 train loss:4.968726 +step:666 train loss:4.944061 +step:667 train loss:4.836987 +step:668 train loss:4.930492 +step:669 train loss:4.933692 +step:670 
train loss:4.927019 +step:671 train loss:4.947260 +step:672 train loss:4.926677 +step:673 train loss:5.106537 +step:674 train loss:5.194753 +step:675 train loss:5.093052 +step:676 train loss:5.117164 +step:677 train loss:5.080629 +step:678 train loss:5.098585 +step:679 train loss:5.109535 +step:680 train loss:4.992693 +step:681 train loss:5.066961 +step:682 train loss:5.105790 +step:683 train loss:5.203224 +step:684 train loss:5.069799 +step:685 train loss:5.048430 +step:686 train loss:5.022998 +step:687 train loss:5.044613 +step:688 train loss:4.938105 +step:689 train loss:5.026649 +step:690 train loss:4.999833 +step:691 train loss:5.025085 +step:692 train loss:5.153343 +step:693 train loss:5.030387 +step:694 train loss:4.988936 +step:695 train loss:4.972615 +step:696 train loss:4.956165 +step:697 train loss:4.971465 +step:698 train loss:5.021816 +step:699 train loss:4.912973 +step:700 train loss:4.948959 +step:701 train loss:4.954943 +step:702 train loss:4.939379 +step:703 train loss:4.919886 +step:704 train loss:4.895821 +step:705 train loss:4.907262 +step:706 train loss:4.918909 +step:707 train loss:4.904057 +step:708 train loss:4.871511 +step:709 train loss:4.880362 +step:710 train loss:4.782770 +step:711 train loss:4.873140 +step:712 train loss:4.906890 +step:713 train loss:4.914427 +step:714 train loss:4.851078 +step:715 train loss:4.934077 +step:716 train loss:4.877390 +step:717 train loss:4.814194 +step:718 train loss:4.879029 +step:719 train loss:4.951973 +step:720 train loss:4.851114 +step:721 train loss:4.851220 +step:722 train loss:4.862164 +step:723 train loss:4.781975 +step:724 train loss:4.790186 +step:725 train loss:4.771755 +step:726 train loss:4.844948 +step:727 train loss:4.775874 +step:728 train loss:4.816132 +step:729 train loss:4.801353 +step:730 train loss:4.837860 +step:731 train loss:4.834590 +step:732 train loss:4.820980 +step:733 train loss:4.801273 +step:734 train loss:4.834996 +step:735 train loss:4.837288 +step:736 train loss:4.742289 +step:737 train loss:4.799813 +step:738 train loss:4.746512 +step:739 train loss:4.799494 +step:740 train loss:4.750149 +step:741 train loss:4.825232 +step:742 train loss:4.699893 +step:743 train loss:4.738871 +step:744 train loss:4.733933 +step:745 train loss:4.769301 +step:746 train loss:4.798395 +step:747 train loss:4.628254 +step:748 train loss:4.868313 +step:749 train loss:4.816453 +step:750 validation loss:4.781728 +step:750 train loss:4.760883 +step:751 train loss:4.843709 +step:752 train loss:4.734973 +step:753 train loss:4.706005 +step:754 train loss:4.806556 +step:755 train loss:4.790107 +step:756 train loss:4.757897 +step:757 train loss:4.797689 +step:758 train loss:4.834367 +step:759 train loss:4.783572 +step:760 train loss:4.825921 +step:761 train loss:4.756951 +step:762 train loss:4.886361 +step:763 train loss:4.789600 +step:764 train loss:4.786497 +step:765 train loss:4.703318 +step:766 train loss:4.753939 +step:767 train loss:4.768051 +step:768 train loss:4.765606 +step:769 train loss:4.818432 +step:770 train loss:4.785418 +step:771 train loss:4.757921 +step:772 train loss:4.740490 +step:773 train loss:4.714582 +step:774 train loss:4.670106 +step:775 train loss:4.707207 +step:776 train loss:4.688389 +step:777 train loss:4.711199 +step:778 train loss:4.681263 +step:779 train loss:4.724875 +step:780 train loss:4.692385 +step:781 train loss:4.643075 +step:782 train loss:4.716117 +step:783 train loss:4.654815 +step:784 train loss:4.703229 +step:785 train loss:4.739404 +step:786 train loss:4.664066 +step:787 train 
loss:4.723528 +step:788 train loss:4.664526 +step:789 train loss:4.630368 +step:790 train loss:4.652102 +step:791 train loss:4.621632 +step:792 train loss:4.647851 +step:793 train loss:4.640368 +step:794 train loss:4.668762 +step:795 train loss:4.635921 +step:796 train loss:4.689741 +step:797 train loss:4.648876 +step:798 train loss:4.630578 +step:799 train loss:4.629406 +step:800 train loss:4.553210 +step:801 train loss:4.642861 +step:802 train loss:4.589986 +step:803 train loss:4.582117 +step:804 train loss:4.601169 +step:805 train loss:4.621654 +step:806 train loss:4.547771 +step:807 train loss:4.628487 +step:808 train loss:4.656928 +step:809 train loss:4.589715 +step:810 train loss:4.666895 +step:811 train loss:4.629973 +step:812 train loss:4.662506 +step:813 train loss:4.564016 +step:814 train loss:4.647518 +step:815 train loss:4.627192 +step:816 train loss:4.604545 +step:817 train loss:4.622948 +step:818 train loss:4.553091 +step:819 train loss:4.612583 +step:820 train loss:4.656166 +step:821 train loss:4.694215 +step:822 train loss:4.593688 +step:823 train loss:4.614507 +step:824 train loss:4.562002 +step:825 train loss:4.633106 +step:826 train loss:4.562151 +step:827 train loss:4.591813 +step:828 train loss:4.598132 +step:829 train loss:4.530500 +step:830 train loss:4.593923 +step:831 train loss:4.576380 +step:832 train loss:4.581359 +step:833 train loss:4.607010 +step:834 train loss:4.583073 +step:835 train loss:4.558449 +step:836 train loss:4.594339 +step:837 train loss:4.552632 +step:838 train loss:4.503183 +step:839 train loss:4.578877 +step:840 train loss:4.536073 +step:841 train loss:4.596928 +step:842 train loss:4.556181 +step:843 train loss:4.527193 +step:844 train loss:4.533702 +step:845 train loss:4.524210 +step:846 train loss:4.505482 +step:847 train loss:4.500072 +step:848 train loss:4.730745 +step:849 train loss:4.473947 +step:850 train loss:4.506480 +step:851 train loss:4.521267 +step:852 train loss:4.497287 +step:853 train loss:4.501052 +step:854 train loss:4.517055 +step:855 train loss:4.507698 +step:856 train loss:4.445236 +step:857 train loss:4.468610 +step:858 train loss:4.525566 +step:859 train loss:4.524630 +step:860 train loss:4.455710 +step:861 train loss:4.558601 +step:862 train loss:4.632000 +step:863 train loss:4.570595 +step:864 train loss:4.593471 +step:865 train loss:4.559829 +step:866 train loss:4.517093 +step:867 train loss:4.566742 +step:868 train loss:4.555470 +step:869 train loss:4.543620 +step:870 train loss:4.595820 +step:871 train loss:4.623187 +step:872 train loss:4.494618 +step:873 train loss:4.590597 +step:874 train loss:4.558406 +step:875 train loss:4.493879 +step:876 train loss:4.531596 +step:877 train loss:4.479774 +step:878 train loss:4.491856 +step:879 train loss:4.503680 +step:880 train loss:4.525762 +step:881 train loss:4.519760 +step:882 train loss:4.486993 +step:883 train loss:4.426564 +step:884 train loss:4.527300 +step:885 train loss:4.469796 +step:886 train loss:4.483094 +step:887 train loss:4.509650 +step:888 train loss:4.521628 +step:889 train loss:4.435938 +step:890 train loss:4.463227 +step:891 train loss:4.455797 +step:892 train loss:4.446906 +step:893 train loss:4.435099 +step:894 train loss:4.400567 +step:895 train loss:4.449619 +step:896 train loss:4.431222 +step:897 train loss:4.434816 +step:898 train loss:4.437211 +step:899 train loss:4.375190 +step:900 train loss:4.420686 +step:901 train loss:4.389843 +step:902 train loss:4.426619 +step:903 train loss:4.446712 +step:904 train loss:4.415554 +step:905 train loss:4.417572 
+step:906 train loss:4.462505 +step:907 train loss:4.358164 +step:908 train loss:4.383770 +step:909 train loss:4.461163 +step:910 train loss:4.441759 +step:911 train loss:4.443048 +step:912 train loss:4.420304 +step:913 train loss:4.447851 +step:914 train loss:4.453891 +step:915 train loss:4.403281 +step:916 train loss:4.565332 +step:917 train loss:4.424229 +step:918 train loss:4.478158 +step:919 train loss:4.411364 +step:920 train loss:4.403538 +step:921 train loss:4.389829 +step:922 train loss:4.332012 +step:923 train loss:4.338529 +step:924 train loss:4.357439 +step:925 train loss:4.419700 +step:926 train loss:4.434479 +step:927 train loss:4.438251 +step:928 train loss:4.477458 +step:929 train loss:4.445855 +step:930 train loss:4.375883 +step:931 train loss:4.416682 +step:932 train loss:4.475691 +step:933 train loss:4.453259 +step:934 train loss:4.388361 +step:935 train loss:4.354530 +step:936 train loss:4.370765 +step:937 train loss:4.441401 +step:938 train loss:4.381977 +step:939 train loss:4.399584 +step:940 train loss:4.415970 +step:941 train loss:4.369106 +step:942 train loss:4.365756 +step:943 train loss:4.395285 +step:944 train loss:4.415855 +step:945 train loss:4.423033 +step:946 train loss:4.416480 +step:947 train loss:4.423673 +step:948 train loss:4.360301 +step:949 train loss:4.471766 +step:950 train loss:4.382968 +step:951 train loss:4.363698 +step:952 train loss:4.363934 +step:953 train loss:4.381292 +step:954 train loss:4.345184 +step:955 train loss:4.398146 +step:956 train loss:4.354610 +step:957 train loss:4.383826 +step:958 train loss:4.280144 +step:959 train loss:4.375711 +step:960 train loss:4.368978 +step:961 train loss:4.346291 +step:962 train loss:4.297866 +step:963 train loss:4.352955 +step:964 train loss:4.270151 +step:965 train loss:4.292671 +step:966 train loss:4.266027 +step:967 train loss:4.274590 +step:968 train loss:4.361262 +step:969 train loss:4.358953 +step:970 train loss:4.420649 +step:971 train loss:4.310756 +step:972 train loss:4.458304 +step:973 train loss:4.361914 +step:974 train loss:4.264914 +step:975 train loss:4.405295 +step:976 train loss:4.365736 +step:977 train loss:4.317714 +step:978 train loss:4.276205 +step:979 train loss:4.272953 +step:980 train loss:4.334900 +step:981 train loss:4.226897 +step:982 train loss:4.298880 +step:983 train loss:4.283512 +step:984 train loss:4.258872 +step:985 train loss:4.309946 +step:986 train loss:4.291751 +step:987 train loss:4.344143 +step:988 train loss:4.324452 +step:989 train loss:4.298779 +step:990 train loss:4.279364 +step:991 train loss:4.312494 +step:992 train loss:4.251643 +step:993 train loss:4.348849 +step:994 train loss:4.210073 +step:995 train loss:4.303254 +step:996 train loss:4.257530 +step:997 train loss:4.258205 +step:998 train loss:4.308208 +step:999 train loss:4.433492 +step:1000 validation loss:4.280536 total_sharp:9.0270e-02 L1_sharp:1.2830e-01 L2_sharp:2.2719e-02 L3_sharp:2.3002e-02 L4_sharp:2.4492e-02 L5_sharp:3.1769e-02 L6_sharp:2.0724e-02 L7_sharp:1.9305e-02 L8_sharp:1.6338e-02 L9_sharp:1.1373e-02 L10_sharp:8.8667e-03 L11_sharp:7.8021e-03 L12_sharp:1.1725e-02 total_fnorm:1.9400e+00 total_l1_linf:1.6928e+04 total_spectral:1.9400e+00 L1_fnorm:3.6366e-01 L2_fnorm:3.7358e-01 L3_fnorm:3.8345e-01 L4_fnorm:3.7724e-01 L5_fnorm:3.5897e-01 L6_fnorm:3.9133e-01 L7_fnorm:4.0598e-01 L8_fnorm:4.3256e-01 L9_fnorm:4.5926e-01 L10_fnorm:4.7229e-01 L11_fnorm:4.7165e-01 L12_fnorm:4.3368e-01 L1_l1linf:5.3854e-01 L2_l1linf:5.8992e-01 L3_l1linf:6.2922e-01 L4_l1linf:6.8152e-01 L5_l1linf:5.8006e-01 
L6_l1linf:6.2706e-01 L7_l1linf:5.7065e-01 L8_l1linf:4.9044e-01 L9_l1linf:5.0701e-01 L10_l1linf:5.2144e-01 L11_l1linf:5.2995e-01 L12_l1linf:5.5351e-01 L1_spectral:8.4234e-02 L2_spectral:9.9682e-02 L3_spectral:9.7022e-02 L4_spectral:1.0566e-01 L5_spectral:9.0660e-02 L6_spectral:1.0057e-01 L7_spectral:8.8274e-02 L8_spectral:7.2720e-02 L9_spectral:7.6774e-02 L10_spectral:7.7136e-02 L11_spectral:8.0077e-02 L12_spectral:9.1660e-02 v_norm:1.9400e+00 cos_v_-g_hvp:1.7630e-01 g_hvp_norm:5.5522e-01 cos_v_-g_t:1.8809e-01 g_t_norm:5.2181e-01 hv_norm:1.2434e+00 cos_v_hv:1.4084e-01 hg_norm:7.4040e+00 cos_g_hg:6.5508e-01 v_par:1.3304e-02 v_perp:1.9399e+00 L1_cos_v_neg_g:3.1870e-01 L1_v_norm:3.6366e-01 L2_cos_v_neg_g:1.9992e-01 L2_v_norm:3.7358e-01 L3_cos_v_neg_g:1.9138e-01 L3_v_norm:3.8345e-01 L4_cos_v_neg_g:2.0675e-01 L4_v_norm:3.7724e-01 L5_cos_v_neg_g:2.1675e-01 L5_v_norm:3.5897e-01 L6_cos_v_neg_g:2.3184e-01 L6_v_norm:3.9133e-01 L7_cos_v_neg_g:2.5778e-01 L7_v_norm:4.0598e-01 L8_cos_v_neg_g:2.5373e-01 L8_v_norm:4.3256e-01 L9_cos_v_neg_g:2.5716e-01 L9_v_norm:4.5926e-01 L10_cos_v_neg_g:3.0805e-01 L10_v_norm:4.7229e-01 L11_cos_v_neg_g:3.2732e-01 L11_v_norm:4.7165e-01 L12_cos_v_neg_g:3.5845e-01 L12_v_norm:4.3368e-01 +step:1000 train loss:4.227985 +step:1001 train loss:4.356333 +step:1002 train loss:4.240284 +step:1003 train loss:4.333374 +step:1004 train loss:4.265871 +step:1005 train loss:4.170986 +step:1006 train loss:4.292577 +step:1007 train loss:4.261709 +step:1008 train loss:4.290588 +step:1009 train loss:4.333427 +step:1010 train loss:4.297335 +step:1011 train loss:4.322587 +step:1012 train loss:4.278359 +step:1013 train loss:4.282935 +step:1014 train loss:4.256848 +step:1015 train loss:4.194541 +step:1016 train loss:4.353389 +step:1017 train loss:4.270795 +step:1018 train loss:4.241447 +step:1019 train loss:4.344280 +step:1020 train loss:4.262351 +step:1021 train loss:4.260397 +step:1022 train loss:4.403845 +step:1023 train loss:4.210940 +step:1024 train loss:4.275980 +step:1025 train loss:4.261453 +step:1026 train loss:4.297763 +step:1027 train loss:4.279867 +step:1028 train loss:4.239909 +step:1029 train loss:4.247699 +step:1030 train loss:4.282141 +step:1031 train loss:4.199177 +step:1032 train loss:4.233822 +step:1033 train loss:4.284074 +step:1034 train loss:4.254761 +step:1035 train loss:4.287876 +step:1036 train loss:4.223917 +step:1037 train loss:4.205496 +step:1038 train loss:4.403747 +step:1039 train loss:4.254204 +step:1040 train loss:4.269505 +step:1041 train loss:4.276265 +step:1042 train loss:4.259789 +step:1043 train loss:4.309058 +step:1044 train loss:4.248126 +step:1045 train loss:4.246745 +step:1046 train loss:4.290753 +step:1047 train loss:4.248537 +step:1048 train loss:4.243948 +step:1049 train loss:4.180339 +step:1050 train loss:4.271973 +step:1051 train loss:4.281486 +step:1052 train loss:4.227528 +step:1053 train loss:4.229374 +step:1054 train loss:4.254979 +step:1055 train loss:4.231144 +step:1056 train loss:4.220089 +step:1057 train loss:4.244066 +step:1058 train loss:4.197895 +step:1059 train loss:4.289103 +step:1060 train loss:4.183978 +step:1061 train loss:4.157035 +step:1062 train loss:4.269152 +step:1063 train loss:4.145448 +step:1064 train loss:4.307704 +step:1065 train loss:4.244694 +step:1066 train loss:4.162166 +step:1067 train loss:4.239383 +step:1068 train loss:4.215455 +step:1069 train loss:4.265912 +step:1070 train loss:4.264439 +step:1071 train loss:4.193478 +step:1072 train loss:4.210628 +step:1073 train loss:4.267878 +step:1074 train loss:4.162611 +step:1075 
train loss:4.287750 +step:1076 train loss:4.259158 +step:1077 train loss:4.271770 +step:1078 train loss:4.281525 +step:1079 train loss:4.249671 +step:1080 train loss:4.295095 +step:1081 train loss:4.273926 +step:1082 train loss:4.201555 +step:1083 train loss:4.270075 +step:1084 train loss:4.271629 +step:1085 train loss:4.177864 +step:1086 train loss:4.282587 +step:1087 train loss:4.232011 +step:1088 train loss:4.247946 +step:1089 train loss:4.231977 +step:1090 train loss:4.235978 +step:1091 train loss:4.253172 +step:1092 train loss:4.212201 +step:1093 train loss:4.182084 +step:1094 train loss:4.205931 +step:1095 train loss:4.148570 +step:1096 train loss:4.158854 +step:1097 train loss:4.192610 +step:1098 train loss:4.191090 +step:1099 train loss:4.222806 +step:1100 train loss:4.196538 +step:1101 train loss:4.178745 +step:1102 train loss:4.239276 +step:1103 train loss:4.194777 +step:1104 train loss:4.171737 +step:1105 train loss:4.166370 +step:1106 train loss:4.146170 +step:1107 train loss:4.110397 +step:1108 train loss:4.226758 +step:1109 train loss:4.207737 +step:1110 train loss:4.182449 +step:1111 train loss:4.212645 +step:1112 train loss:4.144533 +step:1113 train loss:4.170701 +step:1114 train loss:4.149333 +step:1115 train loss:4.140079 +step:1116 train loss:4.268720 +step:1117 train loss:4.208657 +step:1118 train loss:4.174078 +step:1119 train loss:4.196504 +step:1120 train loss:4.180732 +step:1121 train loss:4.189057 +step:1122 train loss:4.136643 +step:1123 train loss:4.154060 +step:1124 train loss:4.127431 +step:1125 train loss:4.151416 +step:1126 train loss:4.123950 +step:1127 train loss:4.211711 +step:1128 train loss:4.148354 +step:1129 train loss:4.203654 +step:1130 train loss:4.200550 +step:1131 train loss:4.185226 +step:1132 train loss:4.135231 +step:1133 train loss:4.147000 +step:1134 train loss:4.211220 +step:1135 train loss:4.140079 +step:1136 train loss:4.166678 +step:1137 train loss:4.182746 +step:1138 train loss:4.153756 +step:1139 train loss:4.142865 +step:1140 train loss:4.156664 +step:1141 train loss:4.198425 +step:1142 train loss:4.074340 +step:1143 train loss:4.143846 +step:1144 train loss:4.072278 +step:1145 train loss:4.128709 +step:1146 train loss:4.123930 +step:1147 train loss:4.193383 +step:1148 train loss:4.129997 +step:1149 train loss:4.098474 +step:1150 train loss:4.270809 +step:1151 train loss:4.140758 +step:1152 train loss:4.104856 +step:1153 train loss:4.153221 +step:1154 train loss:4.146029 +step:1155 train loss:4.274593 +step:1156 train loss:4.148238 +step:1157 train loss:4.147197 +step:1158 train loss:4.194322 +step:1159 train loss:4.117599 +step:1160 train loss:4.121009 +step:1161 train loss:4.179185 +step:1162 train loss:4.215962 +step:1163 train loss:4.127598 +step:1164 train loss:4.131547 +step:1165 train loss:4.096280 +step:1166 train loss:4.164661 +step:1167 train loss:4.125277 +step:1168 train loss:4.048151 +step:1169 train loss:4.150123 +step:1170 train loss:4.148252 +step:1171 train loss:4.095085 +step:1172 train loss:4.180326 +step:1173 train loss:4.120481 +step:1174 train loss:4.159606 +step:1175 train loss:4.159532 +step:1176 train loss:4.228780 +step:1177 train loss:4.160519 +step:1178 train loss:4.162257 +step:1179 train loss:4.084008 +step:1180 train loss:4.203891 +step:1181 train loss:4.119805 +step:1182 train loss:4.157652 +step:1183 train loss:4.137713 +step:1184 train loss:4.121714 +step:1185 train loss:4.183255 +step:1186 train loss:4.152170 +step:1187 train loss:4.124881 +step:1188 train loss:4.159561 +step:1189 train loss:4.131903 
+step:1190 train loss:4.133890 +step:1191 train loss:4.202965 +step:1192 train loss:4.145972 +step:1193 train loss:4.082381 +step:1194 train loss:4.149137 +step:1195 train loss:4.163820 +step:1196 train loss:4.137619 +step:1197 train loss:4.135161 +step:1198 train loss:4.130183 +step:1199 train loss:4.048217 +step:1200 train loss:4.115858 +step:1201 train loss:4.048721 +step:1202 train loss:4.091279 +step:1203 train loss:4.097309 +step:1204 train loss:4.104164 +step:1205 train loss:4.195164 +step:1206 train loss:4.172832 +step:1207 train loss:4.050744 +step:1208 train loss:4.155595 +step:1209 train loss:4.040795 +step:1210 train loss:4.141643 +step:1211 train loss:4.085268 +step:1212 train loss:4.093977 +step:1213 train loss:4.150812 +step:1214 train loss:4.067897 +step:1215 train loss:4.102051 +step:1216 train loss:4.132877 +step:1217 train loss:4.052400 +step:1218 train loss:4.149033 +step:1219 train loss:4.073428 +step:1220 train loss:4.140045 +step:1221 train loss:4.101916 +step:1222 train loss:4.097757 +step:1223 train loss:4.142039 +step:1224 train loss:4.085247 +step:1225 train loss:4.131499 +step:1226 train loss:4.108185 +step:1227 train loss:4.048584 +step:1228 train loss:4.142715 +step:1229 train loss:4.068905 +step:1230 train loss:4.087636 +step:1231 train loss:4.105583 +step:1232 train loss:3.983759 +step:1233 train loss:4.114104 +step:1234 train loss:4.115374 +step:1235 train loss:4.245557 +step:1236 train loss:4.211067 +step:1237 train loss:4.297884 +step:1238 train loss:4.190153 +step:1239 train loss:4.152921 +step:1240 train loss:4.178102 +step:1241 train loss:4.183045 +step:1242 train loss:4.229110 +step:1243 train loss:4.164249 +step:1244 train loss:4.181858 +step:1245 train loss:4.227913 +step:1246 train loss:4.136192 +step:1247 train loss:4.200522 +step:1248 train loss:4.161309 +step:1249 train loss:4.113043 +step:1250 validation loss:4.139153 +step:1250 train loss:4.117623 +step:1251 train loss:4.160675 +step:1252 train loss:4.154425 +step:1253 train loss:4.182882 +step:1254 train loss:4.125151 +step:1255 train loss:4.232564 +step:1256 train loss:4.164907 +step:1257 train loss:4.157095 +step:1258 train loss:4.138823 +step:1259 train loss:4.085355 +step:1260 train loss:4.169732 +step:1261 train loss:4.060245 +step:1262 train loss:4.053782 +step:1263 train loss:4.070840 +step:1264 train loss:4.153179 +step:1265 train loss:4.072985 +step:1266 train loss:4.185472 +step:1267 train loss:4.109206 +step:1268 train loss:4.040382 +step:1269 train loss:4.139915 +step:1270 train loss:4.082721 +step:1271 train loss:4.072440 +step:1272 train loss:4.112174 +step:1273 train loss:4.059126 +step:1274 train loss:4.024295 +step:1275 train loss:4.142002 +step:1276 train loss:4.025088 +step:1277 train loss:4.076288 +step:1278 train loss:4.104008 +step:1279 train loss:4.025105 +step:1280 train loss:4.115561 +step:1281 train loss:4.093739 +step:1282 train loss:3.983085 +step:1283 train loss:4.095855 +step:1284 train loss:4.022492 +step:1285 train loss:4.126903 +step:1286 train loss:4.124301 +step:1287 train loss:4.042802 +step:1288 train loss:4.083208 +step:1289 train loss:4.052225 +step:1290 train loss:4.086575 +step:1291 train loss:4.044796 +step:1292 train loss:4.020902 +step:1293 train loss:4.004652 +step:1294 train loss:4.105297 +step:1295 train loss:4.057092 +step:1296 train loss:3.986446 +step:1297 train loss:4.067836 +step:1298 train loss:4.071513 +step:1299 train loss:4.080468 +step:1300 train loss:4.064663 +step:1301 train loss:4.136716 +step:1302 train loss:4.090976 +step:1303 
train loss:4.077148 +step:1304 train loss:4.057352 +step:1305 train loss:4.086950 +step:1306 train loss:4.101755 +step:1307 train loss:4.031227 +step:1308 train loss:4.062175 +step:1309 train loss:4.053473 +step:1310 train loss:4.075646 +step:1311 train loss:4.038433 +step:1312 train loss:4.044629 +step:1313 train loss:3.991978 +step:1314 train loss:4.058721 +step:1315 train loss:4.072297 +step:1316 train loss:4.003165 +step:1317 train loss:4.083651 +step:1318 train loss:4.056272 +step:1319 train loss:4.065814 +step:1320 train loss:4.082628 +step:1321 train loss:4.083177 +step:1322 train loss:4.143404 +step:1323 train loss:4.100832 +step:1324 train loss:4.010450 +step:1325 train loss:4.090150 +step:1326 train loss:4.226660 +step:1327 train loss:4.139836 +step:1328 train loss:4.125299 +step:1329 train loss:4.015538 +step:1330 train loss:4.031204 +step:1331 train loss:4.078171 +step:1332 train loss:4.012774 +step:1333 train loss:4.053645 +step:1334 train loss:4.089218 +step:1335 train loss:4.123354 +step:1336 train loss:4.108585 +step:1337 train loss:4.056351 +step:1338 train loss:4.071863 +step:1339 train loss:4.123002 +step:1340 train loss:4.143949 +step:1341 train loss:4.109701 +step:1342 train loss:4.143404 +step:1343 train loss:4.059407 +step:1344 train loss:4.033520 +step:1345 train loss:4.036711 +step:1346 train loss:4.175839 +step:1347 train loss:4.042583 +step:1348 train loss:4.097703 +step:1349 train loss:4.125788 +step:1350 train loss:4.117777 +step:1351 train loss:4.092094 +step:1352 train loss:4.124300 +step:1353 train loss:4.086830 +step:1354 train loss:4.051614 +step:1355 train loss:3.996657 +step:1356 train loss:4.108605 +step:1357 train loss:4.038267 +step:1358 train loss:4.036952 +step:1359 train loss:4.080605 +step:1360 train loss:4.068197 +step:1361 train loss:4.087196 +step:1362 train loss:4.071291 +step:1363 train loss:4.038507 +step:1364 train loss:4.094167 +step:1365 train loss:4.003526 +step:1366 train loss:4.056596 +step:1367 train loss:4.204054 +step:1368 train loss:4.048722 +step:1369 train loss:4.060164 +step:1370 train loss:4.063737 +step:1371 train loss:4.089544 +step:1372 train loss:4.072464 +step:1373 train loss:4.144334 +step:1374 train loss:4.089917 +step:1375 train loss:4.027209 +step:1376 train loss:4.062955 +step:1377 train loss:4.060305 +step:1378 train loss:4.028706 +step:1379 train loss:4.083523 +step:1380 train loss:3.983520 +step:1381 train loss:4.091006 +step:1382 train loss:4.088723 +step:1383 train loss:4.080513 +step:1384 train loss:4.070045 +step:1385 train loss:4.185600 +step:1386 train loss:4.146416 +step:1387 train loss:4.037622 +step:1388 train loss:4.067218 +step:1389 train loss:4.043851 +step:1390 train loss:4.071843 +step:1391 train loss:4.100126 +step:1392 train loss:4.077182 +step:1393 train loss:4.038792 +step:1394 train loss:4.008056 +step:1395 train loss:4.051010 +step:1396 train loss:4.069727 +step:1397 train loss:3.993006 +step:1398 train loss:4.074910 +step:1399 train loss:4.077700 +step:1400 train loss:3.984333 +step:1401 train loss:4.070885 +step:1402 train loss:4.034950 +step:1403 train loss:4.039950 +step:1404 train loss:4.007498 +step:1405 train loss:4.085722 +step:1406 train loss:4.026445 +step:1407 train loss:4.016890 +step:1408 train loss:4.024778 +step:1409 train loss:4.072584 +step:1410 train loss:4.037879 +step:1411 train loss:4.039696 +step:1412 train loss:4.027590 +step:1413 train loss:4.158123 +step:1414 train loss:4.074639 +step:1415 train loss:4.033093 +step:1416 train loss:4.083983 +step:1417 train loss:4.033552 
+step:1418 train loss:4.043459 +step:1419 train loss:4.025332 +step:1420 train loss:4.044678 +step:1421 train loss:4.046467 +step:1422 train loss:4.020640 +step:1423 train loss:4.036864 +step:1424 train loss:4.031021 +step:1425 train loss:4.086175 +step:1426 train loss:4.046351 +step:1427 train loss:3.999083 +step:1428 train loss:4.064641 +step:1429 train loss:4.034800 +step:1430 train loss:4.064015 +step:1431 train loss:3.969547 +step:1432 train loss:3.997838 +step:1433 train loss:4.058210 +step:1434 train loss:3.983146 +step:1435 train loss:4.101968 +step:1436 train loss:4.127413 +step:1437 train loss:4.046644 +step:1438 train loss:4.030312 +step:1439 train loss:4.010628 +step:1440 train loss:4.001184 +step:1441 train loss:4.004831 +step:1442 train loss:4.048643 +step:1443 train loss:4.011715 +step:1444 train loss:3.975626 +step:1445 train loss:3.996850 +step:1446 train loss:4.017159 +step:1447 train loss:4.062474 +step:1448 train loss:4.075068 +step:1449 train loss:4.037709 +step:1450 train loss:4.070223 +step:1451 train loss:3.992652 +step:1452 train loss:4.199350 +step:1453 train loss:4.063091 +step:1454 train loss:4.010754 +step:1455 train loss:4.012645 +step:1456 train loss:4.054770 +step:1457 train loss:4.040154 +step:1458 train loss:4.015844 +step:1459 train loss:4.028946 +step:1460 train loss:4.009735 +step:1461 train loss:3.949058 +step:1462 train loss:3.971101 +step:1463 train loss:3.991427 +step:1464 train loss:4.044010 +step:1465 train loss:4.037924 +step:1466 train loss:4.036506 +step:1467 train loss:4.042387 +step:1468 train loss:4.069532 +step:1469 train loss:3.979936 +step:1470 train loss:4.060455 +step:1471 train loss:3.989509 +step:1472 train loss:4.061135 +step:1473 train loss:3.986963 +step:1474 train loss:4.089268 +step:1475 train loss:4.027503 +step:1476 train loss:4.059624 +step:1477 train loss:3.965466 +step:1478 train loss:3.995160 +step:1479 train loss:4.016230 +step:1480 train loss:4.123935 +step:1481 train loss:4.006279 +step:1482 train loss:4.027072 +step:1483 train loss:3.991322 +step:1484 train loss:3.975850 +step:1485 train loss:4.002510 +step:1486 train loss:4.063583 +step:1487 train loss:4.037584 +step:1488 train loss:3.992054 +step:1489 train loss:4.046688 +step:1490 train loss:4.065459 +step:1491 train loss:3.970333 +step:1492 train loss:4.041929 +step:1493 train loss:3.991875 +step:1494 train loss:4.020397 +step:1495 train loss:4.049060 +step:1496 train loss:4.034868 +step:1497 train loss:4.043220 +step:1498 train loss:3.981377 +step:1499 train loss:3.984452 +step:1500 validation loss:3.963036 total_sharp:4.1168e-02 L1_sharp:5.5242e-02 L2_sharp:1.3754e-02 L3_sharp:1.1615e-02 L4_sharp:7.4509e-03 L5_sharp:1.0673e-02 L6_sharp:6.9157e-03 L7_sharp:7.2704e-03 L8_sharp:7.7744e-03 L9_sharp:5.8634e-03 L10_sharp:4.1441e-03 L11_sharp:3.4547e-03 L12_sharp:6.9975e-03 total_fnorm:2.0457e+00 total_l1_linf:1.8002e+04 total_spectral:2.0457e+00 L1_fnorm:4.2021e-01 L2_fnorm:4.3838e-01 L3_fnorm:4.3468e-01 L4_fnorm:4.2694e-01 L5_fnorm:4.2756e-01 L6_fnorm:4.4533e-01 L7_fnorm:4.5167e-01 L8_fnorm:4.5583e-01 L9_fnorm:4.7637e-01 L10_fnorm:4.8564e-01 L11_fnorm:4.8636e-01 L12_fnorm:4.6304e-01 L1_l1linf:5.0914e-01 L2_l1linf:7.0692e-01 L3_l1linf:6.2384e-01 L4_l1linf:7.3077e-01 L5_l1linf:7.0203e-01 L6_l1linf:6.2344e-01 L7_l1linf:5.3866e-01 L8_l1linf:4.8060e-01 L9_l1linf:4.9280e-01 L10_l1linf:5.0449e-01 L11_l1linf:5.0613e-01 L12_l1linf:5.2310e-01 L1_spectral:8.3899e-02 L2_spectral:1.0285e-01 L3_spectral:9.6266e-02 L4_spectral:1.0054e-01 L5_spectral:9.3686e-02 L6_spectral:9.2814e-02 
L7_spectral:7.6222e-02 L8_spectral:6.0293e-02 L9_spectral:6.4772e-02 L10_spectral:6.5856e-02 L11_spectral:6.6926e-02 L12_spectral:8.5914e-02 v_norm:2.0457e+00 cos_v_-g_hvp:1.3970e-01 g_hvp_norm:4.0840e-01 cos_v_-g_t:1.5564e-01 g_t_norm:3.6693e-01 hv_norm:9.5726e-01 cos_v_hv:8.7979e-02 hg_norm:4.5574e+00 cos_g_hg:4.9484e-01 v_par:1.3092e-02 v_perp:2.0457e+00 L1_cos_v_neg_g:2.3336e-01 L1_v_norm:4.2021e-01 L2_cos_v_neg_g:1.4624e-01 L2_v_norm:4.3838e-01 L3_cos_v_neg_g:1.4781e-01 L3_v_norm:4.3468e-01 L4_cos_v_neg_g:1.5414e-01 L4_v_norm:4.2694e-01 L5_cos_v_neg_g:1.2346e-01 L5_v_norm:4.2756e-01 L6_cos_v_neg_g:1.4676e-01 L6_v_norm:4.4533e-01 L7_cos_v_neg_g:1.6619e-01 L7_v_norm:4.5167e-01 L8_cos_v_neg_g:1.5756e-01 L8_v_norm:4.5583e-01 L9_cos_v_neg_g:1.5939e-01 L9_v_norm:4.7637e-01 L10_cos_v_neg_g:1.9002e-01 L10_v_norm:4.8564e-01 L11_cos_v_neg_g:2.1640e-01 L11_v_norm:4.8636e-01 L12_cos_v_neg_g:2.6235e-01 L12_v_norm:4.6304e-01 +step:1500 train loss:4.013128 +step:1501 train loss:3.950852 +step:1502 train loss:4.018481 +step:1503 train loss:4.096491 +step:1504 train loss:3.956325 +step:1505 train loss:4.015347 +step:1506 train loss:4.017736 +step:1507 train loss:3.978290 +step:1508 train loss:3.914424 +step:1509 train loss:4.068686 +step:1510 train loss:4.058962 +step:1511 train loss:3.997776 +step:1512 train loss:3.994714 +step:1513 train loss:4.059496 +step:1514 train loss:4.035226 +step:1515 train loss:4.113547 +step:1516 train loss:4.009198 +step:1517 train loss:4.064755 +step:1518 train loss:4.056168 +step:1519 train loss:3.964764 +step:1520 train loss:3.984169 +step:1521 train loss:4.022061 +step:1522 train loss:3.890314 +step:1523 train loss:4.040344 +step:1524 train loss:4.029728 +step:1525 train loss:3.980049 +step:1526 train loss:4.078416 +step:1527 train loss:3.956435 +step:1528 train loss:4.028764 +step:1529 train loss:3.999946 +step:1530 train loss:4.032537 +step:1531 train loss:3.910901 +step:1532 train loss:3.949393 +step:1533 train loss:3.974479 +step:1534 train loss:3.974731 +step:1535 train loss:3.995420 +step:1536 train loss:3.942985 +step:1537 train loss:4.053718 +step:1538 train loss:4.022973 +step:1539 train loss:4.029220 +step:1540 train loss:4.010421 +step:1541 train loss:3.991805 +step:1542 train loss:3.975256 +step:1543 train loss:3.956829 +step:1544 train loss:3.900902 +step:1545 train loss:3.913024 +step:1546 train loss:3.959033 +step:1547 train loss:3.971855 +step:1548 train loss:3.955422 +step:1549 train loss:3.928679 +step:1550 train loss:3.976967 +step:1551 train loss:4.037352 +step:1552 train loss:4.022051 +step:1553 train loss:3.959287 +step:1554 train loss:3.958631 +step:1555 train loss:3.981556 +step:1556 train loss:3.929804 +step:1557 train loss:3.946361 +step:1558 train loss:3.967721 +step:1559 train loss:3.950215 +step:1560 train loss:4.003234 +step:1561 train loss:3.997249 +step:1562 train loss:3.991273 +step:1563 train loss:3.938056 +step:1564 train loss:3.908033 +step:1565 train loss:3.973439 +step:1566 train loss:4.010877 +step:1567 train loss:3.944032 +step:1568 train loss:4.005996 +step:1569 train loss:3.957714 +step:1570 train loss:3.936286 +step:1571 train loss:3.928303 +step:1572 train loss:3.951350 +step:1573 train loss:3.940450 +step:1574 train loss:3.966108 +step:1575 train loss:4.056062 +step:1576 train loss:3.956511 +step:1577 train loss:3.999150 +step:1578 train loss:3.944180 +step:1579 train loss:3.985721 +step:1580 train loss:3.942743 +step:1581 train loss:3.953621 +step:1582 train loss:3.909322 +step:1583 train loss:3.951169 +step:1584 train 
loss:3.897046 +step:1585 train loss:3.898304 +step:1586 train loss:3.911627 +step:1587 train loss:3.946553 +step:1588 train loss:3.934936 +step:1589 train loss:3.905210 +step:1590 train loss:3.943796 +step:1591 train loss:3.993029 +step:1592 train loss:3.935925 +step:1593 train loss:3.951006 +step:1594 train loss:3.941207 +step:1595 train loss:3.916621 +step:1596 train loss:4.020094 +step:1597 train loss:3.956955 +step:1598 train loss:3.971897 +step:1599 train loss:3.978205 +step:1600 train loss:3.979635 +step:1601 train loss:3.913915 +step:1602 train loss:3.972828 +step:1603 train loss:3.911938 +step:1604 train loss:3.960854 +step:1605 train loss:3.962266 +step:1606 train loss:3.882086 +step:1607 train loss:3.979971 +step:1608 train loss:3.926817 +step:1609 train loss:3.910221 +step:1610 train loss:3.949102 +step:1611 train loss:3.934475 +step:1612 train loss:3.989507 +step:1613 train loss:3.961349 +step:1614 train loss:3.927573 +step:1615 train loss:3.961571 +step:1616 train loss:3.904798 +step:1617 train loss:3.994131 +step:1618 train loss:3.986998 +step:1619 train loss:3.908075 +step:1620 train loss:3.989417 +step:1621 train loss:3.964188 +step:1622 train loss:3.923512 +step:1623 train loss:4.106142 +step:1624 train loss:3.898884 +step:1625 train loss:3.960180 +step:1626 train loss:3.966339 +step:1627 train loss:3.902301 +step:1628 train loss:3.936621 +step:1629 train loss:3.942768 +step:1630 train loss:3.928011 +step:1631 train loss:3.961203 +step:1632 train loss:3.945260 +step:1633 train loss:3.991944 +step:1634 train loss:3.972657 +step:1635 train loss:3.908620 +step:1636 train loss:3.982055 +step:1637 train loss:3.957753 +step:1638 train loss:3.826948 +step:1639 train loss:4.008300 +step:1640 train loss:3.890012 +step:1641 train loss:3.907300 +step:1642 train loss:3.984100 +step:1643 train loss:3.869490 +step:1644 train loss:3.926878 +step:1645 train loss:3.921821 +step:1646 train loss:3.992785 +step:1647 train loss:3.959859 +step:1648 train loss:3.884778 +step:1649 train loss:3.928480 +step:1650 train loss:3.966502 +step:1651 train loss:3.907902 +step:1652 train loss:4.015901 +step:1653 train loss:3.929420 +step:1654 train loss:3.859360 +step:1655 train loss:3.970032 +step:1656 train loss:3.901440 +step:1657 train loss:3.903526 +step:1658 train loss:3.963147 +step:1659 train loss:3.873796 +step:1660 train loss:3.932685 +step:1661 train loss:3.926042 +step:1662 train loss:3.842431 +step:1663 train loss:3.964881 +step:1664 train loss:3.905633 +step:1665 train loss:3.866978 +step:1666 train loss:4.034276 +step:1667 train loss:3.890817 +step:1668 train loss:3.937434 +step:1669 train loss:3.901956 +step:1670 train loss:3.905385 +step:1671 train loss:3.941279 +step:1672 train loss:3.954480 +step:1673 train loss:3.943141 +step:1674 train loss:3.948098 +step:1675 train loss:3.895747 +step:1676 train loss:3.878465 +step:1677 train loss:3.938102 +step:1678 train loss:3.906510 +step:1679 train loss:3.907550 +step:1680 train loss:3.857579 +step:1681 train loss:3.952100 +step:1682 train loss:3.846368 +step:1683 train loss:3.914641 +step:1684 train loss:3.879683 +step:1685 train loss:3.956568 +step:1686 train loss:3.971773 +step:1687 train loss:3.978498 +step:1688 train loss:4.003441 +step:1689 train loss:3.901621 +step:1690 train loss:3.901516 +step:1691 train loss:3.954874 +step:1692 train loss:3.906312 +step:1693 train loss:3.904303 +step:1694 train loss:3.974925 +step:1695 train loss:3.924558 +step:1696 train loss:3.915425 +step:1697 train loss:3.888961 +step:1698 train loss:3.924551 
+step:1699 train loss:3.899281 +step:1700 train loss:3.982840 +step:1701 train loss:3.890129 +step:1702 train loss:3.862860 +step:1703 train loss:3.920176 +step:1704 train loss:3.896947 +step:1705 train loss:3.901069 +step:1706 train loss:3.973166 +step:1707 train loss:3.879529 +step:1708 train loss:3.863924 +step:1709 train loss:3.918551 +step:1710 train loss:3.960512 +step:1711 train loss:3.900950 +step:1712 train loss:3.931574 +step:1713 train loss:3.917866 +step:1714 train loss:3.902520 +step:1715 train loss:3.923775 +step:1716 train loss:3.923580 +step:1717 train loss:3.965958 +step:1718 train loss:3.920448 +step:1719 train loss:3.891552 +step:1720 train loss:4.001027 +step:1721 train loss:3.899359 +step:1722 train loss:3.896974 +step:1723 train loss:3.930847 +step:1724 train loss:3.906994 +step:1725 train loss:3.988380 +step:1726 train loss:3.899550 +step:1727 train loss:3.937239 +step:1728 train loss:3.976082 +step:1729 train loss:4.104000 +step:1730 train loss:3.950540 +step:1731 train loss:3.951983 +step:1732 train loss:3.927912 +step:1733 train loss:3.912941 +step:1734 train loss:3.894875 +step:1735 train loss:3.913108 +step:1736 train loss:3.958129 +step:1737 train loss:3.923770 +step:1738 train loss:3.869293 +step:1739 train loss:3.918345 +step:1740 train loss:3.893336 +step:1741 train loss:3.913618 +step:1742 train loss:3.903606 +step:1743 train loss:3.943359 +step:1744 train loss:3.904013 +step:1745 train loss:3.975640 +step:1746 train loss:3.958326 +step:1747 train loss:3.891126 +step:1748 train loss:3.875711 +step:1749 train loss:3.945795 +step:1750 validation loss:3.871175 +step:1750 train loss:3.968522 +step:1751 train loss:3.867771 +step:1752 train loss:3.912617 +step:1753 train loss:3.981249 +step:1754 train loss:3.922717 +step:1755 train loss:3.881584 +step:1756 train loss:3.896856 +step:1757 train loss:3.955770 +step:1758 train loss:3.925353 +step:1759 train loss:3.908341 +step:1760 train loss:3.969461 +step:1761 train loss:3.972123 +step:1762 train loss:3.967771 +step:1763 train loss:3.976939 +step:1764 train loss:3.972914 +step:1765 train loss:3.934209 +step:1766 train loss:3.924220 +step:1767 train loss:3.932674 +step:1768 train loss:3.847844 +step:1769 train loss:3.920698 +step:1770 train loss:3.888244 +step:1771 train loss:3.926868 +step:1772 train loss:3.971858 +step:1773 train loss:3.916193 +step:1774 train loss:3.966966 +step:1775 train loss:3.945932 +step:1776 train loss:3.957287 +step:1777 train loss:4.038214 +step:1778 train loss:3.900022 +step:1779 train loss:4.044339 +step:1780 train loss:3.966619 +step:1781 train loss:3.951493 +step:1782 train loss:4.012208 +step:1783 train loss:3.960668 +step:1784 train loss:3.936848 +step:1785 train loss:3.977142 +step:1786 train loss:3.921476 +step:1787 train loss:3.936799 +step:1788 train loss:3.935325 +step:1789 train loss:3.895446 +step:1790 train loss:3.924502 +step:1791 train loss:3.934584 +step:1792 train loss:3.915962 +step:1793 train loss:3.920002 +step:1794 train loss:3.939229 +step:1795 train loss:3.960836 +step:1796 train loss:3.881013 +step:1797 train loss:3.961673 +step:1798 train loss:3.916259 +step:1799 train loss:3.949224 +step:1800 train loss:3.926706 +step:1801 train loss:3.826848 +step:1802 train loss:3.919090 +step:1803 train loss:3.932856 +step:1804 train loss:3.916497 +step:1805 train loss:3.900414 +step:1806 train loss:3.871735 +step:1807 train loss:3.861123 +step:1808 train loss:3.935253 +step:1809 train loss:3.921271 +step:1810 train loss:3.932016 +step:1811 train loss:3.874056 +step:1812 
train loss:3.879112 +step:1813 train loss:3.849424 +step:1814 train loss:3.906063 +step:1815 train loss:3.914391 +step:1816 train loss:3.870295 +step:1817 train loss:3.950385 +step:1818 train loss:3.946545 +step:1819 train loss:3.894157 +step:1820 train loss:3.882499 +step:1821 train loss:3.830930 +step:1822 train loss:3.928912 +step:1823 train loss:3.898515 +step:1824 train loss:3.923769 +step:1825 train loss:3.977871 +step:1826 train loss:3.873106 +step:1827 train loss:3.867413 +step:1828 train loss:3.910870 +step:1829 train loss:3.901696 +step:1830 train loss:3.975778 +step:1831 train loss:3.888522 +step:1832 train loss:3.903806 +step:1833 train loss:3.979038 +step:1834 train loss:3.926789 +step:1835 train loss:3.939394 +step:1836 train loss:3.889018 +step:1837 train loss:3.890691 +step:1838 train loss:3.919477 +step:1839 train loss:3.897417 +step:1840 train loss:3.911485 +step:1841 train loss:3.874011 +step:1842 train loss:3.859458 +step:1843 train loss:3.923251 +step:1844 train loss:3.918846 +step:1845 train loss:3.908317 +step:1846 train loss:4.035766 +step:1847 train loss:3.842061 +step:1848 train loss:3.999526 +step:1849 train loss:3.921477 +step:1850 train loss:3.823203 +step:1851 train loss:4.060334 +step:1852 train loss:3.915661 +step:1853 train loss:3.912713 +step:1854 train loss:3.908267 +step:1855 train loss:3.901612 +step:1856 train loss:4.066026 +step:1857 train loss:3.894104 +step:1858 train loss:3.985251 +step:1859 train loss:3.921576 +step:1860 train loss:3.904827 +step:1861 train loss:3.958859 +step:1862 train loss:3.894289 +step:1863 train loss:3.888103 +step:1864 train loss:3.926263 +step:1865 train loss:3.823466 +step:1866 train loss:3.926011 +step:1867 train loss:3.892171 +step:1868 train loss:3.964226 +step:1869 train loss:3.886502 +step:1870 train loss:3.846576 +step:1871 train loss:3.890960 +step:1872 train loss:3.860845 +step:1873 train loss:3.856517 +step:1874 train loss:3.983035 +step:1875 train loss:3.845359 +step:1876 train loss:3.961238 +step:1877 train loss:3.915074 +step:1878 train loss:3.957535 +step:1879 train loss:3.924948 +step:1880 train loss:3.863106 +step:1881 train loss:3.962353 +step:1882 train loss:3.883449 +step:1883 train loss:3.953184 +step:1884 train loss:3.937755 +step:1885 train loss:3.946715 +step:1886 train loss:3.957068 +step:1887 train loss:3.927243 +step:1888 train loss:3.904700 +step:1889 train loss:4.073741 +step:1890 train loss:3.853657 +step:1891 train loss:3.885983 +step:1892 train loss:3.944730 +step:1893 train loss:3.946742 +step:1894 train loss:3.903523 +step:1895 train loss:3.961719 +step:1896 train loss:3.929978 +step:1897 train loss:3.923254 +step:1898 train loss:3.892457 +step:1899 train loss:3.875788 +step:1900 train loss:3.890599 +step:1901 train loss:3.981080 +step:1902 train loss:3.885683 +step:1903 train loss:3.869830 +step:1904 train loss:3.892476 +step:1905 train loss:3.986483 +step:1906 train loss:3.954227 +step:1907 train loss:3.940892 +step:1908 train loss:3.863001 +step:1909 train loss:3.886318 +step:1910 train loss:3.942401 +step:1911 train loss:3.841489 +step:1912 train loss:3.882505 +step:1913 train loss:3.813885 +step:1914 train loss:3.883626 +step:1915 train loss:3.835078 +step:1916 train loss:3.809496 +step:1917 train loss:3.942228 +step:1918 train loss:3.960849 +step:1919 train loss:3.886785 +step:1920 train loss:3.829966 +step:1921 train loss:3.907880 +step:1922 train loss:3.878325 +step:1923 train loss:3.892810 +step:1924 train loss:3.833517 +step:1925 train loss:3.899770 +step:1926 train loss:3.862816 
+step:1927 train loss:3.907329 +step:1928 train loss:3.894479 +step:1929 train loss:3.890228 +step:1930 train loss:3.876311 +step:1931 train loss:3.833606 +step:1932 train loss:3.882530 +step:1933 train loss:3.872429 +step:1934 train loss:3.962581 +step:1935 train loss:3.864012 +step:1936 train loss:3.873657 +step:1937 train loss:3.868814 +step:1938 train loss:3.885342 +step:1939 train loss:3.863801 +step:1940 train loss:3.841556 +step:1941 train loss:3.885646 +step:1942 train loss:3.927403 +step:1943 train loss:3.871824 +step:1944 train loss:3.915891 +step:1945 train loss:4.078170 +step:1946 train loss:3.881689 +step:1947 train loss:3.823141 +step:1948 train loss:3.866121 +step:1949 train loss:3.827994 +step:1950 train loss:3.826859 +step:1951 train loss:3.785539 +step:1952 train loss:3.865148 +step:1953 train loss:3.892320 +step:1954 train loss:3.858760 +step:1955 train loss:3.899365 +step:1956 train loss:3.857958 +step:1957 train loss:3.881142 +step:1958 train loss:3.894114 +step:1959 train loss:3.881071 +step:1960 train loss:3.870494 +step:1961 train loss:3.998498 +step:1962 train loss:3.900417 +step:1963 train loss:3.865256 +step:1964 train loss:3.900756 +step:1965 train loss:3.888225 +step:1966 train loss:3.905459 +step:1967 train loss:3.951227 +step:1968 train loss:3.824686 +step:1969 train loss:4.008672 +step:1970 train loss:3.886807 +step:1971 train loss:4.004279 +step:1972 train loss:3.926134 +step:1973 train loss:3.915175 +step:1974 train loss:3.927510 +step:1975 train loss:3.919791 +step:1976 train loss:4.030751 +step:1977 train loss:4.058344 +step:1978 train loss:4.056479 +step:1979 train loss:3.978672 +step:1980 train loss:3.978741 +step:1981 train loss:3.952307 +step:1982 train loss:3.968215 +step:1983 train loss:3.938888 +step:1984 train loss:3.962143 +step:1985 train loss:3.925315 +step:1986 train loss:3.948925 +step:1987 train loss:3.909179 +step:1988 train loss:3.927412 +step:1989 train loss:3.994183 +step:1990 train loss:3.907431 +step:1991 train loss:3.866948 +step:1992 train loss:3.960967 +step:1993 train loss:3.872486 +step:1994 train loss:3.903021 +step:1995 train loss:3.988074 +step:1996 train loss:3.900036 +step:1997 train loss:3.900011 +step:1998 train loss:3.919239 +step:1999 train loss:3.866304 +step:2000 validation loss:3.822218 total_sharp:1.1615e-02 L1_sharp:2.5904e-02 L2_sharp:1.7938e-03 L3_sharp:1.8473e-03 L4_sharp:2.4240e-03 L5_sharp:3.2338e-03 L6_sharp:2.7759e-03 L7_sharp:3.5612e-03 L8_sharp:4.5730e-03 L9_sharp:2.9107e-03 L10_sharp:2.0937e-03 L11_sharp:1.9466e-03 L12_sharp:3.0747e-03 total_fnorm:1.8997e+00 total_l1_linf:1.6231e+04 total_spectral:1.8997e+00 L1_fnorm:2.8436e-01 L2_fnorm:3.3221e-01 L3_fnorm:3.4616e-01 L4_fnorm:3.3851e-01 L5_fnorm:3.3618e-01 L6_fnorm:3.7254e-01 L7_fnorm:3.9577e-01 L8_fnorm:4.3638e-01 L9_fnorm:4.6329e-01 L10_fnorm:4.7246e-01 L11_fnorm:4.7590e-01 L12_fnorm:4.5734e-01 L1_l1linf:4.0298e-01 L2_l1linf:4.3742e-01 L3_l1linf:5.3723e-01 L4_l1linf:4.4434e-01 L5_l1linf:4.0205e-01 L6_l1linf:5.1501e-01 L7_l1linf:4.6806e-01 L8_l1linf:4.7545e-01 L9_l1linf:4.8801e-01 L10_l1linf:5.0260e-01 L11_l1linf:5.0374e-01 L12_l1linf:4.7684e-01 L1_spectral:5.1867e-02 L2_spectral:6.5595e-02 L3_spectral:6.5131e-02 L4_spectral:6.3846e-02 L5_spectral:6.5660e-02 L6_spectral:6.5653e-02 L7_spectral:5.8016e-02 L8_spectral:5.6514e-02 L9_spectral:5.7453e-02 L10_spectral:5.7123e-02 L11_spectral:5.7476e-02 L12_spectral:8.2797e-02 v_norm:1.8997e+00 cos_v_-g_hvp:1.1847e-01 g_hvp_norm:2.8903e-01 cos_v_-g_t:1.5153e-01 g_t_norm:2.2638e-01 hv_norm:3.4439e-01 
cos_v_hv:6.4070e-02 hg_norm:4.9087e+00 cos_g_hg:3.4557e-01 v_par:1.8228e-02 v_perp:1.8996e+00 L1_cos_v_neg_g:1.8755e-01 L1_v_norm:2.8436e-01 L2_cos_v_neg_g:1.1484e-01 L2_v_norm:3.3221e-01 L3_cos_v_neg_g:1.1573e-01 L3_v_norm:3.4616e-01 L4_cos_v_neg_g:1.0245e-01 L4_v_norm:3.3851e-01 L5_cos_v_neg_g:8.6160e-02 L5_v_norm:3.3618e-01 L6_cos_v_neg_g:1.1295e-01 L6_v_norm:3.7254e-01 L7_cos_v_neg_g:1.3333e-01 L7_v_norm:3.9577e-01 L8_cos_v_neg_g:1.3666e-01 L8_v_norm:4.3638e-01 L9_cos_v_neg_g:1.4412e-01 L9_v_norm:4.6329e-01 L10_cos_v_neg_g:1.7027e-01 L10_v_norm:4.7246e-01 L11_cos_v_neg_g:1.8138e-01 L11_v_norm:4.7590e-01 L12_cos_v_neg_g:2.1079e-01 L12_v_norm:4.5734e-01 +step:2000 train loss:3.875913 +step:2001 train loss:3.853200 +step:2002 train loss:3.873427 +step:2003 train loss:3.914240 +step:2004 train loss:3.830253 +step:2005 train loss:3.896467 +step:2006 train loss:3.894521 +step:2007 train loss:3.778563 +step:2008 train loss:3.835776 +step:2009 train loss:3.873823 +step:2010 train loss:3.890191 +step:2011 train loss:3.830557 +step:2012 train loss:3.890199 +step:2013 train loss:3.828951 +step:2014 train loss:3.923609 +step:2015 train loss:3.918015 +step:2016 train loss:3.892432 +step:2017 train loss:3.876241 +step:2018 train loss:3.921343 +step:2019 train loss:3.842169 +step:2020 train loss:3.883104 +step:2021 train loss:3.896916 +step:2022 train loss:3.950758 +step:2023 train loss:3.876671 +step:2024 train loss:3.907741 +step:2025 train loss:3.885243 +step:2026 train loss:3.783808 +step:2027 train loss:3.786497 +step:2028 train loss:3.880486 +step:2029 train loss:3.830893 +step:2030 train loss:3.896042 +step:2031 train loss:3.934757 +step:2032 train loss:3.836071 +step:2033 train loss:3.871946 +step:2034 train loss:3.876257 +step:2035 train loss:3.872832 +step:2036 train loss:3.852349 +step:2037 train loss:3.875684 +step:2038 train loss:3.872060 +step:2039 train loss:3.897557 +step:2040 train loss:3.767308 +step:2041 train loss:3.773653 +step:2042 train loss:3.846111 +step:2043 train loss:3.854069 +step:2044 train loss:3.921150 +step:2045 train loss:3.859606 +step:2046 train loss:3.883128 +step:2047 train loss:3.818928 +step:2048 train loss:3.866604 +step:2049 train loss:3.863963 +step:2050 train loss:3.877907 +step:2051 train loss:3.902456 +step:2052 train loss:3.869820 +step:2053 train loss:3.982146 +step:2054 train loss:3.890612 +step:2055 train loss:3.892586 +step:2056 train loss:3.875762 +step:2057 train loss:3.896507 +step:2058 train loss:3.902708 +step:2059 train loss:3.850789 +step:2060 train loss:3.824868 +step:2061 train loss:3.864993 +step:2062 train loss:3.849426 +step:2063 train loss:3.915531 +step:2064 train loss:3.865626 +step:2065 train loss:3.910772 +step:2066 train loss:3.866647 +step:2067 train loss:3.990418 +step:2068 train loss:3.843136 +step:2069 train loss:3.783180 +step:2070 train loss:3.821438 +step:2071 train loss:3.825999 +step:2072 train loss:3.862225 +step:2073 train loss:3.906997 +step:2074 train loss:3.813873 +step:2075 train loss:3.874404 +step:2076 train loss:3.855718 +step:2077 train loss:3.825503 +step:2078 train loss:3.863466 +step:2079 train loss:3.880678 +step:2080 train loss:3.830752 +step:2081 train loss:3.881205 +step:2082 train loss:3.866030 +step:2083 train loss:3.890925 +step:2084 train loss:3.841382 +step:2085 train loss:3.838587 +step:2086 train loss:3.898720 +step:2087 train loss:3.889436 +step:2088 train loss:3.890254 +step:2089 train loss:3.900438 +step:2090 train loss:3.840502 +step:2091 train loss:3.869638 +step:2092 train loss:3.875755 
+step:2093 train loss:3.832319 +step:2094 train loss:3.914732 +step:2095 train loss:3.791857 +step:2096 train loss:3.901711 +step:2097 train loss:3.841563 +step:2098 train loss:3.820895 +step:2099 train loss:3.871753 +step:2100 train loss:3.875693 +step:2101 train loss:3.896776 +step:2102 train loss:3.869091 +step:2103 train loss:3.871055 +step:2104 train loss:3.876921 +step:2105 train loss:3.843421 +step:2106 train loss:3.896689 +step:2107 train loss:3.816322 +step:2108 train loss:3.835234 +step:2109 train loss:3.824059 +step:2110 train loss:3.843083 +step:2111 train loss:3.945272 +step:2112 train loss:3.819479 +step:2113 train loss:3.820162 +step:2114 train loss:3.870100 +step:2115 train loss:3.842476 +step:2116 train loss:3.882600 +step:2117 train loss:3.816400 +step:2118 train loss:3.804452 +step:2119 train loss:3.811954 +step:2120 train loss:3.811850 +step:2121 train loss:3.837316 +step:2122 train loss:3.825123 +step:2123 train loss:3.872083 +step:2124 train loss:3.880768 +step:2125 train loss:3.838337 +step:2126 train loss:3.861647 +step:2127 train loss:3.869729 +step:2128 train loss:3.845571 +step:2129 train loss:3.907238 +step:2130 train loss:3.894650 +step:2131 train loss:3.793375 +step:2132 train loss:3.825384 +step:2133 train loss:3.819771 +step:2134 train loss:3.887200 +step:2135 train loss:3.903984 +step:2136 train loss:4.014673 +step:2137 train loss:3.851767 +step:2138 train loss:3.837642 +step:2139 train loss:3.880829 +step:2140 train loss:3.815581 +step:2141 train loss:3.838720 +step:2142 train loss:3.803631 +step:2143 train loss:3.839545 +step:2144 train loss:3.918190 +step:2145 train loss:3.883185 +step:2146 train loss:3.815202 +step:2147 train loss:3.837005 +step:2148 train loss:3.871593 +step:2149 train loss:3.824603 +step:2150 train loss:3.893548 +step:2151 train loss:3.885997 +step:2152 train loss:3.826924 +step:2153 train loss:3.832396 +step:2154 train loss:3.803263 +step:2155 train loss:3.805534 +step:2156 train loss:3.942181 +step:2157 train loss:3.839842 +step:2158 train loss:3.866713 +step:2159 train loss:3.971195 +step:2160 train loss:3.859424 +step:2161 train loss:3.863077 +step:2162 train loss:3.926975 +step:2163 train loss:3.897759 +step:2164 train loss:3.924828 +step:2165 train loss:3.851477 +step:2166 train loss:3.819780 +step:2167 train loss:3.841496 +step:2168 train loss:3.828161 +step:2169 train loss:3.850069 +step:2170 train loss:3.881944 +step:2171 train loss:3.865950 +step:2172 train loss:3.884643 +step:2173 train loss:3.801059 +step:2174 train loss:3.857039 +step:2175 train loss:3.841761 +step:2176 train loss:3.793245 +step:2177 train loss:3.893722 +step:2178 train loss:3.819197 +step:2179 train loss:3.852254 +step:2180 train loss:3.850212 +step:2181 train loss:3.795515 +step:2182 train loss:3.881337 +step:2183 train loss:3.879883 +step:2184 train loss:3.874541 +step:2185 train loss:3.862408 +step:2186 train loss:3.814541 +step:2187 train loss:3.808466 +step:2188 train loss:3.866926 +step:2189 train loss:3.774533 +step:2190 train loss:3.830413 +step:2191 train loss:3.854456 +step:2192 train loss:3.921834 +step:2193 train loss:3.847742 +step:2194 train loss:3.843245 +step:2195 train loss:3.855713 +step:2196 train loss:3.832930 +step:2197 train loss:3.824718 +step:2198 train loss:3.837783 +step:2199 train loss:3.829626 +step:2200 train loss:3.861689 +step:2201 train loss:3.909091 +step:2202 train loss:3.860975 +step:2203 train loss:3.837120 +step:2204 train loss:3.863566 +step:2205 train loss:3.850047 +step:2206 train loss:3.878252 +step:2207 train 
loss:3.869816 +step:2208 train loss:3.810745 +step:2209 train loss:3.999521 +step:2210 train loss:3.853282 +step:2211 train loss:3.816285 +step:2212 train loss:3.924628 +step:2213 train loss:3.979028 +step:2214 train loss:3.905289 +step:2215 train loss:3.831154 +step:2216 train loss:3.840474 +step:2217 train loss:3.895198 +step:2218 train loss:3.834007 +step:2219 train loss:3.825993 +step:2220 train loss:3.867908 +step:2221 train loss:3.849915 +step:2222 train loss:3.893884 +step:2223 train loss:3.866144 +step:2224 train loss:3.836101 +step:2225 train loss:3.932394 +step:2226 train loss:3.870015 +step:2227 train loss:3.871691 +step:2228 train loss:3.903330 +step:2229 train loss:3.830848 +step:2230 train loss:3.783820 +step:2231 train loss:3.904896 +step:2232 train loss:3.888999 +step:2233 train loss:3.972094 +step:2234 train loss:3.830169 +step:2235 train loss:3.781409 +step:2236 train loss:3.710504 +step:2237 train loss:3.811169 +step:2238 train loss:3.851169 +step:2239 train loss:3.859692 +step:2240 train loss:3.813331 +step:2241 train loss:3.913110 +step:2242 train loss:3.830670 +step:2243 train loss:3.873827 +step:2244 train loss:3.844185 +step:2245 train loss:3.847755 +step:2246 train loss:3.853085 +step:2247 train loss:3.826250 +step:2248 train loss:3.802810 +step:2249 train loss:3.856493 +step:2250 validation loss:3.781624 +step:2250 train loss:3.840503 +step:2251 train loss:3.846649 +step:2252 train loss:3.893325 +step:2253 train loss:3.809671 +step:2254 train loss:3.899752 +step:2255 train loss:3.816758 +step:2256 train loss:3.876374 +step:2257 train loss:3.794059 +step:2258 train loss:3.862771 +step:2259 train loss:3.842975 +step:2260 train loss:3.834218 +step:2261 train loss:3.862815 +step:2262 train loss:3.823254 +step:2263 train loss:3.802732 +step:2264 train loss:3.872811 +step:2265 train loss:3.824091 +step:2266 train loss:3.826298 +step:2267 train loss:3.826272 +step:2268 train loss:3.823109 +step:2269 train loss:3.843310 +step:2270 train loss:3.841965 +step:2271 train loss:3.821311 +step:2272 train loss:3.803571 +step:2273 train loss:3.872080 +step:2274 train loss:3.824364 +step:2275 train loss:3.872188 +step:2276 train loss:3.816354 +step:2277 train loss:3.835951 +step:2278 train loss:3.851223 +step:2279 train loss:3.821086 +step:2280 train loss:3.774323 +step:2281 train loss:3.843895 +step:2282 train loss:3.754557 +step:2283 train loss:3.848830 +step:2284 train loss:3.767207 +step:2285 train loss:3.855550 +step:2286 train loss:3.820305 +step:2287 train loss:3.846547 +step:2288 train loss:3.802533 +step:2289 train loss:3.991982 +step:2290 train loss:4.163057 +step:2291 train loss:3.853675 +step:2292 train loss:3.853640 +step:2293 train loss:3.871964 +step:2294 train loss:3.817949 +step:2295 train loss:3.839893 +step:2296 train loss:3.808128 +step:2297 train loss:3.764695 +step:2298 train loss:3.865313 +step:2299 train loss:3.772861 +step:2300 train loss:3.768295 +step:2301 train loss:3.807659 +step:2302 train loss:3.866308 +step:2303 train loss:3.825308 +step:2304 train loss:3.834636 +step:2305 train loss:3.789121 +step:2306 train loss:3.809561 +step:2307 train loss:3.773602 +step:2308 train loss:3.787481 +step:2309 train loss:3.831348 +step:2310 train loss:3.851720 +step:2311 train loss:3.796935 +step:2312 train loss:3.780642 +step:2313 train loss:3.873144 +step:2314 train loss:3.785721 +step:2315 train loss:3.746054 +step:2316 train loss:3.746728 +step:2317 train loss:3.763295 +step:2318 train loss:3.866557 +step:2319 train loss:3.800877 +step:2320 train loss:3.779783 
+step:2321 train loss:3.820509 +step:2322 train loss:3.846674 +step:2323 train loss:3.791973 +step:2324 train loss:3.843755 +step:2325 train loss:3.778522 +step:2326 train loss:3.794281 +step:2327 train loss:3.796246 +step:2328 train loss:3.792331 +step:2329 train loss:3.842805 +step:2330 train loss:3.827854 +step:2331 train loss:3.849449 +step:2332 train loss:3.765574 +step:2333 train loss:3.849308 +step:2334 train loss:4.019297 +step:2335 train loss:3.904616 +step:2336 train loss:3.794635 +step:2337 train loss:3.849868 +step:2338 train loss:3.789619 +step:2339 train loss:3.780899 +step:2340 train loss:3.772997 +step:2341 train loss:3.815563 +step:2342 train loss:3.765237 +step:2343 train loss:3.826984 +step:2344 train loss:3.834448 +step:2345 train loss:3.787821 +step:2346 train loss:3.747940 +step:2347 train loss:3.770967 +step:2348 train loss:3.818005 +step:2349 train loss:3.847855 +step:2350 train loss:3.796125 +step:2351 train loss:3.818617 +step:2352 train loss:3.869760 +step:2353 train loss:3.792302 +step:2354 train loss:3.777063 +step:2355 train loss:3.815831 +step:2356 train loss:3.795309 +step:2357 train loss:3.807761 +step:2358 train loss:3.791717 +step:2359 train loss:3.794878 +step:2360 train loss:3.765482 +step:2361 train loss:3.770152 +step:2362 train loss:3.746132 +step:2363 train loss:3.756091 +step:2364 train loss:3.833277 +step:2365 train loss:3.788466 +step:2366 train loss:3.854891 +step:2367 train loss:3.779817 +step:2368 train loss:3.775850 +step:2369 train loss:3.794439 +step:2370 train loss:3.826560 +step:2371 train loss:3.808837 +step:2372 train loss:3.830941 +step:2373 train loss:3.801861 +step:2374 train loss:3.785807 +step:2375 train loss:3.793925 +step:2376 train loss:3.787520 +step:2377 train loss:3.777741 +step:2378 train loss:3.748782 +step:2379 train loss:3.769480 +step:2380 train loss:3.781049 +step:2381 train loss:3.771287 +step:2382 train loss:3.780384 +step:2383 train loss:3.827512 +step:2384 train loss:3.721233 +step:2385 train loss:3.808565 +step:2386 train loss:3.805765 +step:2387 train loss:3.801031 +step:2388 train loss:3.777350 +step:2389 train loss:3.778651 +step:2390 train loss:3.813751 +step:2391 train loss:3.794657 +step:2392 train loss:3.790858 +step:2393 train loss:3.736696 +step:2394 train loss:3.836317 +step:2395 train loss:3.807705 +step:2396 train loss:3.808772 +step:2397 train loss:3.774455 +step:2398 train loss:3.778694 +step:2399 train loss:3.722368 +step:2400 train loss:3.742025 +step:2401 train loss:3.725706 +step:2402 train loss:3.751458 +step:2403 train loss:3.794082 +step:2404 train loss:3.801131 +step:2405 train loss:3.754731 +step:2406 train loss:3.754978 +step:2407 train loss:3.786469 +step:2408 train loss:3.863758 +step:2409 train loss:3.820686 +step:2410 train loss:3.849097 +step:2411 train loss:3.788218 +step:2412 train loss:3.798248 +step:2413 train loss:3.835899 +step:2414 train loss:3.723650 +step:2415 train loss:3.822379 +step:2416 train loss:3.801095 +step:2417 train loss:3.818621 +step:2418 train loss:3.847576 +step:2419 train loss:3.847928 +step:2420 train loss:3.714312 +step:2421 train loss:3.790131 +step:2422 train loss:3.869037 +step:2423 train loss:3.817704 +step:2424 train loss:3.805116 +step:2425 train loss:3.767840 +step:2426 train loss:3.776639 +step:2427 train loss:3.775949 +step:2428 train loss:3.759996 +step:2429 train loss:3.849127 +step:2430 train loss:3.777173 +step:2431 train loss:3.815404 +step:2432 train loss:3.759429 +step:2433 train loss:3.747861 +step:2434 train loss:3.781683 +step:2435 train 
loss:3.782069 +step:2436 train loss:3.841716 +step:2437 train loss:3.797554 +step:2438 train loss:3.832600 +step:2439 train loss:3.839846 +step:2440 train loss:3.783071 +step:2441 train loss:3.760879 +step:2442 train loss:3.779634 +step:2443 train loss:3.748652 +step:2444 train loss:3.739133 +step:2445 train loss:3.840046 +step:2446 train loss:3.823886 +step:2447 train loss:3.812494 +step:2448 train loss:3.832184 +step:2449 train loss:3.798492 +step:2450 train loss:3.735212 +step:2451 train loss:3.891407 +step:2452 train loss:3.789145 +step:2453 train loss:3.780728 +step:2454 train loss:3.754890 +step:2455 train loss:3.824883 +step:2456 train loss:3.775331 +step:2457 train loss:3.802061 +step:2458 train loss:3.855347 +step:2459 train loss:3.753905 +step:2460 train loss:3.779154 +step:2461 train loss:3.777156 +step:2462 train loss:3.804933 +step:2463 train loss:3.859895 +step:2464 train loss:3.808348 +step:2465 train loss:3.818705 +step:2466 train loss:3.816418 +step:2467 train loss:3.773733 +step:2468 train loss:3.780784 +step:2469 train loss:3.846597 +step:2470 train loss:3.794680 +step:2471 train loss:3.811225 +step:2472 train loss:3.768340 +step:2473 train loss:3.813468 +step:2474 train loss:3.793447 +step:2475 train loss:3.797806 +step:2476 train loss:3.767614 +step:2477 train loss:3.778467 +step:2478 train loss:3.771359 +step:2479 train loss:3.845093 +step:2480 train loss:3.784192 +step:2481 train loss:3.763840 +step:2482 train loss:3.752170 +step:2483 train loss:3.763130 +step:2484 train loss:3.819017 +step:2485 train loss:3.807489 +step:2486 train loss:3.769764 +step:2487 train loss:3.836783 +step:2488 train loss:3.781480 +step:2489 train loss:3.790649 +step:2490 train loss:3.721645 +step:2491 train loss:3.809482 +step:2492 train loss:3.800248 +step:2493 train loss:3.774484 +step:2494 train loss:3.871385 +step:2495 train loss:3.742867 +step:2496 train loss:3.745880 +step:2497 train loss:3.778744 +step:2498 train loss:3.760309 +step:2499 train loss:3.849975 +step:2500 validation loss:3.730253 total_sharp:1.1372e-02 L1_sharp:1.5569e-02 L2_sharp:1.3594e-03 L3_sharp:2.5265e-03 L4_sharp:2.8864e-03 L5_sharp:2.6404e-03 L6_sharp:3.3167e-03 L7_sharp:3.1911e-03 L8_sharp:4.1785e-03 L9_sharp:2.4516e-03 L10_sharp:1.5307e-03 L11_sharp:1.3666e-03 L12_sharp:2.5550e-03 total_fnorm:2.0380e+00 total_l1_linf:1.7799e+04 total_spectral:2.0380e+00 L1_fnorm:3.9092e-01 L2_fnorm:4.3057e-01 L3_fnorm:4.2586e-01 L4_fnorm:4.1502e-01 L5_fnorm:3.9950e-01 L6_fnorm:4.3195e-01 L7_fnorm:4.4820e-01 L8_fnorm:4.6387e-01 L9_fnorm:4.9134e-01 L10_fnorm:4.9880e-01 L11_fnorm:4.9750e-01 L12_fnorm:4.8257e-01 L1_l1linf:4.9573e-01 L2_l1linf:5.2232e-01 L3_l1linf:6.0543e-01 L4_l1linf:6.3951e-01 L5_l1linf:4.4873e-01 L6_l1linf:5.3595e-01 L7_l1linf:4.8802e-01 L8_l1linf:4.7883e-01 L9_l1linf:5.0317e-01 L10_l1linf:5.1694e-01 L11_l1linf:5.1100e-01 L12_l1linf:5.1657e-01 L1_spectral:6.1625e-02 L2_spectral:7.4354e-02 L3_spectral:7.1916e-02 L4_spectral:8.0494e-02 L5_spectral:6.4290e-02 L6_spectral:6.6439e-02 L7_spectral:5.5845e-02 L8_spectral:5.2545e-02 L9_spectral:5.4380e-02 L10_spectral:5.3496e-02 L11_spectral:5.4218e-02 L12_spectral:8.1145e-02 v_norm:2.0380e+00 cos_v_-g_hvp:1.1581e-01 g_hvp_norm:3.0438e-01 cos_v_-g_t:1.4348e-01 g_t_norm:2.4655e-01 hv_norm:4.3800e-01 cos_v_hv:5.2915e-02 hg_norm:5.2981e+00 cos_g_hg:3.1480e-01 v_par:1.6229e-02 v_perp:2.0379e+00 L1_cos_v_neg_g:2.0189e-01 L1_v_norm:3.9092e-01 L2_cos_v_neg_g:1.0678e-01 L2_v_norm:4.3057e-01 L3_cos_v_neg_g:1.0899e-01 L3_v_norm:4.2586e-01 L4_cos_v_neg_g:8.7777e-02 
L4_v_norm:4.1502e-01 L5_cos_v_neg_g:8.2998e-02 L5_v_norm:3.9950e-01 L6_cos_v_neg_g:1.0809e-01 L6_v_norm:4.3195e-01 L7_cos_v_neg_g:1.2717e-01 L7_v_norm:4.4820e-01 L8_cos_v_neg_g:1.2812e-01 L8_v_norm:4.6387e-01 L9_cos_v_neg_g:1.2540e-01 L9_v_norm:4.9134e-01 L10_cos_v_neg_g:1.4517e-01 L10_v_norm:4.9880e-01 L11_cos_v_neg_g:1.6688e-01 L11_v_norm:4.9750e-01 L12_cos_v_neg_g:1.8692e-01 L12_v_norm:4.8257e-01 +step:2500 train loss:3.770953 +step:2501 train loss:3.803149 +step:2502 train loss:3.823049 +step:2503 train loss:3.758820 +step:2504 train loss:3.767859 +step:2505 train loss:3.855210 +step:2506 train loss:3.828152 +step:2507 train loss:3.737795 +step:2508 train loss:3.787779 +step:2509 train loss:3.786873 +step:2510 train loss:3.764244 +step:2511 train loss:3.729876 +step:2512 train loss:3.811992 +step:2513 train loss:3.787199 +step:2514 train loss:3.754514 +step:2515 train loss:3.772426 +step:2516 train loss:3.778466 +step:2517 train loss:3.761930 +step:2518 train loss:3.751897 +step:2519 train loss:3.789585 +step:2520 train loss:3.750485 +step:2521 train loss:3.791985 +step:2522 train loss:3.786198 +step:2523 train loss:3.734910 +step:2524 train loss:3.773665 +step:2525 train loss:3.830091 +step:2526 train loss:3.747527 +step:2527 train loss:3.814938 +step:2528 train loss:3.883541 +step:2529 train loss:3.731757 +step:2530 train loss:3.760335 +step:2531 train loss:3.751934 +step:2532 train loss:3.841484 +step:2533 train loss:3.713005 +step:2534 train loss:3.718640 +step:2535 train loss:3.760106 +step:2536 train loss:3.725468 +step:2537 train loss:3.753695 +step:2538 train loss:3.795471 +step:2539 train loss:3.699424 +step:2540 train loss:3.779336 +step:2541 train loss:3.787584 +step:2542 train loss:3.817425 +step:2543 train loss:3.729632 +step:2544 train loss:3.822855 +step:2545 train loss:3.754735 +step:2546 train loss:3.778567 +step:2547 train loss:3.773146 +step:2548 train loss:3.796919 +step:2549 train loss:3.786046 +step:2550 train loss:3.787972 +step:2551 train loss:3.782319 +step:2552 train loss:3.827831 +step:2553 train loss:3.835483 +step:2554 train loss:3.790771 +step:2555 train loss:3.764441 +step:2556 train loss:3.773423 +step:2557 train loss:3.761335 +step:2558 train loss:3.788704 +step:2559 train loss:3.786916 +step:2560 train loss:3.821907 +step:2561 train loss:3.735295 +step:2562 train loss:3.783320 +step:2563 train loss:3.769436 +step:2564 train loss:3.732393 +step:2565 train loss:3.812418 +step:2566 train loss:3.781595 +step:2567 train loss:3.711978 +step:2568 train loss:3.797547 +step:2569 train loss:3.807628 +step:2570 train loss:3.763679 +step:2571 train loss:3.766348 +step:2572 train loss:3.822440 +step:2573 train loss:3.788836 +step:2574 train loss:3.709115 +step:2575 train loss:3.778003 +step:2576 train loss:3.757888 +step:2577 train loss:3.719491 +step:2578 train loss:3.905821 +step:2579 train loss:3.741212 +step:2580 train loss:3.758368 +step:2581 train loss:3.788038 +step:2582 train loss:3.758821 +step:2583 train loss:3.844645 +step:2584 train loss:3.788764 +step:2585 train loss:3.743028 +step:2586 train loss:3.732235 +step:2587 train loss:3.762330 +step:2588 train loss:3.762951 +step:2589 train loss:3.821860 +step:2590 train loss:3.692925 +step:2591 train loss:3.771307 +step:2592 train loss:3.749086 +step:2593 train loss:3.718096 +step:2594 train loss:3.763714 +step:2595 train loss:3.817140 +step:2596 train loss:3.721056 +step:2597 train loss:3.810374 +step:2598 train loss:3.780008 +step:2599 train loss:3.821987 +step:2600 train loss:3.710152 +step:2601 train 
loss:3.762648 +step:2602 train loss:3.752718 +step:2603 train loss:3.778782 +step:2604 train loss:3.848660 +step:2605 train loss:3.804671 +step:2606 train loss:3.806477 +step:2607 train loss:3.802946 +step:2608 train loss:3.706999 +step:2609 train loss:3.772309 +step:2610 train loss:3.744488 +step:2611 train loss:3.797129 +step:2612 train loss:3.837734 +step:2613 train loss:3.780564 +step:2614 train loss:3.800357 +step:2615 train loss:3.754623 +step:2616 train loss:3.665393 +step:2617 train loss:3.736669 +step:2618 train loss:3.714214 +step:2619 train loss:3.750367 +step:2620 train loss:3.778819 +step:2621 train loss:3.891822 +step:2622 train loss:3.841890 +step:2623 train loss:3.864442 +step:2624 train loss:3.783843 +step:2625 train loss:3.811954 +step:2626 train loss:3.754864 +step:2627 train loss:3.724300 +step:2628 train loss:3.760908 +step:2629 train loss:3.776779 +step:2630 train loss:3.739890 +step:2631 train loss:3.767902 +step:2632 train loss:3.778509 +step:2633 train loss:3.730878 +step:2634 train loss:3.762351 +step:2635 train loss:3.809901 +step:2636 train loss:3.775859 +step:2637 train loss:3.767334 +step:2638 train loss:3.748872 +step:2639 train loss:3.809067 +step:2640 train loss:3.739061 +step:2641 train loss:3.834290 +step:2642 train loss:3.740902 +step:2643 train loss:3.757442 +step:2644 train loss:3.743905 +step:2645 train loss:3.775740 +step:2646 train loss:3.745087 +step:2647 train loss:3.763298 +step:2648 train loss:3.755867 +step:2649 train loss:3.771451 +step:2650 train loss:3.739431 +step:2651 train loss:3.772715 +step:2652 train loss:3.694758 +step:2653 train loss:3.812549 +step:2654 train loss:3.745818 +step:2655 train loss:3.835098 +step:2656 train loss:3.767563 +step:2657 train loss:3.898924 +step:2658 train loss:3.901029 +step:2659 train loss:3.764002 +step:2660 train loss:3.816608 +step:2661 train loss:3.724172 +step:2662 train loss:3.735698 +step:2663 train loss:3.798710 +step:2664 train loss:3.729946 +step:2665 train loss:3.789317 +step:2666 train loss:3.765415 +step:2667 train loss:3.767150 +step:2668 train loss:3.733740 +step:2669 train loss:3.799042 +step:2670 train loss:3.753425 +step:2671 train loss:3.731236 +step:2672 train loss:3.770919 +step:2673 train loss:3.774608 +step:2674 train loss:3.794861 +step:2675 train loss:3.770900 +step:2676 train loss:3.765064 +step:2677 train loss:3.762466 +step:2678 train loss:3.731608 +step:2679 train loss:3.838542 +step:2680 train loss:3.790736 +step:2681 train loss:3.691715 +step:2682 train loss:3.774354 +step:2683 train loss:3.748390 +step:2684 train loss:3.774386 +step:2685 train loss:3.777896 +step:2686 train loss:3.723748 +step:2687 train loss:3.779234 +step:2688 train loss:3.753962 +step:2689 train loss:3.764307 +step:2690 train loss:3.810955 +step:2691 train loss:3.799560 +step:2692 train loss:3.780229 +step:2693 train loss:3.758885 +step:2694 train loss:3.753858 +step:2695 train loss:3.783335 +step:2696 train loss:3.767688 +step:2697 train loss:3.799154 +step:2698 train loss:3.782451 +step:2699 train loss:3.757642 +step:2700 train loss:3.811713 +step:2701 train loss:3.766483 +step:2702 train loss:3.759811 +step:2703 train loss:3.732872 +step:2704 train loss:3.753422 +step:2705 train loss:3.788629 +step:2706 train loss:3.767878 +step:2707 train loss:3.794375 +step:2708 train loss:3.691653 +step:2709 train loss:3.790256 +step:2710 train loss:3.755311 +step:2711 train loss:3.807060 +step:2712 train loss:3.762964 +step:2713 train loss:3.719900 +step:2714 train loss:3.842328 +step:2715 train loss:3.783542 
+step:2716 train loss:3.768677 +step:2717 train loss:3.772641 +step:2718 train loss:3.814142 +step:2719 train loss:3.812343 +step:2720 train loss:3.826036 +step:2721 train loss:3.754277 +step:2722 train loss:3.741764 +step:2723 train loss:3.772219 +step:2724 train loss:3.790949 +step:2725 train loss:3.792654 +step:2726 train loss:3.767356 +step:2727 train loss:3.784042 +step:2728 train loss:3.729542 +step:2729 train loss:3.770103 +step:2730 train loss:3.768466 +step:2731 train loss:3.778797 +step:2732 train loss:3.754706 +step:2733 train loss:3.826186 +step:2734 train loss:3.759187 +step:2735 train loss:3.768499 +step:2736 train loss:3.771384 +step:2737 train loss:3.771330 +step:2738 train loss:3.737968 +step:2739 train loss:3.771752 +step:2740 train loss:3.803105 +step:2741 train loss:3.881390 +step:2742 train loss:3.936347 +step:2743 train loss:3.805349 +step:2744 train loss:3.777158 +step:2745 train loss:3.832314 +step:2746 train loss:3.761278 +step:2747 train loss:3.767809 +step:2748 train loss:3.772305 +step:2749 train loss:3.786294 +step:2750 validation loss:3.702836 +step:2750 train loss:3.879588 +step:2751 train loss:3.738893 +step:2752 train loss:3.776371 +step:2753 train loss:3.765010 +step:2754 train loss:3.758058 +step:2755 train loss:3.756034 +step:2756 train loss:3.756438 +step:2757 train loss:3.788133 +step:2758 train loss:3.723970 +step:2759 train loss:3.732083 +step:2760 train loss:3.744483 +step:2761 train loss:3.740934 +step:2762 train loss:3.729142 +step:2763 train loss:3.724004 +step:2764 train loss:3.745591 +step:2765 train loss:3.738028 +step:2766 train loss:3.830093 +step:2767 train loss:3.782883 +step:2768 train loss:3.792638 +step:2769 train loss:3.806018 +step:2770 train loss:3.769148 +step:2771 train loss:3.771946 +step:2772 train loss:3.783808 +step:2773 train loss:3.747041 +step:2774 train loss:3.816496 +step:2775 train loss:3.719205 +step:2776 train loss:3.714318 +step:2777 train loss:3.731644 +step:2778 train loss:3.678358 +step:2779 train loss:3.757376 +step:2780 train loss:3.720257 +step:2781 train loss:3.772954 +step:2782 train loss:3.780546 +step:2783 train loss:3.803928 +step:2784 train loss:3.697608 +step:2785 train loss:3.733532 +step:2786 train loss:3.754085 +step:2787 train loss:3.819104 +step:2788 train loss:3.781026 +step:2789 train loss:3.886736 +step:2790 train loss:3.736262 +step:2791 train loss:3.778010 +step:2792 train loss:3.754845 +step:2793 train loss:3.816789 +step:2794 train loss:3.717715 +step:2795 train loss:3.740007 +step:2796 train loss:3.773003 +step:2797 train loss:3.866281 +step:2798 train loss:3.753433 +step:2799 train loss:3.743805 +step:2800 train loss:3.762244 +step:2801 train loss:3.775718 +step:2802 train loss:3.891161 +step:2803 train loss:3.790439 +step:2804 train loss:3.759403 +step:2805 train loss:3.759120 +step:2806 train loss:3.740671 +step:2807 train loss:3.736969 +step:2808 train loss:3.760799 +step:2809 train loss:3.763937 +step:2810 train loss:3.749629 +step:2811 train loss:3.766445 +step:2812 train loss:3.830788 +step:2813 train loss:3.749856 +step:2814 train loss:3.772988 +step:2815 train loss:3.749532 +step:2816 train loss:3.810663 +step:2817 train loss:3.798554 +step:2818 train loss:3.759699 +step:2819 train loss:3.760010 +step:2820 train loss:3.755830 +step:2821 train loss:3.768850 +step:2822 train loss:3.752140 +step:2823 train loss:3.830815 +step:2824 train loss:3.758954 +step:2825 train loss:3.719871 +step:2826 train loss:3.765411 +step:2827 train loss:3.728885 +step:2828 train loss:3.725499 +step:2829 
train loss:3.925830 +step:2830 train loss:3.722824 +step:2831 train loss:3.748538 +step:2832 train loss:3.718760 +step:2833 train loss:3.744111 +step:2834 train loss:3.726155 +step:2835 train loss:3.796343 +step:2836 train loss:3.720205 +step:2837 train loss:3.768765 +step:2838 train loss:3.737704 +step:2839 train loss:3.736530 +step:2840 train loss:3.712193 +step:2841 train loss:3.789577 +step:2842 train loss:3.693471 +step:2843 train loss:3.762792 +step:2844 train loss:3.685706 +step:2845 train loss:3.701035 +step:2846 train loss:3.767156 +step:2847 train loss:3.999339 +step:2848 train loss:3.762410 +step:2849 train loss:3.835781 +step:2850 train loss:3.727925 +step:2851 train loss:3.751732 +step:2852 train loss:3.761606 +step:2853 train loss:3.781988 +step:2854 train loss:3.767971 +step:2855 train loss:3.876940 +step:2856 train loss:3.843711 +step:2857 train loss:3.711750 +step:2858 train loss:3.739797 +step:2859 train loss:3.741753 +step:2860 train loss:3.717335 +step:2861 train loss:3.773078 +step:2862 train loss:3.724977 +step:2863 train loss:3.776565 +step:2864 train loss:3.748777 +step:2865 train loss:3.716506 +step:2866 train loss:3.683346 +step:2867 train loss:3.697297 +step:2868 train loss:3.782773 +step:2869 train loss:3.732818 +step:2870 train loss:3.754597 +step:2871 train loss:3.742404 +step:2872 train loss:3.813394 +step:2873 train loss:3.722085 +step:2874 train loss:3.728559 +step:2875 train loss:3.673743 +step:2876 train loss:3.759817 +step:2877 train loss:3.702331 +step:2878 train loss:3.716451 +step:2879 train loss:3.737497 +step:2880 train loss:3.741693 +step:2881 train loss:3.765341 +step:2882 train loss:3.764933 +step:2883 train loss:3.725783 +step:2884 train loss:3.769623 +step:2885 train loss:3.731986 +step:2886 train loss:3.717015 +step:2887 train loss:3.725391 +step:2888 train loss:3.760839 +step:2889 train loss:3.781974 +step:2890 train loss:3.830115 +step:2891 train loss:3.770208 +step:2892 train loss:3.652898 +step:2893 train loss:3.732260 +step:2894 train loss:3.749480 +step:2895 train loss:3.755924 +step:2896 train loss:3.727093 +step:2897 train loss:3.729671 +step:2898 train loss:3.693807 +step:2899 train loss:3.737828 +step:2900 train loss:3.774995 +step:2901 train loss:3.773842 +step:2902 train loss:3.732087 +step:2903 train loss:3.697129 +step:2904 train loss:3.721833 +step:2905 train loss:3.755889 +step:2906 train loss:3.671937 +step:2907 train loss:3.709044 +step:2908 train loss:3.805769 +step:2909 train loss:3.775538 +step:2910 train loss:3.778627 +step:2911 train loss:3.730530 +step:2912 train loss:3.711762 +step:2913 train loss:3.732802 +step:2914 train loss:3.738216 +step:2915 train loss:3.733280 +step:2916 train loss:3.736496 +step:2917 train loss:3.710760 +step:2918 train loss:3.778084 +step:2919 train loss:3.690895 +step:2920 train loss:3.749164 +step:2921 train loss:3.722055 +step:2922 train loss:3.758135 +step:2923 train loss:3.895055 +step:2924 train loss:3.777530 +step:2925 train loss:3.734270 +step:2926 train loss:3.727188 +step:2927 train loss:3.750405 +step:2928 train loss:3.678100 +step:2929 train loss:3.820968 +step:2930 train loss:3.747750 +step:2931 train loss:3.773814 +step:2932 train loss:3.781544 +step:2933 train loss:3.785728 +step:2934 train loss:3.761842 +step:2935 train loss:3.741906 +step:2936 train loss:3.700418 +step:2937 train loss:3.740815 +step:2938 train loss:3.742676 +step:2939 train loss:3.693021 +step:2940 train loss:3.758444 +step:2941 train loss:3.734437 +step:2942 train loss:3.735428 +step:2943 train loss:3.751279 
+step:2944 train loss:3.702242 +step:2945 train loss:3.722060 +step:2946 train loss:3.751921 +step:2947 train loss:3.743332 +step:2948 train loss:3.708343 +step:2949 train loss:3.728161 +step:2950 train loss:3.732839 +step:2951 train loss:3.758193 +step:2952 train loss:3.760312 +step:2953 train loss:3.787477 +step:2954 train loss:3.754814 +step:2955 train loss:3.734249 +step:2956 train loss:3.727356 +step:2957 train loss:3.721864 +step:2958 train loss:3.678192 +step:2959 train loss:3.757148 +step:2960 train loss:3.723922 +step:2961 train loss:3.744708 +step:2962 train loss:3.736199 +step:2963 train loss:3.764488 +step:2964 train loss:3.700034 +step:2965 train loss:3.764575 +step:2966 train loss:3.752229 +step:2967 train loss:3.740194 +step:2968 train loss:3.685716 +step:2969 train loss:3.741363 +step:2970 train loss:3.708296 +step:2971 train loss:3.724443 +step:2972 train loss:3.675930 +step:2973 train loss:3.732353 +step:2974 train loss:3.693090 +step:2975 train loss:3.684580 +step:2976 train loss:3.680012 +step:2977 train loss:3.729112 +step:2978 train loss:3.705201 +step:2979 train loss:3.713314 +step:2980 train loss:3.771156 +step:2981 train loss:3.733754 +step:2982 train loss:3.764341 +step:2983 train loss:3.750134 +step:2984 train loss:3.738462 +step:2985 train loss:3.729301 +step:2986 train loss:3.740896 +step:2987 train loss:3.736942 +step:2988 train loss:3.708226 +step:2989 train loss:3.780111 +step:2990 train loss:3.731807 +step:2991 train loss:3.728203 +step:2992 train loss:3.743383 +step:2993 train loss:3.741636 +step:2994 train loss:3.704960 +step:2995 train loss:3.717565 +step:2996 train loss:3.745001 +step:2997 train loss:3.734780 +step:2998 train loss:3.712502 +step:2999 train loss:3.728427 +step:3000 validation loss:3.668613 total_sharp:7.9294e-03 L1_sharp:8.7838e-03 L2_sharp:9.7200e-04 L3_sharp:1.7599e-03 L4_sharp:1.9327e-03 L5_sharp:1.5619e-03 L6_sharp:1.6411e-03 L7_sharp:1.7991e-03 L8_sharp:2.3065e-03 L9_sharp:1.8252e-03 L10_sharp:1.2883e-03 L11_sharp:1.1699e-03 L12_sharp:3.2332e-03 total_fnorm:2.1202e+00 total_l1_linf:1.8715e+04 total_spectral:2.1202e+00 L1_fnorm:4.4801e-01 L2_fnorm:4.6712e-01 L3_fnorm:4.6132e-01 L4_fnorm:4.6165e-01 L5_fnorm:4.5823e-01 L6_fnorm:4.7154e-01 L7_fnorm:4.8299e-01 L8_fnorm:4.8231e-01 L9_fnorm:4.9580e-01 L10_fnorm:5.0888e-01 L11_fnorm:5.1192e-01 L12_fnorm:4.9219e-01 L1_l1linf:5.0328e-01 L2_l1linf:5.3787e-01 L3_l1linf:5.4863e-01 L4_l1linf:5.5551e-01 L5_l1linf:5.2014e-01 L6_l1linf:5.8519e-01 L7_l1linf:5.6761e-01 L8_l1linf:5.0863e-01 L9_l1linf:5.2893e-01 L10_l1linf:5.1269e-01 L11_l1linf:5.2979e-01 L12_l1linf:5.2730e-01 L1_spectral:7.8467e-02 L2_spectral:7.4538e-02 L3_spectral:7.1756e-02 L4_spectral:7.7838e-02 L5_spectral:6.4161e-02 L6_spectral:7.1591e-02 L7_spectral:5.6817e-02 L8_spectral:5.0191e-02 L9_spectral:4.8871e-02 L10_spectral:4.8729e-02 L11_spectral:5.1451e-02 L12_spectral:8.2177e-02 v_norm:2.1202e+00 cos_v_-g_hvp:1.0545e-01 g_hvp_norm:2.8288e-01 cos_v_-g_t:1.3374e-01 g_t_norm:2.2391e-01 hv_norm:3.6957e-01 cos_v_hv:4.5491e-02 hg_norm:2.5338e+00 cos_g_hg:5.7903e-01 v_par:1.5452e-02 v_perp:2.1202e+00 L1_cos_v_neg_g:1.7923e-01 L1_v_norm:4.4801e-01 L2_cos_v_neg_g:9.4748e-02 L2_v_norm:4.6712e-01 L3_cos_v_neg_g:9.1391e-02 L3_v_norm:4.6132e-01 L4_cos_v_neg_g:8.9818e-02 L4_v_norm:4.6165e-01 L5_cos_v_neg_g:7.4015e-02 L5_v_norm:4.5823e-01 L6_cos_v_neg_g:9.0581e-02 L6_v_norm:4.7154e-01 L7_cos_v_neg_g:1.0832e-01 L7_v_norm:4.8299e-01 L8_cos_v_neg_g:1.0814e-01 L8_v_norm:4.8231e-01 L9_cos_v_neg_g:1.0470e-01 L9_v_norm:4.9580e-01 
L10_cos_v_neg_g:1.2433e-01 L10_v_norm:5.0888e-01 L11_cos_v_neg_g:1.4547e-01 L11_v_norm:5.1192e-01 L12_cos_v_neg_g:1.8108e-01 L12_v_norm:4.9219e-01 +step:3000 train loss:3.699570 +step:3001 train loss:3.768927 +step:3002 train loss:3.753680 +step:3003 train loss:3.779680 +step:3004 train loss:3.721129 +step:3005 train loss:3.675683 +step:3006 train loss:3.723891 +step:3007 train loss:3.684935 +step:3008 train loss:3.770362 +step:3009 train loss:3.745430 +step:3010 train loss:3.751669 +step:3011 train loss:3.767792 +step:3012 train loss:3.762746 +step:3013 train loss:3.747464 +step:3014 train loss:3.681740 +step:3015 train loss:3.756490 +step:3016 train loss:3.676946 +step:3017 train loss:3.679694 +step:3018 train loss:3.714003 +step:3019 train loss:3.751283 +step:3020 train loss:3.698468 +step:3021 train loss:3.745820 +step:3022 train loss:3.717535 +step:3023 train loss:3.764775 +step:3024 train loss:3.720975 +step:3025 train loss:3.690281 +step:3026 train loss:3.737886 +step:3027 train loss:3.733302 +step:3028 train loss:3.743765 +step:3029 train loss:3.773462 +step:3030 train loss:3.714764 +step:3031 train loss:3.745507 +step:3032 train loss:3.732504 +step:3033 train loss:3.651247 +step:3034 train loss:3.732949 +step:3035 train loss:3.710475 +step:3036 train loss:3.673273 +step:3037 train loss:3.760994 +step:3038 train loss:3.727094 +step:3039 train loss:3.751263 +step:3040 train loss:3.721057 +step:3041 train loss:3.733691 +step:3042 train loss:3.718967 +step:3043 train loss:3.750888 +step:3044 train loss:3.755616 +step:3045 train loss:3.730919 +step:3046 train loss:3.665832 +step:3047 train loss:3.788133 +step:3048 train loss:3.765997 +step:3049 train loss:3.715200 +step:3050 train loss:3.772181 +step:3051 train loss:3.713658 +step:3052 train loss:3.600742 +step:3053 train loss:3.737061 +step:3054 train loss:3.704869 +step:3055 train loss:3.671984 +step:3056 train loss:3.719752 +step:3057 train loss:3.668865 +step:3058 train loss:3.745478 +step:3059 train loss:3.643158 +step:3060 train loss:3.780478 +step:3061 train loss:3.792807 +step:3062 train loss:3.698907 +step:3063 train loss:3.667602 +step:3064 train loss:3.680791 +step:3065 train loss:3.649596 +step:3066 train loss:3.829474 +step:3067 train loss:3.698881 +step:3068 train loss:3.683814 +step:3069 train loss:3.722163 +step:3070 train loss:3.727350 +step:3071 train loss:3.675231 +step:3072 train loss:3.685591 +step:3073 train loss:3.690532 +step:3074 train loss:3.684219 +step:3075 train loss:3.689929 +step:3076 train loss:3.641964 +step:3077 train loss:3.645083 +step:3078 train loss:3.727853 +step:3079 train loss:3.640677 +step:3080 train loss:3.714738 +step:3081 train loss:3.674491 +step:3082 train loss:3.619318 +step:3083 train loss:3.673414 +step:3084 train loss:3.681318 +step:3085 train loss:3.718511 +step:3086 train loss:3.753751 +step:3087 train loss:3.658218 +step:3088 train loss:3.690392 +step:3089 train loss:3.680011 +step:3090 train loss:3.691083 +step:3091 train loss:3.697015 +step:3092 train loss:3.774107 +step:3093 train loss:3.661458 +step:3094 train loss:3.682485 +step:3095 train loss:3.754038 +step:3096 train loss:3.698134 +step:3097 train loss:3.717597 +step:3098 train loss:3.686351 +step:3099 train loss:3.681046 +step:3100 train loss:3.737618 +step:3101 train loss:3.672223 +step:3102 train loss:3.671397 +step:3103 train loss:3.677029 +step:3104 train loss:3.686588 +step:3105 train loss:3.709366 +step:3106 train loss:3.679690 +step:3107 train loss:3.728790 +step:3108 train loss:3.629489 +step:3109 train 
loss:3.706849 +step:3110 train loss:3.724051 +step:3111 train loss:3.722279 +step:3112 train loss:3.754159 +step:3113 train loss:3.700739 +step:3114 train loss:3.699976 +step:3115 train loss:3.739868 +step:3116 train loss:3.713621 +step:3117 train loss:3.660091 +step:3118 train loss:3.639109 +step:3119 train loss:3.643996 +step:3120 train loss:3.790803 +step:3121 train loss:3.704767 +step:3122 train loss:3.679848 +step:3123 train loss:3.689552 +step:3124 train loss:3.601573 +step:3125 train loss:3.666997 +step:3126 train loss:3.662035 +step:3127 train loss:3.683138 +step:3128 train loss:3.679376 +step:3129 train loss:3.669596 +step:3130 train loss:3.642185 +step:3131 train loss:3.737955 +step:3132 train loss:3.740978 +step:3133 train loss:3.675470 +step:3134 train loss:3.679388 +step:3135 train loss:3.645218 +step:3136 train loss:3.701432 +step:3137 train loss:3.629423 +step:3138 train loss:3.723171 +step:3139 train loss:3.731012 +step:3140 train loss:3.727960 +step:3141 train loss:3.730051 +step:3142 train loss:3.685797 +step:3143 train loss:3.678099 +step:3144 train loss:3.267968 +step:3145 train loss:4.225054 +step:3146 train loss:3.696061 +step:3147 train loss:3.748590 +step:3148 train loss:3.722583 +step:3149 train loss:3.724030 +step:3150 train loss:3.725534 +step:3151 train loss:3.715235 +step:3152 train loss:3.682498 +step:3153 train loss:3.701254 +step:3154 train loss:3.708912 +step:3155 train loss:3.683647 +step:3156 train loss:3.717352 +step:3157 train loss:3.681675 +step:3158 train loss:3.763638 +step:3159 train loss:3.737351 +step:3160 train loss:3.691704 +step:3161 train loss:3.725698 +step:3162 train loss:3.723017 +step:3163 train loss:3.677540 +step:3164 train loss:3.733515 +step:3165 train loss:3.754992 +step:3166 train loss:3.677945 +step:3167 train loss:3.774659 +step:3168 train loss:3.651228 +step:3169 train loss:3.670305 +step:3170 train loss:3.792117 +step:3171 train loss:3.645350 +step:3172 train loss:3.729733 +step:3173 train loss:3.722473 +step:3174 train loss:3.640671 +step:3175 train loss:3.644667 +step:3176 train loss:3.760525 +step:3177 train loss:3.634090 +step:3178 train loss:3.684438 +step:3179 train loss:3.646862 +step:3180 train loss:3.655820 +step:3181 train loss:3.693368 +step:3182 train loss:3.756196 +step:3183 train loss:3.642359 +step:3184 train loss:3.664393 +step:3185 train loss:3.664603 +step:3186 train loss:3.691047 +step:3187 train loss:3.649609 +step:3188 train loss:3.621762 +step:3189 train loss:3.646251 +step:3190 train loss:3.636602 +step:3191 train loss:3.687268 +step:3192 train loss:3.662860 +step:3193 train loss:3.658838 +step:3194 train loss:3.670843 +step:3195 train loss:3.729691 +step:3196 train loss:3.674674 +step:3197 train loss:3.685728 +step:3198 train loss:3.668517 +step:3199 train loss:3.621793 +step:3200 train loss:3.717735 +step:3201 train loss:3.654354 +step:3202 train loss:3.672857 +step:3203 train loss:3.609506 +step:3204 train loss:3.657885 +step:3205 train loss:3.678120 +step:3206 train loss:3.665071 +step:3207 train loss:3.619086 +step:3208 train loss:3.675344 +step:3209 train loss:3.635478 +step:3210 train loss:3.655915 +step:3211 train loss:3.692896 +step:3212 train loss:3.649151 +step:3213 train loss:3.628771 +step:3214 train loss:3.755564 +step:3215 train loss:3.646708 +step:3216 train loss:3.656273 +step:3217 train loss:3.722031 +step:3218 train loss:3.666981 +step:3219 train loss:3.656685 +step:3220 train loss:3.658597 +step:3221 train loss:3.655082 +step:3222 train loss:3.660875 +step:3223 train loss:3.651312 
+step:3224 train loss:3.713462 +step:3225 train loss:3.689161 +step:3226 train loss:3.660261 +step:3227 train loss:3.731434 +step:3228 train loss:3.747952 +step:3229 train loss:3.675968 +step:3230 train loss:3.656363 +step:3231 train loss:3.679057 +step:3232 train loss:3.696540 +step:3233 train loss:3.737037 +step:3234 train loss:3.685723 +step:3235 train loss:3.691866 +step:3236 train loss:3.767374 +step:3237 train loss:3.653141 +step:3238 train loss:3.757072 +step:3239 train loss:3.670925 +step:3240 train loss:3.751372 +step:3241 train loss:3.891512 +step:3242 train loss:3.692230 +step:3243 train loss:3.799179 +step:3244 train loss:3.686352 +step:3245 train loss:3.680862 +step:3246 train loss:3.672715 +step:3247 train loss:3.667404 +step:3248 train loss:3.716093 +step:3249 train loss:3.684846 +step:3250 validation loss:3.660418 +step:3250 train loss:3.687005 +step:3251 train loss:3.718984 +step:3252 train loss:3.687623 +step:3253 train loss:3.671983 +step:3254 train loss:3.714167 +step:3255 train loss:3.681136 +step:3256 train loss:3.740191 +step:3257 train loss:3.638345 +step:3258 train loss:3.657149 +step:3259 train loss:3.661100 +step:3260 train loss:3.652560 +step:3261 train loss:3.685484 +step:3262 train loss:3.709806 +step:3263 train loss:3.713278 +step:3264 train loss:3.681238 +step:3265 train loss:3.694716 +step:3266 train loss:3.678870 +step:3267 train loss:3.698807 +step:3268 train loss:3.676199 +step:3269 train loss:3.693157 +step:3270 train loss:3.709198 +step:3271 train loss:3.659151 +step:3272 train loss:3.677933 +step:3273 train loss:3.664383 +step:3274 train loss:3.725416 +step:3275 train loss:3.685982 +step:3276 train loss:3.691940 +step:3277 train loss:3.733060 +step:3278 train loss:3.720457 +step:3279 train loss:3.640627 +step:3280 train loss:3.698743 +step:3281 train loss:3.668880 +step:3282 train loss:3.650141 +step:3283 train loss:3.674471 +step:3284 train loss:3.738857 +step:3285 train loss:3.699703 +step:3286 train loss:3.646607 +step:3287 train loss:3.667476 +step:3288 train loss:3.661867 +step:3289 train loss:3.743427 +step:3290 train loss:3.670264 +step:3291 train loss:3.648232 +step:3292 train loss:3.721358 +step:3293 train loss:3.662155 +step:3294 train loss:3.692795 +step:3295 train loss:3.859241 +step:3296 train loss:3.726254 +step:3297 train loss:3.703067 +step:3298 train loss:3.724130 +step:3299 train loss:3.685586 +step:3300 train loss:3.650152 +step:3301 train loss:3.708259 +step:3302 train loss:3.637959 +step:3303 train loss:3.645467 +step:3304 train loss:3.731768 +step:3305 train loss:3.646519 +step:3306 train loss:3.635949 +step:3307 train loss:3.660278 +step:3308 train loss:3.611716 +step:3309 train loss:3.769263 +step:3310 train loss:3.719052 +step:3311 train loss:3.756354 +step:3312 train loss:3.684542 +step:3313 train loss:3.691682 +step:3314 train loss:3.674223 +step:3315 train loss:3.655214 +step:3316 train loss:3.707469 +step:3317 train loss:3.644115 +step:3318 train loss:3.608518 +step:3319 train loss:3.742682 +step:3320 train loss:3.832727 +step:3321 train loss:3.742665 +step:3322 train loss:3.976518 +step:3323 train loss:3.920979 +step:3324 train loss:3.848682 +step:3325 train loss:3.867235 +step:3326 train loss:3.862287 +step:3327 train loss:3.874254 +step:3328 train loss:3.752836 +step:3329 train loss:3.822018 +step:3330 train loss:3.753267 +step:3331 train loss:3.854283 +step:3332 train loss:3.777718 +step:3333 train loss:3.781901 +step:3334 train loss:3.762315 +step:3335 train loss:3.743311 +step:3336 train loss:3.747487 +step:3337 
train loss:3.766837 +step:3338 train loss:3.690027 +step:3339 train loss:3.763523 +step:3340 train loss:3.673119 +step:3341 train loss:3.693179 +step:3342 train loss:3.749381 +step:3343 train loss:3.713846 +step:3344 train loss:3.711919 +step:3345 train loss:3.741993 +step:3346 train loss:3.669014 +step:3347 train loss:3.699794 +step:3348 train loss:3.738281 +step:3349 train loss:3.664332 +step:3350 train loss:3.670178 +step:3351 train loss:3.690352 +step:3352 train loss:3.754074 +step:3353 train loss:3.706256 +step:3354 train loss:3.670910 +step:3355 train loss:3.662411 +step:3356 train loss:3.669452 +step:3357 train loss:3.678160 +step:3358 train loss:3.712064 +step:3359 train loss:3.686888 +step:3360 train loss:3.735754 +step:3361 train loss:3.688991 +step:3362 train loss:3.660397 +step:3363 train loss:3.782782 +step:3364 train loss:3.714533 +step:3365 train loss:3.644509 +step:3366 train loss:3.735183 +step:3367 train loss:3.670135 +step:3368 train loss:3.669755 +step:3369 train loss:3.678179 +step:3370 train loss:3.705234 +step:3371 train loss:3.692286 +step:3372 train loss:3.691687 +step:3373 train loss:3.677817 +step:3374 train loss:3.735229 +step:3375 train loss:3.835282 +step:3376 train loss:3.716419 +step:3377 train loss:3.796804 +step:3378 train loss:3.686008 +step:3379 train loss:3.672794 +step:3380 train loss:3.706030 +step:3381 train loss:3.728633 +step:3382 train loss:3.702218 +step:3383 train loss:3.642467 +step:3384 train loss:3.669463 +step:3385 train loss:3.691204 +step:3386 train loss:3.635669 +step:3387 train loss:3.704282 +step:3388 train loss:3.693555 +step:3389 train loss:3.648125 +step:3390 train loss:3.684990 +step:3391 train loss:3.735134 +step:3392 train loss:3.681630 +step:3393 train loss:3.703015 +step:3394 train loss:3.684502 +step:3395 train loss:3.701509 +step:3396 train loss:3.740158 +step:3397 train loss:3.711587 +step:3398 train loss:3.722215 +step:3399 train loss:3.698673 +step:3400 train loss:3.740476 +step:3401 train loss:3.624666 +step:3402 train loss:3.655843 +step:3403 train loss:3.669534 +step:3404 train loss:3.699863 +step:3405 train loss:3.658792 +step:3406 train loss:3.715424 +step:3407 train loss:3.833914 +step:3408 train loss:3.682528 +step:3409 train loss:3.670010 +step:3410 train loss:3.724080 +step:3411 train loss:3.697390 +step:3412 train loss:3.702130 +step:3413 train loss:3.620621 +step:3414 train loss:3.638564 +step:3415 train loss:3.710536 +step:3416 train loss:3.703586 +step:3417 train loss:3.706943 +step:3418 train loss:3.636935 +step:3419 train loss:3.574849 +step:3420 train loss:3.661896 +step:3421 train loss:3.704536 +step:3422 train loss:3.680646 +step:3423 train loss:3.731042 +step:3424 train loss:3.789588 +step:3425 train loss:3.725562 +step:3426 train loss:3.677327 +step:3427 train loss:3.694261 +step:3428 train loss:3.742819 +step:3429 train loss:3.641921 +step:3430 train loss:3.696972 +step:3431 train loss:3.678000 +step:3432 train loss:3.651138 +step:3433 train loss:3.633302 +step:3434 train loss:3.693234 +step:3435 train loss:3.664636 +step:3436 train loss:3.714953 +step:3437 train loss:3.619115 +step:3438 train loss:3.683636 +step:3439 train loss:3.693936 +step:3440 train loss:3.622346 +step:3441 train loss:3.718520 +step:3442 train loss:3.698381 +step:3443 train loss:3.661864 +step:3444 train loss:3.659266 +step:3445 train loss:3.659241 +step:3446 train loss:3.616480 +step:3447 train loss:3.668657 +step:3448 train loss:3.757133 +step:3449 train loss:3.682229 +step:3450 train loss:3.677316 +step:3451 train loss:3.734463 
+step:3452 train loss:3.743406 +step:3453 train loss:3.635780 +step:3454 train loss:3.665654 +step:3455 train loss:3.690876 +step:3456 train loss:3.684107 +step:3457 train loss:3.722703 +step:3458 train loss:3.703769 +step:3459 train loss:3.674150 +step:3460 train loss:3.728781 +step:3461 train loss:3.646297 +step:3462 train loss:3.670539 +step:3463 train loss:3.665268 +step:3464 train loss:3.692130 +step:3465 train loss:3.656997 +step:3466 train loss:3.758899 +step:3467 train loss:3.621777 +step:3468 train loss:3.769820 +step:3469 train loss:3.682764 +step:3470 train loss:3.697664 +step:3471 train loss:3.638526 +step:3472 train loss:3.707619 +step:3473 train loss:3.705390 +step:3474 train loss:3.656619 +step:3475 train loss:3.741822 +step:3476 train loss:3.672609 +step:3477 train loss:3.693424 +step:3478 train loss:3.619243 +step:3479 train loss:3.651650 +step:3480 train loss:3.610738 +step:3481 train loss:3.625710 +step:3482 train loss:3.623904 +step:3483 train loss:3.680750 +step:3484 train loss:3.654151 +step:3485 train loss:3.745890 +step:3486 train loss:3.873492 +step:3487 train loss:3.691111 +step:3488 train loss:3.704049 +step:3489 train loss:3.619344 +step:3490 train loss:3.650952 +step:3491 train loss:3.669276 +step:3492 train loss:3.721302 +step:3493 train loss:3.637105 +step:3494 train loss:3.702482 +step:3495 train loss:3.621023 +step:3496 train loss:3.779045 +step:3497 train loss:3.665937 +step:3498 train loss:3.693903 +step:3499 train loss:3.652841 +step:3500 validation loss:3.624696 total_sharp:4.0767e-01 L1_sharp:3.8811e+00 L2_sharp:2.2963e-01 L3_sharp:6.9428e-02 L4_sharp:6.2797e-03 L5_sharp:1.6550e-03 L6_sharp:1.3410e-03 L7_sharp:1.6949e-03 L8_sharp:2.5806e-03 L9_sharp:2.1154e-03 L10_sharp:1.3375e-03 L11_sharp:1.1429e-03 L12_sharp:2.4408e-03 total_fnorm:2.1611e+00 total_l1_linf:1.9120e+04 total_spectral:2.1611e+00 L1_fnorm:4.8280e-01 L2_fnorm:4.9248e-01 L3_fnorm:4.7568e-01 L4_fnorm:4.7608e-01 L5_fnorm:4.6435e-01 L6_fnorm:4.8430e-01 L7_fnorm:4.9247e-01 L8_fnorm:4.9441e-01 L9_fnorm:5.0693e-01 L10_fnorm:5.1929e-01 L11_fnorm:5.2170e-01 L12_fnorm:5.0257e-01 L1_l1linf:6.0642e-01 L2_l1linf:5.5371e-01 L3_l1linf:5.6218e-01 L4_l1linf:6.2733e-01 L5_l1linf:5.1035e-01 L6_l1linf:6.1906e-01 L7_l1linf:5.3993e-01 L8_l1linf:5.2087e-01 L9_l1linf:5.3689e-01 L10_l1linf:5.2568e-01 L11_l1linf:5.2879e-01 L12_l1linf:5.6756e-01 L1_spectral:9.4419e-02 L2_spectral:8.6068e-02 L3_spectral:8.0605e-02 L4_spectral:8.5029e-02 L5_spectral:6.5470e-02 L6_spectral:7.4282e-02 L7_spectral:6.4578e-02 L8_spectral:5.5741e-02 L9_spectral:4.8757e-02 L10_spectral:4.8766e-02 L11_spectral:5.1585e-02 L12_spectral:7.7229e-02 v_norm:2.1611e+00 cos_v_-g_hvp:1.0068e-01 g_hvp_norm:3.4439e-01 cos_v_-g_t:1.2531e-01 g_t_norm:2.7923e-01 hv_norm:2.1570e+01 cos_v_hv:4.0845e-02 hg_norm:8.7358e+02 cos_g_hg:3.1876e-01 v_par:1.6300e-02 v_perp:2.1611e+00 L1_cos_v_neg_g:2.1767e-01 L1_v_norm:4.8280e-01 L2_cos_v_neg_g:1.2025e-01 L2_v_norm:4.9248e-01 L3_cos_v_neg_g:9.8293e-02 L3_v_norm:4.7568e-01 L4_cos_v_neg_g:8.9909e-02 L4_v_norm:4.7608e-01 L5_cos_v_neg_g:6.9092e-02 L5_v_norm:4.6435e-01 L6_cos_v_neg_g:8.5650e-02 L6_v_norm:4.8430e-01 L7_cos_v_neg_g:9.9919e-02 L7_v_norm:4.9247e-01 L8_cos_v_neg_g:9.5025e-02 L8_v_norm:4.9441e-01 L9_cos_v_neg_g:8.8729e-02 L9_v_norm:5.0693e-01 L10_cos_v_neg_g:1.0550e-01 L10_v_norm:5.1929e-01 L11_cos_v_neg_g:1.2838e-01 L11_v_norm:5.2170e-01 L12_cos_v_neg_g:1.5777e-01 L12_v_norm:5.0257e-01 +step:3500 train loss:3.631643 +step:3501 train loss:3.724918 +step:3502 train loss:3.672688 +step:3503 train loss:3.694712 
+step:3504 train loss:3.670136 +step:3505 train loss:3.679704 +step:3506 train loss:3.626643 +step:3507 train loss:3.705290 +step:3508 train loss:3.659432 +step:3509 train loss:3.691253 +step:3510 train loss:3.704190 +step:3511 train loss:3.672006 +step:3512 train loss:3.628150 +step:3513 train loss:3.653766 +step:3514 train loss:3.639731 +step:3515 train loss:3.660582 +step:3516 train loss:3.604927 +step:3517 train loss:3.704616 +step:3518 train loss:3.635917 +step:3519 train loss:3.689702 +step:3520 train loss:3.704787 +step:3521 train loss:3.671351 +step:3522 train loss:3.680521 +step:3523 train loss:3.759923 +step:3524 train loss:3.701827 +step:3525 train loss:3.681661 +step:3526 train loss:3.675762 +step:3527 train loss:3.705528 +step:3528 train loss:3.692492 +step:3529 train loss:3.659842 +step:3530 train loss:3.633586 +step:3531 train loss:3.682781 +step:3532 train loss:3.652445 +step:3533 train loss:3.662079 +step:3534 train loss:3.659771 +step:3535 train loss:3.636636 +step:3536 train loss:3.740514 +step:3537 train loss:3.709671 +step:3538 train loss:3.703960 +step:3539 train loss:3.654283 +step:3540 train loss:3.636855 +step:3541 train loss:3.616205 +step:3542 train loss:3.644714 +step:3543 train loss:3.624823 +step:3544 train loss:3.652672 +step:3545 train loss:3.683773 +step:3546 train loss:3.620161 +step:3547 train loss:3.681218 +step:3548 train loss:3.796258 +step:3549 train loss:3.608567 +step:3550 train loss:3.650644 +step:3551 train loss:3.614739 +step:3552 train loss:3.731414 +step:3553 train loss:3.664997 +step:3554 train loss:3.640275 +step:3555 train loss:3.666747 +step:3556 train loss:3.672816 +step:3557 train loss:3.634983 +step:3558 train loss:3.679626 +step:3559 train loss:3.644519 +step:3560 train loss:3.682543 +step:3561 train loss:3.582547 +step:3562 train loss:3.718467 +step:3563 train loss:3.663285 +step:3564 train loss:3.648748 +step:3565 train loss:3.647194 +step:3566 train loss:3.683182 +step:3567 train loss:3.635530 +step:3568 train loss:3.662103 +step:3569 train loss:3.633518 +step:3570 train loss:3.691859 +step:3571 train loss:3.644873 +step:3572 train loss:3.748336 +step:3573 train loss:3.635201 +step:3574 train loss:3.696804 +step:3575 train loss:3.636918 +step:3576 train loss:3.698180 +step:3577 train loss:3.704980 +step:3578 train loss:3.667407 +step:3579 train loss:3.667950 +step:3580 train loss:3.669996 +step:3581 train loss:3.624102 +step:3582 train loss:3.606916 +step:3583 train loss:3.649844 +step:3584 train loss:3.629941 +step:3585 train loss:3.622681 +step:3586 train loss:3.712558 +step:3587 train loss:3.762357 +step:3588 train loss:3.726325 +step:3589 train loss:3.686209 +step:3590 train loss:3.704155 +step:3591 train loss:3.652757 +step:3592 train loss:3.663809 +step:3593 train loss:3.682446 +step:3594 train loss:3.677464 +step:3595 train loss:3.671498 +step:3596 train loss:3.660450 +step:3597 train loss:3.647144 +step:3598 train loss:3.647983 +step:3599 train loss:3.683659 +step:3600 train loss:3.693119 +step:3601 train loss:3.676941 +step:3602 train loss:3.680189 +step:3603 train loss:3.759146 +step:3604 train loss:3.724394 +step:3605 train loss:3.661624 +step:3606 train loss:3.635823 +step:3607 train loss:3.607715 +step:3608 train loss:3.631400 +step:3609 train loss:3.664995 +step:3610 train loss:3.638581 +step:3611 train loss:3.730938 +step:3612 train loss:3.611115 +step:3613 train loss:3.690809 +step:3614 train loss:3.620094 +step:3615 train loss:3.604977 +step:3616 train loss:3.696992 +step:3617 train loss:3.681331 +step:3618 train 
loss:3.686888 +step:3619 train loss:3.629842 +step:3620 train loss:3.641576 +step:3621 train loss:3.712913 +step:3622 train loss:3.659776 +step:3623 train loss:3.653200 +step:3624 train loss:3.665708 +step:3625 train loss:3.770700 +step:3626 train loss:3.641899 +step:3627 train loss:3.620021 +step:3628 train loss:3.658446 +step:3629 train loss:3.676197 +step:3630 train loss:3.667174 +step:3631 train loss:3.684167 +step:3632 train loss:3.670341 +step:3633 train loss:3.652769 +step:3634 train loss:3.645638 +step:3635 train loss:3.692412 +step:3636 train loss:3.620118 +step:3637 train loss:3.633492 +step:3638 train loss:3.614927 +step:3639 train loss:3.665959 +step:3640 train loss:3.651381 +step:3641 train loss:3.635683 +step:3642 train loss:3.654983 +step:3643 train loss:3.618014 +step:3644 train loss:3.662806 +step:3645 train loss:3.688990 +step:3646 train loss:3.702365 +step:3647 train loss:3.698882 +step:3648 train loss:3.768560 +step:3649 train loss:3.665062 +step:3650 train loss:3.673622 +step:3651 train loss:3.668368 +step:3652 train loss:3.683079 +step:3653 train loss:3.692967 +step:3654 train loss:3.710239 +step:3655 train loss:3.695158 +step:3656 train loss:3.694020 +step:3657 train loss:3.624715 +step:3658 train loss:3.684673 +step:3659 train loss:3.651083 +step:3660 train loss:3.638205 +step:3661 train loss:3.652084 +step:3662 train loss:3.674627 +step:3663 train loss:3.675875 +step:3664 train loss:3.653362 +step:3665 train loss:3.653732 +step:3666 train loss:3.692629 +step:3667 train loss:3.652886 +step:3668 train loss:3.699715 +step:3669 train loss:3.701350 +step:3670 train loss:3.698449 +step:3671 train loss:3.659019 +step:3672 train loss:3.717474 +step:3673 train loss:3.703688 +step:3674 train loss:3.727533 +step:3675 train loss:3.711502 +step:3676 train loss:3.665184 +step:3677 train loss:3.646183 +step:3678 train loss:3.695155 +step:3679 train loss:3.670483 +step:3680 train loss:3.655051 +step:3681 train loss:3.731325 +step:3682 train loss:3.686375 +step:3683 train loss:3.676029 +step:3684 train loss:3.665630 +step:3685 train loss:3.654257 +step:3686 train loss:3.678728 +step:3687 train loss:3.689234 +step:3688 train loss:3.721864 +step:3689 train loss:3.618795 +step:3690 train loss:3.625220 +step:3691 train loss:3.706533 +step:3692 train loss:3.632288 +step:3693 train loss:3.650850 +step:3694 train loss:3.759143 +step:3695 train loss:3.606602 +step:3696 train loss:3.694262 +step:3697 train loss:3.732276 +step:3698 train loss:3.743389 +step:3699 train loss:3.667710 +step:3700 train loss:3.654324 +step:3701 train loss:3.671447 +step:3702 train loss:3.639915 +step:3703 train loss:3.690507 +step:3704 train loss:3.841608 +step:3705 train loss:3.671076 +step:3706 train loss:3.679572 +step:3707 train loss:3.667002 +step:3708 train loss:3.683077 +step:3709 train loss:3.685507 +step:3710 train loss:3.736996 +step:3711 train loss:3.606701 +step:3712 train loss:3.666569 +step:3713 train loss:3.695497 +step:3714 train loss:3.636418 +step:3715 train loss:3.656093 +step:3716 train loss:3.659491 +step:3717 train loss:3.647425 +step:3718 train loss:3.668469 +step:3719 train loss:3.692950 +step:3720 train loss:3.649347 +step:3721 train loss:3.657104 +step:3722 train loss:3.823653 +step:3723 train loss:3.657308 +step:3724 train loss:3.693131 +step:3725 train loss:3.664206 +step:3726 train loss:3.667481 +step:3727 train loss:3.648838 +step:3728 train loss:3.702972 +step:3729 train loss:3.665321 +step:3730 train loss:3.708184 +step:3731 train loss:3.689554 +step:3732 train loss:3.656157 
+step:3733 train loss:3.615144 +step:3734 train loss:3.623329 +step:3735 train loss:3.622139 +step:3736 train loss:3.657447 +step:3737 train loss:3.709949 +step:3738 train loss:3.703409 +step:3739 train loss:3.645694 +step:3740 train loss:3.640775 +step:3741 train loss:3.696036 +step:3742 train loss:3.742683 +step:3743 train loss:3.689013 +step:3744 train loss:3.724427 +step:3745 train loss:3.665144 +step:3746 train loss:3.685617 +step:3747 train loss:3.677727 +step:3748 train loss:3.723359 +step:3749 train loss:3.670929 +step:3750 validation loss:3.611280 +step:3750 train loss:3.657376 +step:3751 train loss:3.698525 +step:3752 train loss:3.681094 +step:3753 train loss:3.669528 +step:3754 train loss:3.667177 +step:3755 train loss:3.673946 +step:3756 train loss:3.618806 +step:3757 train loss:3.650922 +step:3758 train loss:3.667009 +step:3759 train loss:3.646208 +step:3760 train loss:3.655183 +step:3761 train loss:3.604887 +step:3762 train loss:3.722537 +step:3763 train loss:3.636247 +step:3764 train loss:3.633180 +step:3765 train loss:3.669504 +step:3766 train loss:3.657248 +step:3767 train loss:3.653677 +step:3768 train loss:3.724617 +step:3769 train loss:3.704699 +step:3770 train loss:3.662407 +step:3771 train loss:3.709283 +step:3772 train loss:3.699858 +step:3773 train loss:3.695046 +step:3774 train loss:3.683455 +step:3775 train loss:3.688605 +step:3776 train loss:3.625591 +step:3777 train loss:3.684531 +step:3778 train loss:3.598314 +step:3779 train loss:3.686587 +step:3780 train loss:3.670176 +step:3781 train loss:3.584933 +step:3782 train loss:3.636727 +step:3783 train loss:3.634807 +step:3784 train loss:3.635916 +step:3785 train loss:3.659885 +step:3786 train loss:3.667255 +step:3787 train loss:3.678084 +step:3788 train loss:3.696094 +step:3789 train loss:3.819618 +step:3790 train loss:3.703538 +step:3791 train loss:3.673782 +step:3792 train loss:3.676960 +step:3793 train loss:3.648827 +step:3794 train loss:3.761244 +step:3795 train loss:3.676520 +step:3796 train loss:3.686503 +step:3797 train loss:3.658662 +step:3798 train loss:3.611379 +step:3799 train loss:3.629611 +step:3800 train loss:3.647923 +step:3801 train loss:3.642627 +step:3802 train loss:3.603136 +step:3803 train loss:3.626723 +step:3804 train loss:3.752074 +step:3805 train loss:3.725373 +step:3806 train loss:3.666015 +step:3807 train loss:3.718305 +step:3808 train loss:3.644756 +step:3809 train loss:3.661384 +step:3810 train loss:3.652902 +step:3811 train loss:3.673882 +step:3812 train loss:3.677539 +step:3813 train loss:3.612034 +step:3814 train loss:3.621259 +step:3815 train loss:3.662155 +step:3816 train loss:3.660779 +step:3817 train loss:3.662252 +step:3818 train loss:3.683862 +step:3819 train loss:3.621016 +step:3820 train loss:3.687345 +step:3821 train loss:3.630732 +step:3822 train loss:3.704158 +step:3823 train loss:3.632074 +step:3824 train loss:3.666663 +step:3825 train loss:3.660251 +step:3826 train loss:3.675676 +step:3827 train loss:3.700131 +step:3828 train loss:3.629544 +step:3829 train loss:3.669739 +step:3830 train loss:3.644759 +step:3831 train loss:3.713930 +step:3832 train loss:3.699350 +step:3833 train loss:3.710726 +step:3834 train loss:3.704442 +step:3835 train loss:3.705125 +step:3836 train loss:3.709030 +step:3837 train loss:3.683296 +step:3838 train loss:3.664239 +step:3839 train loss:3.709997 +step:3840 train loss:3.675603 +step:3841 train loss:3.629124 +step:3842 train loss:3.689660 +step:3843 train loss:3.692436 +step:3844 train loss:3.629678 +step:3845 train loss:3.631726 +step:3846 
train loss:3.630570 +step:3847 train loss:3.648939 +step:3848 train loss:3.689804 +step:3849 train loss:3.662843 +step:3850 train loss:3.715147 +step:3851 train loss:3.620178 +step:3852 train loss:3.691218 +step:3853 train loss:3.615689 +step:3854 train loss:3.647893 +step:3855 train loss:3.647591 +step:3856 train loss:3.694968 +step:3857 train loss:3.609283 +step:3858 train loss:3.595478 +step:3859 train loss:3.631843 +step:3860 train loss:3.627591 +step:3861 train loss:3.677433 +step:3862 train loss:3.611379 +step:3863 train loss:3.633827 +step:3864 train loss:3.626375 +step:3865 train loss:3.618127 +step:3866 train loss:3.685017 +step:3867 train loss:3.614367 +step:3868 train loss:3.654799 +step:3869 train loss:3.594262 +step:3870 train loss:3.640574 +step:3871 train loss:3.640951 +step:3872 train loss:3.607131 +step:3873 train loss:3.632548 +step:3874 train loss:3.632158 +step:3875 train loss:3.664808 +step:3876 train loss:3.642139 +step:3877 train loss:3.656643 +step:3878 train loss:3.677769 +step:3879 train loss:3.652482 +step:3880 train loss:3.670085 +step:3881 train loss:3.696717 +step:3882 train loss:3.644368 +step:3883 train loss:3.620313 +step:3884 train loss:3.616050 +step:3885 train loss:3.642803 +step:3886 train loss:3.673986 +step:3887 train loss:3.675408 +step:3888 train loss:3.636143 +step:3889 train loss:3.601711 +step:3890 train loss:3.658733 +step:3891 train loss:3.691927 +step:3892 train loss:3.648129 +step:3893 train loss:3.590410 +step:3894 train loss:3.600039 +step:3895 train loss:3.706092 +step:3896 train loss:3.756861 +step:3897 train loss:3.603826 +step:3898 train loss:3.638800 +step:3899 train loss:3.679472 +step:3900 train loss:3.700489 +step:3901 train loss:3.684306 +step:3902 train loss:3.691218 +step:3903 train loss:3.648449 +step:3904 train loss:3.642701 +step:3905 train loss:3.654109 +step:3906 train loss:3.588668 +step:3907 train loss:3.668136 +step:3908 train loss:3.656485 +step:3909 train loss:3.612349 +step:3910 train loss:3.617492 +step:3911 train loss:3.704782 +step:3912 train loss:3.640831 +step:3913 train loss:3.650236 +step:3914 train loss:3.518242 +step:3915 train loss:3.654191 +step:3916 train loss:3.638640 +step:3917 train loss:3.798515 +step:3918 train loss:3.633103 +step:3919 train loss:3.685903 +step:3920 train loss:3.640805 +step:3921 train loss:3.613317 +step:3922 train loss:3.652219 +step:3923 train loss:3.670748 +step:3924 train loss:3.674168 +step:3925 train loss:3.663708 +step:3926 train loss:3.676662 +step:3927 train loss:3.725256 +step:3928 train loss:3.684193 +step:3929 train loss:3.687772 +step:3930 train loss:3.650846 +step:3931 train loss:3.662804 +step:3932 train loss:3.611584 +step:3933 train loss:3.640570 +step:3934 train loss:3.621041 +step:3935 train loss:3.625765 +step:3936 train loss:3.619399 +step:3937 train loss:3.626106 +step:3938 train loss:3.670717 +step:3939 train loss:3.665515 +step:3940 train loss:3.671514 +step:3941 train loss:3.656844 +step:3942 train loss:3.705054 +step:3943 train loss:3.584419 +step:3944 train loss:3.626004 +step:3945 train loss:3.603906 +step:3946 train loss:3.657816 +step:3947 train loss:3.657283 +step:3948 train loss:3.636150 +step:3949 train loss:3.617828 +step:3950 train loss:3.663511 +step:3951 train loss:3.642049 +step:3952 train loss:3.680116 +step:3953 train loss:3.692499 +step:3954 train loss:3.656813 +step:3955 train loss:3.693983 +step:3956 train loss:3.675156 +step:3957 train loss:3.676945 +step:3958 train loss:3.622293 +step:3959 train loss:3.622229 +step:3960 train loss:3.626607 
+step:3961 train loss:3.636657 +step:3962 train loss:3.629105 +step:3963 train loss:3.719085 +step:3964 train loss:3.631234 +step:3965 train loss:3.581321 +step:3966 train loss:3.604994 +step:3967 train loss:3.608418 +step:3968 train loss:3.664495 +step:3969 train loss:3.677265 +step:3970 train loss:3.663847 +step:3971 train loss:3.624013 +step:3972 train loss:3.645747 +step:3973 train loss:3.641452 +step:3974 train loss:3.677005 +step:3975 train loss:3.646996 +step:3976 train loss:3.651036 +step:3977 train loss:3.638077 +step:3978 train loss:3.624875 +step:3979 train loss:3.680905 +step:3980 train loss:3.569257 +step:3981 train loss:3.620316 +step:3982 train loss:3.592390 +step:3983 train loss:3.656734 +step:3984 train loss:3.557721 +step:3985 train loss:3.666168 +step:3986 train loss:3.672423 +step:3987 train loss:3.643897 +step:3988 train loss:3.630433 +step:3989 train loss:3.617618 +step:3990 train loss:3.640501 +step:3991 train loss:3.629784 +step:3992 train loss:3.659556 +step:3993 train loss:3.648481 +step:3994 train loss:3.609521 +step:3995 train loss:3.621535 +step:3996 train loss:3.616149 +step:3997 train loss:3.630233 +step:3998 train loss:3.628388 +step:3999 train loss:3.628257 +step:4000 validation loss:3.590015 total_sharp:8.0346e-03 L1_sharp:1.0405e-02 L2_sharp:1.0166e-03 L3_sharp:1.8307e-03 L4_sharp:1.7127e-03 L5_sharp:1.0450e-03 L6_sharp:1.4612e-03 L7_sharp:1.7138e-03 L8_sharp:2.6828e-03 L9_sharp:2.0327e-03 L10_sharp:1.1804e-03 L11_sharp:1.3054e-03 L12_sharp:4.0537e-03 total_fnorm:2.1565e+00 total_l1_linf:1.9080e+04 total_spectral:2.1565e+00 L1_fnorm:4.5684e-01 L2_fnorm:4.8072e-01 L3_fnorm:4.7221e-01 L4_fnorm:4.7716e-01 L5_fnorm:4.6484e-01 L6_fnorm:4.8345e-01 L7_fnorm:4.9405e-01 L8_fnorm:4.9275e-01 L9_fnorm:5.1027e-01 L10_fnorm:5.2270e-01 L11_fnorm:5.2767e-01 L12_fnorm:5.1045e-01 L1_l1linf:5.7794e-01 L2_l1linf:5.4922e-01 L3_l1linf:5.8227e-01 L4_l1linf:5.9934e-01 L5_l1linf:5.2977e-01 L6_l1linf:5.7105e-01 L7_l1linf:5.3913e-01 L8_l1linf:5.1328e-01 L9_l1linf:5.2813e-01 L10_l1linf:5.3991e-01 L11_l1linf:5.5248e-01 L12_l1linf:5.5320e-01 L1_spectral:8.1586e-02 L2_spectral:7.7738e-02 L3_spectral:7.8589e-02 L4_spectral:8.3725e-02 L5_spectral:6.5110e-02 L6_spectral:7.2333e-02 L7_spectral:6.4773e-02 L8_spectral:6.1144e-02 L9_spectral:4.9108e-02 L10_spectral:4.5589e-02 L11_spectral:5.2964e-02 L12_spectral:7.9809e-02 v_norm:2.1565e+00 cos_v_-g_hvp:8.8483e-02 g_hvp_norm:3.2854e-01 cos_v_-g_t:1.0678e-01 g_t_norm:2.7378e-01 hv_norm:4.5459e-01 cos_v_hv:3.8116e-02 hg_norm:3.4572e+00 cos_g_hg:6.2241e-01 v_par:1.3091e-02 v_perp:2.1565e+00 L1_cos_v_neg_g:1.6873e-01 L1_v_norm:4.5684e-01 L2_cos_v_neg_g:7.6832e-02 L2_v_norm:4.8072e-01 L3_cos_v_neg_g:7.0806e-02 L3_v_norm:4.7221e-01 L4_cos_v_neg_g:7.1338e-02 L4_v_norm:4.7716e-01 L5_cos_v_neg_g:6.0267e-02 L5_v_norm:4.6484e-01 L6_cos_v_neg_g:7.9943e-02 L6_v_norm:4.8345e-01 L7_cos_v_neg_g:9.2832e-02 L7_v_norm:4.9405e-01 L8_cos_v_neg_g:8.8208e-02 L8_v_norm:4.9275e-01 L9_cos_v_neg_g:8.2371e-02 L9_v_norm:5.1027e-01 L10_cos_v_neg_g:1.0068e-01 L10_v_norm:5.2270e-01 L11_cos_v_neg_g:1.2577e-01 L11_v_norm:5.2767e-01 L12_cos_v_neg_g:1.5763e-01 L12_v_norm:5.1045e-01 +step:4000 train loss:3.637697 +step:4001 train loss:3.655870 +step:4002 train loss:3.601436 +step:4003 train loss:3.631486 +step:4004 train loss:3.652629 +step:4005 train loss:3.594862 +step:4006 train loss:3.594707 +step:4007 train loss:3.590465 +step:4008 train loss:3.635537 +step:4009 train loss:3.634472 +step:4010 train loss:3.560386 +step:4011 train loss:3.665027 +step:4012 train loss:3.599955 
+step:4013 train loss:3.626206 +step:4014 train loss:3.614986 +step:4015 train loss:3.573652 +step:4016 train loss:3.616901 +step:4017 train loss:3.676967 +step:4018 train loss:3.630202 +step:4019 train loss:3.632484 +step:4020 train loss:3.582640 +step:4021 train loss:3.608063 +step:4022 train loss:3.614414 +step:4023 train loss:3.666938 +step:4024 train loss:3.601673 +step:4025 train loss:3.578512 +step:4026 train loss:3.679809 +step:4027 train loss:3.593812 +step:4028 train loss:3.652008 +step:4029 train loss:3.530034 +step:4030 train loss:3.597935 +step:4031 train loss:3.605807 +step:4032 train loss:3.642961 +step:4033 train loss:3.615561 +step:4034 train loss:3.597525 +step:4035 train loss:3.617062 +step:4036 train loss:3.626465 +step:4037 train loss:3.587887 +step:4038 train loss:3.559189 +step:4039 train loss:3.612571 +step:4040 train loss:3.646973 +step:4041 train loss:3.702473 +step:4042 train loss:3.628585 +step:4043 train loss:3.648705 +step:4044 train loss:3.602472 +step:4045 train loss:3.607318 +step:4046 train loss:3.570076 +step:4047 train loss:3.591678 +step:4048 train loss:3.611281 +step:4049 train loss:3.672900 +step:4050 train loss:3.604681 +step:4051 train loss:3.598919 +step:4052 train loss:3.597919 +step:4053 train loss:3.635496 +step:4054 train loss:3.593854 +step:4055 train loss:3.589916 +step:4056 train loss:3.668385 +step:4057 train loss:3.613327 +step:4058 train loss:3.621771 +step:4059 train loss:3.585325 +step:4060 train loss:3.636631 +step:4061 train loss:3.579389 +step:4062 train loss:3.600683 +step:4063 train loss:3.635172 +step:4064 train loss:3.606071 +step:4065 train loss:3.583656 +step:4066 train loss:3.650866 +step:4067 train loss:3.692562 +step:4068 train loss:3.678115 +step:4069 train loss:3.688287 +step:4070 train loss:3.643659 +step:4071 train loss:3.642374 +step:4072 train loss:3.578773 +step:4073 train loss:3.617631 +step:4074 train loss:3.533962 +step:4075 train loss:3.667962 +step:4076 train loss:3.579458 +step:4077 train loss:3.604343 +step:4078 train loss:3.561797 +step:4079 train loss:3.641265 +step:4080 train loss:3.614393 +step:4081 train loss:3.633903 +step:4082 train loss:3.594208 +step:4083 train loss:3.582378 +step:4084 train loss:3.641602 +step:4085 train loss:3.646989 +step:4086 train loss:3.693053 +step:4087 train loss:3.677619 +step:4088 train loss:3.580665 +step:4089 train loss:3.628047 +step:4090 train loss:3.574022 +step:4091 train loss:3.614807 +step:4092 train loss:3.574834 +step:4093 train loss:3.810479 +step:4094 train loss:3.655322 +step:4095 train loss:3.570412 +step:4096 train loss:3.686191 +step:4097 train loss:3.550889 +step:4098 train loss:3.612150 +step:4099 train loss:3.580500 +step:4100 train loss:3.634125 +step:4101 train loss:3.600271 +step:4102 train loss:3.600827 +step:4103 train loss:3.581016 +step:4104 train loss:3.586348 +step:4105 train loss:3.594860 +step:4106 train loss:3.548982 +step:4107 train loss:3.556774 +step:4108 train loss:3.574217 +step:4109 train loss:3.638259 +step:4110 train loss:3.614439 +step:4111 train loss:3.527662 +step:4112 train loss:3.568958 +step:4113 train loss:3.584749 +step:4114 train loss:3.646625 +step:4115 train loss:3.621333 +step:4116 train loss:3.654642 +step:4117 train loss:3.573405 +step:4118 train loss:3.585635 +step:4119 train loss:3.590358 +step:4120 train loss:3.570909 +step:4121 train loss:3.617822 +step:4122 train loss:3.617058 +step:4123 train loss:3.543725 +step:4124 train loss:3.603309 +step:4125 train loss:3.571060 +step:4126 train loss:3.591829 +step:4127 train 
loss:3.609984 +step:4128 train loss:3.591347 +step:4129 train loss:3.681404 +step:4130 train loss:3.601892 +step:4131 train loss:3.559800 +step:4132 train loss:3.562413 +step:4133 train loss:3.600298 +step:4134 train loss:3.582447 +step:4135 train loss:3.568398 +step:4136 train loss:3.591754 +step:4137 train loss:3.612356 +step:4138 train loss:3.666231 +step:4139 train loss:3.556962 +step:4140 train loss:3.645218 +step:4141 train loss:3.567705 +step:4142 train loss:3.609180 +step:4143 train loss:3.609512 +step:4144 train loss:3.618779 +step:4145 train loss:3.661276 +step:4146 train loss:3.541112 +step:4147 train loss:3.578788 +step:4148 train loss:3.597874 +step:4149 train loss:3.607846 +step:4150 train loss:3.633648 +step:4151 train loss:3.596497 +step:4152 train loss:3.653835 +step:4153 train loss:3.604997 +step:4154 train loss:3.625293 +step:4155 train loss:3.587810 +step:4156 train loss:3.660127 +step:4157 train loss:3.619545 +step:4158 train loss:3.623183 +step:4159 train loss:3.577624 +step:4160 train loss:3.543940 +step:4161 train loss:3.623239 +step:4162 train loss:3.571216 +step:4163 train loss:3.579910 +step:4164 train loss:3.591639 +step:4165 train loss:3.581877 +step:4166 train loss:3.575499 +step:4167 train loss:3.604809 +step:4168 train loss:3.618490 +step:4169 train loss:3.569286 +step:4170 train loss:3.624273 +step:4171 train loss:3.598273 +step:4172 train loss:3.611404 +step:4173 train loss:3.629741 +step:4174 train loss:3.601171 +step:4175 train loss:3.641970 +step:4176 train loss:3.599373 +step:4177 train loss:3.638620 +step:4178 train loss:3.623528 +step:4179 train loss:3.669877 +step:4180 train loss:3.672156 +step:4181 train loss:3.488523 +step:4182 train loss:3.636444 +step:4183 train loss:3.570253 +step:4184 train loss:3.615590 +step:4185 train loss:3.626047 +step:4186 train loss:3.589548 +step:4187 train loss:3.603600 +step:4188 train loss:3.595365 +step:4189 train loss:3.612047 +step:4190 train loss:3.564876 +step:4191 train loss:3.550312 +step:4192 train loss:3.578369 +step:4193 train loss:3.508962 +step:4194 train loss:3.601722 +step:4195 train loss:3.545698 +step:4196 train loss:3.584931 +step:4197 train loss:3.645531 +step:4198 train loss:3.580455 +step:4199 train loss:3.589365 +step:4200 train loss:3.612796 +step:4201 train loss:3.627048 +step:4202 train loss:3.635391 +step:4203 train loss:3.578346 +step:4204 train loss:3.613235 +step:4205 train loss:3.647810 +step:4206 train loss:3.627005 +step:4207 train loss:3.614483 +step:4208 train loss:3.591931 +step:4209 train loss:3.634677 +step:4210 train loss:3.596401 +step:4211 train loss:3.657929 +step:4212 train loss:3.548409 +step:4213 train loss:3.605117 +step:4214 train loss:3.633610 +step:4215 train loss:3.649663 +step:4216 train loss:3.634900 +step:4217 train loss:3.638996 +step:4218 train loss:3.589878 +step:4219 train loss:3.624957 +step:4220 train loss:3.575799 +step:4221 train loss:3.587013 +step:4222 train loss:3.558310 +step:4223 train loss:3.662264 +step:4224 train loss:3.612398 +step:4225 train loss:3.621320 +step:4226 train loss:3.661943 +step:4227 train loss:3.568047 +step:4228 train loss:3.636690 +step:4229 train loss:3.599193 +step:4230 train loss:3.676057 +step:4231 train loss:3.692966 +step:4232 train loss:3.629502 +step:4233 train loss:3.615145 +step:4234 train loss:3.653193 +step:4235 train loss:3.590172 +step:4236 train loss:3.627492 +step:4237 train loss:3.568981 +step:4238 train loss:3.561738 +step:4239 train loss:3.599379 +step:4240 train loss:3.582803 +step:4241 train loss:3.608944 
+step:4242 train loss:3.614729 +step:4243 train loss:3.633446 +step:4244 train loss:3.678345 +step:4245 train loss:3.609416 +step:4246 train loss:3.680516 +step:4247 train loss:3.715989 +step:4248 train loss:3.725061 +step:4249 train loss:3.632275 +step:4250 validation loss:3.576801 +step:4250 train loss:3.663008 +step:4251 train loss:3.685001 +step:4252 train loss:3.621201 +step:4253 train loss:3.569884 +step:4254 train loss:3.603575 +step:4255 train loss:3.604568 +step:4256 train loss:3.627520 +step:4257 train loss:3.625292 +step:4258 train loss:3.658392 +step:4259 train loss:3.579322 +step:4260 train loss:3.652699 +step:4261 train loss:3.616992 +step:4262 train loss:3.664459 +step:4263 train loss:3.655554 +step:4264 train loss:3.662269 +step:4265 train loss:3.710868 +step:4266 train loss:3.813541 +step:4267 train loss:3.602998 +step:4268 train loss:3.640188 +step:4269 train loss:3.577205 +step:4270 train loss:3.655687 +step:4271 train loss:3.604359 +step:4272 train loss:3.600898 +step:4273 train loss:3.631930 +step:4274 train loss:3.563014 +step:4275 train loss:3.581226 +step:4276 train loss:3.664828 +step:4277 train loss:3.668004 +step:4278 train loss:3.638833 +step:4279 train loss:3.616732 +step:4280 train loss:3.618240 +step:4281 train loss:3.594612 +step:4282 train loss:3.616128 +step:4283 train loss:3.579920 +step:4284 train loss:3.606383 +step:4285 train loss:3.638386 +step:4286 train loss:3.580114 +step:4287 train loss:3.615275 +step:4288 train loss:3.583614 +step:4289 train loss:3.588557 +step:4290 train loss:3.630124 +step:4291 train loss:3.607707 +step:4292 train loss:3.627115 +step:4293 train loss:3.645808 +step:4294 train loss:3.637131 +step:4295 train loss:3.619106 +step:4296 train loss:3.683806 +step:4297 train loss:3.624604 +step:4298 train loss:3.613128 +step:4299 train loss:3.619052 +step:4300 train loss:3.535510 +step:4301 train loss:3.592084 +step:4302 train loss:3.673584 +step:4303 train loss:3.631115 +step:4304 train loss:3.650859 +step:4305 train loss:3.627569 +step:4306 train loss:3.591043 +step:4307 train loss:3.749902 +step:4308 train loss:3.696064 +step:4309 train loss:3.633139 +step:4310 train loss:3.645955 +step:4311 train loss:3.586560 +step:4312 train loss:3.611983 +step:4313 train loss:3.578133 +step:4314 train loss:3.670139 +step:4315 train loss:3.616941 +step:4316 train loss:3.611698 +step:4317 train loss:3.632745 +step:4318 train loss:3.657605 +step:4319 train loss:3.641971 +step:4320 train loss:3.636952 +step:4321 train loss:3.647961 +step:4322 train loss:3.607716 +step:4323 train loss:3.612984 +step:4324 train loss:3.662679 +step:4325 train loss:3.673901 +step:4326 train loss:3.608163 +step:4327 train loss:3.585870 +step:4328 train loss:3.600133 +step:4329 train loss:3.638185 +step:4330 train loss:3.607325 +step:4331 train loss:3.604973 +step:4332 train loss:3.646884 +step:4333 train loss:3.594450 +step:4334 train loss:3.645843 +step:4335 train loss:3.672514 +step:4336 train loss:3.654346 +step:4337 train loss:3.594893 +step:4338 train loss:3.607191 +step:4339 train loss:3.587254 +step:4340 train loss:3.588472 +step:4341 train loss:3.547322 +step:4342 train loss:3.590688 +step:4343 train loss:3.650957 +step:4344 train loss:3.658488 +step:4345 train loss:3.630454 +step:4346 train loss:3.656146 +step:4347 train loss:3.589857 +step:4348 train loss:3.595954 +step:4349 train loss:3.571240 +step:4350 train loss:3.604788 +step:4351 train loss:3.525720 +step:4352 train loss:3.580295 +step:4353 train loss:3.667948 +step:4354 train loss:3.617368 +step:4355 
train loss:3.549144 +step:4356 train loss:3.605898 +step:4357 train loss:3.597525 +step:4358 train loss:3.622312 +step:4359 train loss:3.614199 +step:4360 train loss:3.595818 +step:4361 train loss:3.649636 +step:4362 train loss:3.588086 +step:4363 train loss:3.556972 +step:4364 train loss:3.671555 +step:4365 train loss:3.712955 +step:4366 train loss:3.597357 +step:4367 train loss:3.641536 +step:4368 train loss:3.594509 +step:4369 train loss:3.625649 +step:4370 train loss:3.659018 +step:4371 train loss:3.631961 +step:4372 train loss:3.601406 +step:4373 train loss:3.666116 +step:4374 train loss:3.667671 +step:4375 train loss:3.652928 +step:4376 train loss:3.676160 +step:4377 train loss:3.694629 +step:4378 train loss:3.676753 +step:4379 train loss:3.594654 +step:4380 train loss:3.714423 +step:4381 train loss:3.620263 +step:4382 train loss:3.647518 +step:4383 train loss:3.633261 +step:4384 train loss:3.689938 +step:4385 train loss:3.596342 +step:4386 train loss:3.575138 +step:4387 train loss:3.615907 +step:4388 train loss:3.582519 +step:4389 train loss:3.561367 +step:4390 train loss:3.560783 +step:4391 train loss:3.624247 +step:4392 train loss:3.565343 +step:4393 train loss:3.668835 +step:4394 train loss:3.598998 +step:4395 train loss:3.561529 +step:4396 train loss:3.686365 +step:4397 train loss:3.622383 +step:4398 train loss:3.630839 +step:4399 train loss:3.608911 +step:4400 train loss:3.639416 +step:4401 train loss:3.574365 +step:4402 train loss:3.587094 +step:4403 train loss:3.614013 +step:4404 train loss:3.673897 +step:4405 train loss:3.576083 +step:4406 train loss:3.596834 +step:4407 train loss:3.647538 +step:4408 train loss:3.612463 +step:4409 train loss:3.550887 +step:4410 train loss:3.624898 +step:4411 train loss:3.609546 +step:4412 train loss:3.609350 +step:4413 train loss:3.641053 +step:4414 train loss:3.585421 +step:4415 train loss:3.587035 +step:4416 train loss:3.595626 +step:4417 train loss:3.601594 +step:4418 train loss:3.777984 +step:4419 train loss:3.632098 +step:4420 train loss:3.602368 +step:4421 train loss:3.602163 +step:4422 train loss:3.550486 +step:4423 train loss:3.593748 +step:4424 train loss:3.524488 +step:4425 train loss:3.564644 +step:4426 train loss:3.570979 +step:4427 train loss:3.563393 +step:4428 train loss:3.639454 +step:4429 train loss:3.632391 +step:4430 train loss:3.736850 +step:4431 train loss:3.555807 +step:4432 train loss:3.603998 +step:4433 train loss:3.679547 +step:4434 train loss:3.741362 +step:4435 train loss:3.667477 +step:4436 train loss:3.700852 +step:4437 train loss:3.620433 +step:4438 train loss:3.567868 +step:4439 train loss:3.619051 +step:4440 train loss:3.662195 +step:4441 train loss:3.597320 +step:4442 train loss:3.563825 +step:4443 train loss:3.612727 +step:4444 train loss:3.598851 +step:4445 train loss:3.642207 +step:4446 train loss:3.570896 +step:4447 train loss:3.580346 +step:4448 train loss:3.611522 +step:4449 train loss:3.655378 +step:4450 train loss:3.524319 +step:4451 train loss:3.578241 +step:4452 train loss:3.596176 +step:4453 train loss:3.584886 +step:4454 train loss:3.574109 +step:4455 train loss:3.608673 +step:4456 train loss:3.614324 +step:4457 train loss:3.581568 +step:4458 train loss:3.592923 +step:4459 train loss:3.604233 +step:4460 train loss:3.609926 +step:4461 train loss:3.612375 +step:4462 train loss:3.585938 +step:4463 train loss:3.629201 +step:4464 train loss:3.642911 +step:4465 train loss:3.626988 +step:4466 train loss:3.665474 +step:4467 train loss:3.561236 +step:4468 train loss:3.552269 +step:4469 train loss:3.647636 
+step:4470 train loss:3.666124 +step:4471 train loss:3.598846 +step:4472 train loss:3.612825 +step:4473 train loss:3.553832 +step:4474 train loss:3.632531 +step:4475 train loss:3.620573 +step:4476 train loss:3.657881 +step:4477 train loss:3.621134 +step:4478 train loss:3.567603 +step:4479 train loss:3.633772 +step:4480 train loss:3.583214 +step:4481 train loss:3.626928 +step:4482 train loss:3.690902 +step:4483 train loss:3.580526 +step:4484 train loss:3.742013 +step:4485 train loss:3.627854 +step:4486 train loss:3.561288 +step:4487 train loss:3.622486 +step:4488 train loss:3.614670 +step:4489 train loss:3.648737 +step:4490 train loss:3.628205 +step:4491 train loss:3.602123 +step:4492 train loss:3.565585 +step:4493 train loss:3.614837 +step:4494 train loss:3.577405 +step:4495 train loss:3.597900 +step:4496 train loss:3.578094 +step:4497 train loss:3.556316 +step:4498 train loss:3.588260 +step:4499 train loss:3.635164 +step:4500 validation loss:3.560770 total_sharp:6.8409e-03 L1_sharp:8.2031e-03 L2_sharp:4.2695e-04 L3_sharp:1.1530e-03 L4_sharp:1.3331e-03 L5_sharp:8.5719e-04 L6_sharp:1.2995e-03 L7_sharp:1.5337e-03 L8_sharp:2.6044e-03 L9_sharp:1.9608e-03 L10_sharp:1.0809e-03 L11_sharp:1.0927e-03 L12_sharp:2.1922e-03 total_fnorm:2.1630e+00 total_l1_linf:1.9155e+04 total_spectral:2.1630e+00 L1_fnorm:4.6573e-01 L2_fnorm:4.8187e-01 L3_fnorm:4.7701e-01 L4_fnorm:4.8047e-01 L5_fnorm:4.7623e-01 L6_fnorm:4.8728e-01 L7_fnorm:4.9875e-01 L8_fnorm:4.9614e-01 L9_fnorm:5.0884e-01 L10_fnorm:5.2162e-01 L11_fnorm:5.2592e-01 L12_fnorm:5.0963e-01 L1_l1linf:5.4991e-01 L2_l1linf:5.6039e-01 L3_l1linf:5.9255e-01 L4_l1linf:5.6498e-01 L5_l1linf:4.9044e-01 L6_l1linf:6.0202e-01 L7_l1linf:5.7944e-01 L8_l1linf:5.4113e-01 L9_l1linf:5.4133e-01 L10_l1linf:5.3900e-01 L11_l1linf:5.5673e-01 L12_l1linf:5.4929e-01 L1_spectral:7.6019e-02 L2_spectral:8.0291e-02 L3_spectral:7.9214e-02 L4_spectral:7.9694e-02 L5_spectral:6.2609e-02 L6_spectral:7.7324e-02 L7_spectral:6.7198e-02 L8_spectral:6.4080e-02 L9_spectral:4.9287e-02 L10_spectral:4.5841e-02 L11_spectral:5.4110e-02 L12_spectral:6.8863e-02 v_norm:2.1630e+00 cos_v_-g_hvp:7.8514e-02 g_hvp_norm:3.4774e-01 cos_v_-g_t:9.2667e-02 g_t_norm:2.9615e-01 hv_norm:4.3963e-01 cos_v_hv:3.3658e-02 hg_norm:4.0253e+00 cos_g_hg:6.1227e-01 v_par:9.9804e-03 v_perp:2.1630e+00 L1_cos_v_neg_g:1.5338e-01 L1_v_norm:4.6573e-01 L2_cos_v_neg_g:7.5786e-02 L2_v_norm:4.8187e-01 L3_cos_v_neg_g:6.7420e-02 L3_v_norm:4.7701e-01 L4_cos_v_neg_g:6.4330e-02 L4_v_norm:4.8047e-01 L5_cos_v_neg_g:5.2674e-02 L5_v_norm:4.7623e-01 L6_cos_v_neg_g:6.7451e-02 L6_v_norm:4.8728e-01 L7_cos_v_neg_g:7.9403e-02 L7_v_norm:4.9875e-01 L8_cos_v_neg_g:7.4066e-02 L8_v_norm:4.9614e-01 L9_cos_v_neg_g:7.3164e-02 L9_v_norm:5.0884e-01 L10_cos_v_neg_g:8.7658e-02 L10_v_norm:5.2162e-01 L11_cos_v_neg_g:1.0861e-01 L11_v_norm:5.2592e-01 L12_cos_v_neg_g:1.3825e-01 L12_v_norm:5.0963e-01 +step:4500 train loss:3.575999 +step:4501 train loss:3.603568 +step:4502 train loss:3.618008 +step:4503 train loss:3.609939 +step:4504 train loss:3.596956 +step:4505 train loss:3.684313 +step:4506 train loss:3.611678 +step:4507 train loss:3.561558 +step:4508 train loss:3.569610 +step:4509 train loss:3.640819 +step:4510 train loss:3.540253 +step:4511 train loss:3.616059 +step:4512 train loss:3.561985 +step:4513 train loss:3.606330 +step:4514 train loss:3.564458 +step:4515 train loss:3.616735 +step:4516 train loss:3.633242 +step:4517 train loss:3.710287 +step:4518 train loss:3.622156 +step:4519 train loss:3.668891 +step:4520 train loss:3.565284 +step:4521 train loss:3.604218 
+step:4522 train loss:3.550524 +step:4523 train loss:3.636677 +step:4524 train loss:3.672009 +step:4525 train loss:3.614467 +step:4526 train loss:3.569375 +step:4527 train loss:3.586514 +step:4528 train loss:3.539501 +step:4529 train loss:3.564500 +step:4530 train loss:3.567755 +step:4531 train loss:3.554482 +step:4532 train loss:3.584602 +step:4533 train loss:3.579686 +step:4534 train loss:3.541914 +step:4535 train loss:3.607433 +step:4536 train loss:3.625515 +step:4537 train loss:3.575241 +step:4538 train loss:3.601863 +step:4539 train loss:3.610065 +step:4540 train loss:3.584948 +step:4541 train loss:3.677541 +step:4542 train loss:3.603367 +step:4543 train loss:3.596322 +step:4544 train loss:3.543849 +step:4545 train loss:3.582414 +step:4546 train loss:3.623063 +step:4547 train loss:3.585286 +step:4548 train loss:3.586228 +step:4549 train loss:3.556549 +step:4550 train loss:3.581073 +step:4551 train loss:3.625410 +step:4552 train loss:3.606899 +step:4553 train loss:3.604673 +step:4554 train loss:3.614953 +step:4555 train loss:3.595632 +step:4556 train loss:3.608246 +step:4557 train loss:3.579920 +step:4558 train loss:3.601930 +step:4559 train loss:3.578044 +step:4560 train loss:3.576592 +step:4561 train loss:3.577191 +step:4562 train loss:3.600270 +step:4563 train loss:3.506864 +step:4564 train loss:3.604133 +step:4565 train loss:3.560536 +step:4566 train loss:3.585611 +step:4567 train loss:3.587675 +step:4568 train loss:3.602568 +step:4569 train loss:3.542491 +step:4570 train loss:3.636303 +step:4571 train loss:3.606388 +step:4572 train loss:3.577055 +step:4573 train loss:3.565186 +step:4574 train loss:3.578730 +step:4575 train loss:3.625422 +step:4576 train loss:3.664350 +step:4577 train loss:3.610148 +step:4578 train loss:3.650650 +step:4579 train loss:3.686701 +step:4580 train loss:3.596765 +step:4581 train loss:3.684073 +step:4582 train loss:3.609208 +step:4583 train loss:3.612058 +step:4584 train loss:3.588413 +step:4585 train loss:3.605470 +step:4586 train loss:3.578040 +step:4587 train loss:3.558448 +step:4588 train loss:3.580826 +step:4589 train loss:3.591161 +step:4590 train loss:3.545897 +step:4591 train loss:3.584884 +step:4592 train loss:3.575678 +step:4593 train loss:3.587358 +step:4594 train loss:3.610276 +step:4595 train loss:3.669390 +step:4596 train loss:3.611781 +step:4597 train loss:3.683400 +step:4598 train loss:3.596141 +step:4599 train loss:3.541746 +step:4600 train loss:3.559500 +step:4601 train loss:3.650062 +step:4602 train loss:3.638172 +step:4603 train loss:3.600700 +step:4604 train loss:3.603514 +step:4605 train loss:3.561474 +step:4606 train loss:3.554723 +step:4607 train loss:3.613341 +step:4608 train loss:3.661488 +step:4609 train loss:3.593275 +step:4610 train loss:3.570928 +step:4611 train loss:3.617713 +step:4612 train loss:3.591709 +step:4613 train loss:3.598461 +step:4614 train loss:3.580468 +step:4615 train loss:3.546543 +step:4616 train loss:3.584361 +step:4617 train loss:3.539424 +step:4618 train loss:3.635980 +step:4619 train loss:3.624058 +step:4620 train loss:3.596366 +step:4621 train loss:3.681291 +step:4622 train loss:3.587379 +step:4623 train loss:3.547674 +step:4624 train loss:3.548131 +step:4625 train loss:3.579114 +step:4626 train loss:3.653157 +step:4627 train loss:3.624720 +step:4628 train loss:3.673071 +step:4629 train loss:3.561623 +step:4630 train loss:3.566137 +step:4631 train loss:3.556709 +step:4632 train loss:3.558073 +step:4633 train loss:3.547879 +step:4634 train loss:3.555302 +step:4635 train loss:3.658062 +step:4636 train 
loss:3.497513 +step:4637 train loss:3.611174 +step:4638 train loss:3.649030 +step:4639 train loss:3.587024 +step:4640 train loss:3.576817 +step:4641 train loss:3.571314 +step:4642 train loss:3.625124 +step:4643 train loss:3.553490 +step:4644 train loss:3.616087 +step:4645 train loss:3.557339 +step:4646 train loss:3.545155 +step:4647 train loss:3.620068 +step:4648 train loss:3.586838 +step:4649 train loss:3.600873 +step:4650 train loss:3.558275 +step:4651 train loss:3.635092 +step:4652 train loss:3.582854 +step:4653 train loss:3.692252 +step:4654 train loss:3.651113 +step:4655 train loss:3.559657 +step:4656 train loss:3.539247 +step:4657 train loss:3.605137 +step:4658 train loss:3.591281 +step:4659 train loss:3.556666 +step:4660 train loss:3.596090 +step:4661 train loss:3.609581 +step:4662 train loss:3.567651 +step:4663 train loss:3.535377 +step:4664 train loss:3.579339 +step:4665 train loss:3.563472 +step:4666 train loss:3.563826 +step:4667 train loss:3.587705 +step:4668 train loss:3.555444 +step:4669 train loss:3.554718 +step:4670 train loss:3.572721 +step:4671 train loss:3.571941 +step:4672 train loss:3.617956 +step:4673 train loss:3.556723 +step:4674 train loss:3.560389 +step:4675 train loss:3.568603 +step:4676 train loss:3.604561 +step:4677 train loss:3.561057 +step:4678 train loss:3.597438 +step:4679 train loss:3.559926 +step:4680 train loss:3.523744 +step:4681 train loss:3.562731 +step:4682 train loss:3.629930 +step:4683 train loss:3.568421 +step:4684 train loss:3.610770 +step:4685 train loss:3.616365 +step:4686 train loss:3.608563 +step:4687 train loss:3.598095 +step:4688 train loss:3.571935 +step:4689 train loss:3.602689 +step:4690 train loss:3.585689 +step:4691 train loss:3.554134 +step:4692 train loss:3.593881 +step:4693 train loss:3.568918 +step:4694 train loss:3.624157 +step:4695 train loss:3.558721 +step:4696 train loss:3.682554 +step:4697 train loss:3.627345 +step:4698 train loss:3.606044 +step:4699 train loss:3.622425 +step:4700 train loss:3.571891 +step:4701 train loss:3.635268 +step:4702 train loss:3.634830 +step:4703 train loss:3.477350 +step:4704 train loss:3.604279 +step:4705 train loss:3.588255 +step:4706 train loss:3.575441 +step:4707 train loss:3.638482 +step:4708 train loss:3.636850 +step:4709 train loss:3.564548 +step:4710 train loss:3.608052 +step:4711 train loss:3.583024 +step:4712 train loss:3.570193 +step:4713 train loss:3.643148 +step:4714 train loss:3.653850 +step:4715 train loss:3.627772 +step:4716 train loss:3.773954 +step:4717 train loss:3.683875 +step:4718 train loss:3.650847 +step:4719 train loss:3.568582 +step:4720 train loss:3.599444 +step:4721 train loss:3.645835 +step:4722 train loss:3.645257 +step:4723 train loss:3.650295 +step:4724 train loss:3.680482 +step:4725 train loss:3.565742 +step:4726 train loss:3.668262 +step:4727 train loss:3.576931 +step:4728 train loss:3.581109 +step:4729 train loss:3.573210 +step:4730 train loss:3.569022 +step:4731 train loss:3.633793 +step:4732 train loss:3.648938 +step:4733 train loss:3.628345 +step:4734 train loss:3.566433 +step:4735 train loss:3.584331 +step:4736 train loss:3.563369 +step:4737 train loss:3.587413 +step:4738 train loss:3.581278 +step:4739 train loss:3.621360 +step:4740 train loss:3.573281 +step:4741 train loss:3.608621 +step:4742 train loss:3.566906 +step:4743 train loss:3.651093 +step:4744 train loss:3.557528 +step:4745 train loss:3.600094 +step:4746 train loss:3.571906 +step:4747 train loss:3.520176 +step:4748 train loss:3.589346 +step:4749 train loss:3.598452 +step:4750 validation loss:3.548671 
+step:4750 train loss:3.624868 +step:4751 train loss:3.618565 +step:4752 train loss:3.690418 +step:4753 train loss:3.633057 +step:4754 train loss:3.622604 +step:4755 train loss:3.637537 +step:4756 train loss:3.563673 +step:4757 train loss:3.606481 +step:4758 train loss:3.615462 +step:4759 train loss:3.577607 +step:4760 train loss:3.568534 +step:4761 train loss:3.591073 +step:4762 train loss:3.602815 +step:4763 train loss:3.597295 +step:4764 train loss:3.535661 +step:4765 train loss:3.618295 +step:4766 train loss:3.581797 +step:4767 train loss:3.528668 +step:4768 train loss:3.611994 +step:4769 train loss:3.567607 +step:4770 train loss:3.595763 +step:4771 train loss:3.575590 +step:4772 train loss:3.505935 +step:4773 train loss:3.599051 +step:4774 train loss:3.605278 +step:4775 train loss:3.563673 +step:4776 train loss:3.608146 +step:4777 train loss:3.562460 +step:4778 train loss:3.595170 +step:4779 train loss:3.567881 +step:4780 train loss:3.549925 +step:4781 train loss:3.602623 +step:4782 train loss:3.585807 +step:4783 train loss:3.642200 +step:4784 train loss:3.575855 +step:4785 train loss:3.537099 +step:4786 train loss:3.606749 +step:4787 train loss:3.577319 +step:4788 train loss:3.573694 +step:4789 train loss:3.553993 +step:4790 train loss:3.618659 +step:4791 train loss:3.566846 +step:4792 train loss:3.614223 +step:4793 train loss:3.631009 +step:4794 train loss:3.618427 +step:4795 train loss:3.628860 +step:4796 train loss:3.605474 +step:4797 train loss:3.599188 +step:4798 train loss:3.635703 +step:4799 train loss:3.544844 +step:4800 train loss:3.581734 +step:4801 train loss:3.559654 +step:4802 train loss:3.588094 +step:4803 train loss:3.617743 +step:4804 train loss:3.602938 +step:4805 train loss:3.594935 +step:4806 train loss:3.591861 +step:4807 train loss:3.579275 +step:4808 train loss:3.563747 +step:4809 train loss:3.520596 +step:4810 train loss:3.600444 +step:4811 train loss:3.634342 +step:4812 train loss:3.567834 +step:4813 train loss:3.572295 +step:4814 train loss:3.613234 +step:4815 train loss:3.552658 +step:4816 train loss:3.563651 +step:4817 train loss:3.596089 +step:4818 train loss:3.622627 +step:4819 train loss:3.547106 +step:4820 train loss:3.569832 +step:4821 train loss:3.565958 +step:4822 train loss:3.589783 +step:4823 train loss:3.584566 +step:4824 train loss:3.629952 +step:4825 train loss:3.572945 +step:4826 train loss:3.578645 +step:4827 train loss:3.589566 +step:4828 train loss:3.560967 +step:4829 train loss:3.635085 +step:4830 train loss:3.528328 +step:4831 train loss:3.572498 +step:4832 train loss:3.558422 +step:4833 train loss:3.530412 +step:4834 train loss:3.581605 +step:4835 train loss:3.616714 +step:4836 train loss:3.539710 +step:4837 train loss:3.606767 +step:4838 train loss:3.598097 +step:4839 train loss:3.629868 +step:4840 train loss:3.552388 +step:4841 train loss:3.548138 +step:4842 train loss:3.552930 +step:4843 train loss:3.624351 +step:4844 train loss:3.597705 +step:4845 train loss:3.579964 +step:4846 train loss:3.649126 +step:4847 train loss:3.558925 +step:4848 train loss:3.630926 +step:4849 train loss:3.615411 +step:4850 train loss:3.605297 +step:4851 train loss:3.614824 +step:4852 train loss:3.605469 +step:4853 train loss:3.625427 +step:4854 train loss:3.585784 +step:4855 train loss:3.617130 +step:4856 train loss:3.585485 +step:4857 train loss:3.586645 +step:4858 train loss:3.512939 +step:4859 train loss:3.556593 +step:4860 train loss:3.624658 +step:4861 train loss:3.582656 +step:4862 train loss:3.610012 +step:4863 train loss:3.591307 +step:4864 train 
loss:3.556783 +step:4865 train loss:3.606340 +step:4866 train loss:3.581419 +step:4867 train loss:3.610254 +step:4868 train loss:3.578079 +step:4869 train loss:3.570061 +step:4870 train loss:3.729018 +step:4871 train loss:3.630142 +step:4872 train loss:3.571650 +step:4873 train loss:3.623868 +step:4874 train loss:3.611835 +step:4875 train loss:3.597433 +step:4876 train loss:3.523972 +step:4877 train loss:3.558680 +step:4878 train loss:3.512658 +step:4879 train loss:3.510025 +step:4880 train loss:3.547366 +step:4881 train loss:3.541205 +step:4882 train loss:3.569273 +step:4883 train loss:3.597263 +step:4884 train loss:3.607400 +step:4885 train loss:3.579754 +step:4886 train loss:3.575936 +step:4887 train loss:3.606215 +step:4888 train loss:3.648266 +step:4889 train loss:3.581611 +step:4890 train loss:3.547640 +step:4891 train loss:3.564589 +step:4892 train loss:3.592409 +step:4893 train loss:3.564591 +step:4894 train loss:3.549681 +step:4895 train loss:3.618026 +step:4896 train loss:3.633828 +step:4897 train loss:3.631653 +step:4898 train loss:3.551739 +step:4899 train loss:3.532682 +step:4900 train loss:3.576003 +step:4901 train loss:3.584482 +step:4902 train loss:3.570957 +step:4903 train loss:3.524688 +step:4904 train loss:3.583016 +step:4905 train loss:3.568561 +step:4906 train loss:3.635919 +step:4907 train loss:3.569964 +step:4908 train loss:3.570076 +step:4909 train loss:3.604045 +step:4910 train loss:3.591150 +step:4911 train loss:3.567500 +step:4912 train loss:3.679595 +step:4913 train loss:3.538393 +step:4914 train loss:3.631962 +step:4915 train loss:3.586205 +step:4916 train loss:3.592976 +step:4917 train loss:3.589191 +step:4918 train loss:3.581549 +step:4919 train loss:3.499663 +step:4920 train loss:3.530500 +step:4921 train loss:3.597046 +step:4922 train loss:3.573290 +step:4923 train loss:3.587131 +step:4924 train loss:3.571061 +step:4925 train loss:3.575816 +step:4926 train loss:3.536200 +step:4927 train loss:3.592239 +step:4928 train loss:3.560121 +step:4929 train loss:3.563917 +step:4930 train loss:3.563875 +step:4931 train loss:3.509631 +step:4932 train loss:3.594998 +step:4933 train loss:3.581788 +step:4934 train loss:3.529097 +step:4935 train loss:3.587231 +step:4936 train loss:3.597804 +step:4937 train loss:3.541380 +step:4938 train loss:3.621732 +step:4939 train loss:3.532712 +step:4940 train loss:3.563760 +step:4941 train loss:3.630054 +step:4942 train loss:3.564259 +step:4943 train loss:3.567117 +step:4944 train loss:3.557900 +step:4945 train loss:3.617766 +step:4946 train loss:3.523791 +step:4947 train loss:3.659443 +step:4948 train loss:3.519299 +step:4949 train loss:3.561631 +step:4950 train loss:3.613138 +step:4951 train loss:3.569993 +step:4952 train loss:3.588811 +step:4953 train loss:3.581684 +step:4954 train loss:3.521223 +step:4955 train loss:3.561346 +step:4956 train loss:3.580890 +step:4957 train loss:3.566835 +step:4958 train loss:3.595256 +step:4959 train loss:3.637877 +step:4960 train loss:3.677002 +step:4961 train loss:3.566090 +step:4962 train loss:3.596239 +step:4963 train loss:3.583015 +step:4964 train loss:3.542001 +step:4965 train loss:3.618611 +step:4966 train loss:3.546376 +step:4967 train loss:3.634927 +step:4968 train loss:3.616370 +step:4969 train loss:3.553378 +step:4970 train loss:3.590138 +step:4971 train loss:3.566974 +step:4972 train loss:3.610939 +step:4973 train loss:3.806811 +step:4974 train loss:3.569487 +step:4975 train loss:3.662142 +step:4976 train loss:3.622390 +step:4977 train loss:3.659712 +step:4978 train loss:3.562425 
+step:4979 train loss:3.557813 +step:4980 train loss:3.578964 +step:4981 train loss:3.529915 +step:4982 train loss:3.601484 +step:4983 train loss:3.563274 +step:4984 train loss:3.576711 +step:4985 train loss:3.579398 +step:4986 train loss:3.514161 +step:4987 train loss:3.628189 +step:4988 train loss:3.539139 +step:4989 train loss:3.616353 +step:4990 train loss:3.566916 +step:4991 train loss:3.541767 +step:4992 train loss:3.554619 +step:4993 train loss:3.619545 +step:4994 train loss:3.597838 +step:4995 train loss:3.599888 +step:4996 train loss:3.629174 +step:4997 train loss:3.630439 +step:4998 train loss:3.594381 +step:4999 train loss:3.553103 +step:5000 validation loss:3.541049 total_sharp:7.5543e-03 L1_sharp:6.8758e-03 L2_sharp:6.3050e-04 L3_sharp:1.7717e-03 L4_sharp:1.5939e-03 L5_sharp:1.3074e-03 L6_sharp:1.8503e-03 L7_sharp:1.8792e-03 L8_sharp:2.9743e-03 L9_sharp:2.3617e-03 L10_sharp:1.1398e-03 L11_sharp:9.7481e-04 L12_sharp:1.9957e-03 total_fnorm:2.2011e+00 total_l1_linf:1.9532e+04 total_spectral:2.2011e+00 L1_fnorm:4.7940e-01 L2_fnorm:4.9557e-01 L3_fnorm:4.8869e-01 L4_fnorm:4.9325e-01 L5_fnorm:4.9040e-01 L6_fnorm:5.0021e-01 L7_fnorm:5.0723e-01 L8_fnorm:5.0389e-01 L9_fnorm:5.1874e-01 L10_fnorm:5.3462e-01 L11_fnorm:5.3850e-01 L12_fnorm:5.2603e-01 L1_l1linf:5.7816e-01 L2_l1linf:5.8405e-01 L3_l1linf:6.2232e-01 L4_l1linf:7.1094e-01 L5_l1linf:5.9458e-01 L6_l1linf:6.5402e-01 L7_l1linf:5.6362e-01 L8_l1linf:5.4268e-01 L9_l1linf:5.2461e-01 L10_l1linf:5.3840e-01 L11_l1linf:5.4463e-01 L12_l1linf:5.5182e-01 L1_spectral:8.0277e-02 L2_spectral:7.7152e-02 L3_spectral:8.1077e-02 L4_spectral:8.4575e-02 L5_spectral:6.8426e-02 L6_spectral:7.9731e-02 L7_spectral:7.2776e-02 L8_spectral:6.4271e-02 L9_spectral:5.2270e-02 L10_spectral:4.5172e-02 L11_spectral:5.0620e-02 L12_spectral:7.4549e-02 v_norm:2.2011e+00 cos_v_-g_hvp:7.1783e-02 g_hvp_norm:3.9308e-01 cos_v_-g_t:8.2159e-02 g_t_norm:3.4455e-01 hv_norm:5.0071e-01 cos_v_hv:3.3209e-02 hg_norm:5.1384e+00 cos_g_hg:7.2972e-01 v_par:7.5120e-03 v_perp:2.2011e+00 L1_cos_v_neg_g:1.4803e-01 L1_v_norm:4.7940e-01 L2_cos_v_neg_g:6.4900e-02 L2_v_norm:4.9557e-01 L3_cos_v_neg_g:5.5031e-02 L3_v_norm:4.8869e-01 L4_cos_v_neg_g:5.9867e-02 L4_v_norm:4.9325e-01 L5_cos_v_neg_g:5.1643e-02 L5_v_norm:4.9040e-01 L6_cos_v_neg_g:6.7475e-02 L6_v_norm:5.0021e-01 L7_cos_v_neg_g:7.6325e-02 L7_v_norm:5.0723e-01 L8_cos_v_neg_g:6.7982e-02 L8_v_norm:5.0389e-01 L9_cos_v_neg_g:6.4809e-02 L9_v_norm:5.1874e-01 L10_cos_v_neg_g:7.9202e-02 L10_v_norm:5.3462e-01 L11_cos_v_neg_g:1.0182e-01 L11_v_norm:5.3850e-01 L12_cos_v_neg_g:1.4679e-01 L12_v_norm:5.2603e-01 +step:5000 train loss:3.525006 +step:5001 train loss:3.559119 +step:5002 train loss:3.583848 +step:5003 train loss:3.561766 +step:5004 train loss:3.566177 +step:5005 train loss:3.497686 +step:5006 train loss:3.591925 +step:5007 train loss:3.555417 +step:5008 train loss:3.620499 +step:5009 train loss:3.595286 +step:5010 train loss:3.521690 +step:5011 train loss:3.570443 +step:5012 train loss:3.551839 +step:5013 train loss:3.771402 +step:5014 train loss:3.589003 +step:5015 train loss:3.611563 +step:5016 train loss:3.584371 +step:5017 train loss:3.519216 +step:5018 train loss:3.613783 +step:5019 train loss:3.571774 +step:5020 train loss:3.568608 +step:5021 train loss:3.561763 +step:5022 train loss:3.549098 +step:5023 train loss:3.610991 +step:5024 train loss:3.628661 +step:5025 train loss:3.552108 +step:5026 train loss:3.538179 +step:5027 train loss:3.563285 +step:5028 train loss:3.570259 +step:5029 train loss:3.605209 +step:5030 train loss:3.527199 
+step:5031 train loss:3.554126 +step:5032 train loss:3.560011 +step:5033 train loss:3.661119 +step:5034 train loss:3.578881 +step:5035 train loss:3.588500 +step:5036 train loss:3.580083 +step:5037 train loss:3.600534 +step:5038 train loss:3.587132 +step:5039 train loss:3.596014 +step:5040 train loss:3.544138 +step:5041 train loss:3.601098 +step:5042 train loss:3.554162 +step:5043 train loss:3.558143 +step:5044 train loss:3.635387 +step:5045 train loss:3.567345 +step:5046 train loss:3.565077 +step:5047 train loss:3.580409 +step:5048 train loss:3.553258 +step:5049 train loss:3.596624 +step:5050 train loss:3.563834 +step:5051 train loss:3.618164 +step:5052 train loss:3.555935 +step:5053 train loss:3.525618 +step:5054 train loss:3.511413 +step:5055 train loss:3.530721 +step:5056 train loss:3.615879 +step:5057 train loss:3.583709 +step:5058 train loss:3.552362 +step:5059 train loss:3.585658 +step:5060 train loss:3.539641 +step:5061 train loss:3.655253 +step:5062 train loss:3.538003 +step:5063 train loss:3.520178 +step:5064 train loss:3.624156 +step:5065 train loss:3.589767 +step:5066 train loss:3.610363 +step:5067 train loss:3.596840 +step:5068 train loss:3.585816 +step:5069 train loss:3.602796 +step:5070 train loss:3.533927 +step:5071 train loss:3.560492 +step:5072 train loss:3.643790 +step:5073 train loss:3.565401 +step:5074 train loss:3.627428 +step:5075 train loss:3.638944 +step:5076 train loss:3.625955 +step:5077 train loss:3.561550 +step:5078 train loss:3.556897 +step:5079 train loss:3.541069 +step:5080 train loss:3.565066 +step:5081 train loss:3.581247 +step:5082 train loss:3.552377 +step:5083 train loss:3.564729 +step:5084 train loss:3.549028 +step:5085 train loss:3.574366 +step:5086 train loss:3.561016 +step:5087 train loss:3.550192 +step:5088 train loss:3.529369 +step:5089 train loss:3.592271 +step:5090 train loss:3.574453 +step:5091 train loss:3.554781 +step:5092 train loss:3.572353 +step:5093 train loss:3.554771 +step:5094 train loss:3.557528 +step:5095 train loss:3.622882 +step:5096 train loss:3.746912 +step:5097 train loss:3.541994 +step:5098 train loss:3.590357 +step:5099 train loss:3.564219 +step:5100 train loss:3.553166 +step:5101 train loss:3.602069 +step:5102 train loss:3.560619 +step:5103 train loss:3.566822 +step:5104 train loss:3.603157 +step:5105 train loss:3.505424 +step:5106 train loss:3.591598 +step:5107 train loss:3.564767 +step:5108 train loss:3.521553 +step:5109 train loss:3.517347 +step:5110 train loss:3.566737 +step:5111 train loss:3.549700 +step:5112 train loss:3.519330 +step:5113 train loss:3.559550 +step:5114 train loss:3.550003 +step:5115 train loss:3.521607 +step:5116 train loss:3.541995 +step:5117 train loss:3.608844 +step:5118 train loss:3.486521 +step:5119 train loss:3.606364 +step:5120 train loss:3.520208 +step:5121 train loss:3.550786 +step:5122 train loss:3.602921 +step:5123 train loss:3.522069 +step:5124 train loss:3.567552 +step:5125 train loss:3.576329 +step:5126 train loss:3.514290 +step:5127 train loss:3.562609 +step:5128 train loss:3.551120 +step:5129 train loss:3.512480 +step:5130 train loss:3.570078 +step:5131 train loss:3.608730 +step:5132 train loss:3.614387 +step:5133 train loss:3.600939 +step:5134 train loss:3.631786 +step:5135 train loss:3.578215 +step:5136 train loss:3.528602 +step:5137 train loss:3.554792 +step:5138 train loss:3.563131 +step:5139 train loss:3.581224 +step:5140 train loss:3.618510 +step:5141 train loss:3.588544 +step:5142 train loss:3.653911 +step:5143 train loss:3.678454 +step:5144 train loss:3.650761 +step:5145 train 
loss:3.545244 +step:5146 train loss:3.620717 +step:5147 train loss:3.661512 +step:5148 train loss:3.578936 +step:5149 train loss:3.575742 +step:5150 train loss:3.558644 +step:5151 train loss:3.559663 +step:5152 train loss:3.557220 +step:5153 train loss:3.547819 +step:5154 train loss:3.534431 +step:5155 train loss:3.531970 +step:5156 train loss:3.561888 +step:5157 train loss:3.548008 +step:5158 train loss:3.587529 +step:5159 train loss:3.614980 +step:5160 train loss:3.583999 +step:5161 train loss:3.589283 +step:5162 train loss:3.504813 +step:5163 train loss:3.566847 +step:5164 train loss:3.576012 +step:5165 train loss:3.578189 +step:5166 train loss:3.536827 +step:5167 train loss:3.530566 +step:5168 train loss:3.595890 +step:5169 train loss:3.576308 +step:5170 train loss:3.606672 +step:5171 train loss:3.566815 +step:5172 train loss:3.556466 +step:5173 train loss:3.547335 +step:5174 train loss:3.618808 +step:5175 train loss:3.568043 +step:5176 train loss:3.605529 +step:5177 train loss:3.649511 +step:5178 train loss:3.775394 +step:5179 train loss:3.595931 +step:5180 train loss:3.553432 +step:5181 train loss:3.567350 +step:5182 train loss:3.585957 +step:5183 train loss:3.567123 +step:5184 train loss:3.534061 +step:5185 train loss:3.526246 +step:5186 train loss:3.569889 +step:5187 train loss:3.595459 +step:5188 train loss:3.571475 +step:5189 train loss:3.555947 +step:5190 train loss:3.494883 +step:5191 train loss:3.615090 +step:5192 train loss:3.520648 +step:5193 train loss:3.538856 +step:5194 train loss:3.629155 +step:5195 train loss:3.546879 +step:5196 train loss:3.576454 +step:5197 train loss:3.467530 +step:5198 train loss:3.539919 +step:5199 train loss:3.543642 +step:5200 train loss:3.542144 +step:5201 train loss:3.518285 +step:5202 train loss:3.563571 +step:5203 train loss:3.551036 +step:5204 train loss:3.557532 +step:5205 train loss:3.551265 +step:5206 train loss:3.587829 +step:5207 train loss:3.543774 +step:5208 train loss:3.607137 +step:5209 train loss:3.512842 +step:5210 train loss:3.546226 +step:5211 train loss:3.556245 +step:5212 train loss:3.578272 +step:5213 train loss:3.538549 +step:5214 train loss:3.591592 +step:5215 train loss:3.567053 +step:5216 train loss:3.531714 +step:5217 train loss:3.573805 +step:5218 train loss:3.592945 +step:5219 train loss:3.504058 +step:5220 train loss:3.540914 +step:5221 train loss:3.573143 +step:5222 train loss:3.643004 +step:5223 train loss:3.596799 +step:5224 train loss:3.583026 +step:5225 train loss:3.525361 +step:5226 train loss:3.518764 +step:5227 train loss:3.512364 +step:5228 train loss:3.593019 +step:5229 train loss:3.576953 +step:5230 train loss:3.562728 +step:5231 train loss:3.488449 +step:5232 train loss:3.550872 +step:5233 train loss:3.521849 +step:5234 train loss:3.573346 +step:5235 train loss:3.567256 +step:5236 train loss:3.592167 +step:5237 train loss:3.489367 +step:5238 train loss:3.549291 +step:5239 train loss:3.484652 +step:5240 train loss:3.571183 +step:5241 train loss:3.533118 +step:5242 train loss:3.553087 +step:5243 train loss:3.529767 +step:5244 train loss:3.551808 +step:5245 train loss:3.552772 +step:5246 train loss:3.675476 +step:5247 train loss:3.518186 +step:5248 train loss:3.663544 +step:5249 train loss:3.518392 +step:5250 validation loss:3.534412 +step:5250 train loss:3.584808 +step:5251 train loss:3.539048 +step:5252 train loss:3.564351 +step:5253 train loss:3.494098 +step:5254 train loss:3.567073 +step:5255 train loss:3.510858 +step:5256 train loss:3.573351 +step:5257 train loss:3.556153 +step:5258 train loss:3.585752 
+step:5259 train loss:3.531530 +step:5260 train loss:3.532676 +step:5261 train loss:3.543133 +step:5262 train loss:3.541449 +step:5263 train loss:3.541248 +step:5264 train loss:3.599248 +step:5265 train loss:3.493639 +step:5266 train loss:3.575170 +step:5267 train loss:3.535706 +step:5268 train loss:3.516329 +step:5269 train loss:3.581418 +step:5270 train loss:3.569174 +step:5271 train loss:3.507233 +step:5272 train loss:3.628261 +step:5273 train loss:3.516129 +step:5274 train loss:3.529729 +step:5275 train loss:3.536675 +step:5276 train loss:3.577048 +step:5277 train loss:3.530482 +step:5278 train loss:3.551853 +step:5279 train loss:3.507566 +step:5280 train loss:3.559254 +step:5281 train loss:3.519734 +step:5282 train loss:3.550583 +step:5283 train loss:3.558338 +step:5284 train loss:3.551739 +step:5285 train loss:3.520671 +step:5286 train loss:3.517650 +step:5287 train loss:3.595338 +step:5288 train loss:3.545920 +step:5289 train loss:3.556975 +step:5290 train loss:3.551703 +step:5291 train loss:3.554401 +step:5292 train loss:3.549580 +step:5293 train loss:3.550259 +step:5294 train loss:3.495304 +step:5295 train loss:3.619365 +step:5296 train loss:3.620831 +step:5297 train loss:3.576720 +step:5298 train loss:3.533846 +step:5299 train loss:3.520283 +step:5300 train loss:3.470328 +step:5301 train loss:3.539328 +step:5302 train loss:3.508965 +step:5303 train loss:3.550819 +step:5304 train loss:3.581084 +step:5305 train loss:3.571661 +step:5306 train loss:3.557739 +step:5307 train loss:3.585749 +step:5308 train loss:3.540945 +step:5309 train loss:3.514778 +step:5310 train loss:3.555451 +step:5311 train loss:3.528648 +step:5312 train loss:3.556890 +step:5313 train loss:3.556127 +step:5314 train loss:3.558069 +step:5315 train loss:3.532846 +step:5316 train loss:3.556716 +step:5317 train loss:3.570777 +step:5318 train loss:3.578449 +step:5319 train loss:3.630429 +step:5320 train loss:3.549856 +step:5321 train loss:3.546960 +step:5322 train loss:3.506724 +step:5323 train loss:3.583357 +step:5324 train loss:3.525318 +step:5325 train loss:3.578296 +step:5326 train loss:3.504270 +step:5327 train loss:3.530873 +step:5328 train loss:3.510771 +step:5329 train loss:3.613800 +step:5330 train loss:3.520921 +step:5331 train loss:3.681865 +step:5332 train loss:3.531159 +step:5333 train loss:3.570958 +step:5334 train loss:3.543312 +step:5335 train loss:3.518567 +step:5336 train loss:3.712589 +step:5337 train loss:3.547551 +step:5338 train loss:3.576544 +step:5339 train loss:3.511712 +step:5340 train loss:3.553116 +step:5341 train loss:3.552119 +step:5342 train loss:3.586889 +step:5343 train loss:3.528203 +step:5344 train loss:3.536156 +step:5345 train loss:3.598071 +step:5346 train loss:3.600526 +step:5347 train loss:3.553118 +step:5348 train loss:3.571519 +step:5349 train loss:3.535390 +step:5350 train loss:3.551420 +step:5351 train loss:3.543223 +step:5352 train loss:3.564112 +step:5353 train loss:3.529010 +step:5354 train loss:3.587477 +step:5355 train loss:3.589142 +step:5356 train loss:3.544890 +step:5357 train loss:3.497345 +step:5358 train loss:3.615592 +step:5359 train loss:3.540619 +step:5360 train loss:3.558623 +step:5361 train loss:3.581355 +step:5362 train loss:3.570458 +step:5363 train loss:3.566808 +step:5364 train loss:3.605403 +step:5365 train loss:3.593940 +step:5366 train loss:3.584150 +step:5367 train loss:3.606380 +step:5368 train loss:3.607283 +step:5369 train loss:3.560079 +step:5370 train loss:3.549390 +step:5371 train loss:3.603061 +step:5372 train loss:3.542192 +step:5373 train 
loss:3.506571 +step:5374 train loss:3.582456 +step:5375 train loss:3.583510 +step:5376 train loss:3.581467 +step:5377 train loss:3.588075 +step:5378 train loss:3.542200 +step:5379 train loss:3.549304 +step:5380 train loss:3.596287 +step:5381 train loss:3.549675 +step:5382 train loss:3.575155 +step:5383 train loss:3.595664 +step:5384 train loss:3.724370 +step:5385 train loss:3.571146 +step:5386 train loss:3.692093 +step:5387 train loss:3.539444 +step:5388 train loss:3.567652 +step:5389 train loss:3.568070 +step:5390 train loss:3.573203 +step:5391 train loss:3.538393 +step:5392 train loss:3.543025 +step:5393 train loss:3.512615 +step:5394 train loss:3.537155 +step:5395 train loss:3.582500 +step:5396 train loss:3.553255 +step:5397 train loss:3.519531 +step:5398 train loss:3.555095 +step:5399 train loss:3.531761 +step:5400 train loss:3.561590 +step:5401 train loss:3.552666 +step:5402 train loss:3.682743 +step:5403 train loss:3.546832 +step:5404 train loss:3.562597 +step:5405 train loss:3.555535 +step:5406 train loss:3.529432 +step:5407 train loss:3.602009 +step:5408 train loss:3.579782 +step:5409 train loss:3.761610 +step:5410 train loss:3.572986 +step:5411 train loss:3.553627 +step:5412 train loss:3.554409 +step:5413 train loss:3.577396 +step:5414 train loss:3.558322 +step:5415 train loss:3.533887 +step:5416 train loss:3.559160 +step:5417 train loss:3.520259 +step:5418 train loss:3.579063 +step:5419 train loss:3.596834 +step:5420 train loss:3.546046 +step:5421 train loss:3.583562 +step:5422 train loss:3.530609 +step:5423 train loss:3.565937 +step:5424 train loss:3.528628 +step:5425 train loss:3.581705 +step:5426 train loss:3.555081 +step:5427 train loss:3.535833 +step:5428 train loss:3.533305 +step:5429 train loss:3.531022 +step:5430 train loss:3.516652 +step:5431 train loss:3.607470 +step:5432 train loss:3.544831 +step:5433 train loss:3.549082 +step:5434 train loss:3.575127 +step:5435 train loss:3.750715 +step:5436 train loss:3.543915 +step:5437 train loss:3.561207 +step:5438 train loss:3.523645 +step:5439 train loss:3.534483 +step:5440 train loss:3.628844 +step:5441 train loss:3.685469 +step:5442 train loss:3.530973 +step:5443 train loss:3.580993 +step:5444 train loss:3.558084 +step:5445 train loss:3.547596 +step:5446 train loss:3.523107 +step:5447 train loss:3.568785 +step:5448 train loss:3.604062 +step:5449 train loss:3.556808 +step:5450 train loss:3.579537 +step:5451 train loss:3.549154 +step:5452 train loss:3.581027 +step:5453 train loss:3.552799 +step:5454 train loss:3.545213 +step:5455 train loss:3.532293 +step:5456 train loss:3.558352 +step:5457 train loss:3.554837 +step:5458 train loss:3.544557 +step:5459 train loss:3.572699 +step:5460 train loss:3.575351 +step:5461 train loss:3.489352 +step:5462 train loss:3.531043 +step:5463 train loss:3.577122 +step:5464 train loss:3.591140 +step:5465 train loss:3.531370 +step:5466 train loss:3.539713 +step:5467 train loss:3.559717 +step:5468 train loss:3.516880 +step:5469 train loss:3.519146 +step:5470 train loss:3.551605 +step:5471 train loss:3.572049 +step:5472 train loss:3.525989 +step:5473 train loss:3.558679 +step:5474 train loss:3.773384 +step:5475 train loss:3.585500 +step:5476 train loss:3.580690 +step:5477 train loss:3.679064 +step:5478 train loss:3.560462 +step:5479 train loss:3.589586 +step:5480 train loss:3.576221 +step:5481 train loss:3.605240 +step:5482 train loss:3.586728 +step:5483 train loss:3.629388 +step:5484 train loss:3.551613 +step:5485 train loss:3.551504 +step:5486 train loss:3.600575 +step:5487 train loss:3.592814 
+step:5488 train loss:3.616601 +step:5489 train loss:3.564738 +step:5490 train loss:3.527321 +step:5491 train loss:3.562020 +step:5492 train loss:3.534572 +step:5493 train loss:3.525366 +step:5494 train loss:3.584147 +step:5495 train loss:3.571835 +step:5496 train loss:3.551929 +step:5497 train loss:3.559672 +step:5498 train loss:3.551021 +step:5499 train loss:3.597308 +step:5500 validation loss:3.525550 total_sharp:9.9571e-03 L1_sharp:1.0958e-02 L2_sharp:1.6665e-03 L3_sharp:6.8757e-03 L4_sharp:2.4638e-03 L5_sharp:1.3681e-03 L6_sharp:1.4802e-03 L7_sharp:1.5920e-03 L8_sharp:2.9609e-03 L9_sharp:2.1696e-03 L10_sharp:1.1240e-03 L11_sharp:1.0521e-03 L12_sharp:2.1871e-03 total_fnorm:2.2146e+00 total_l1_linf:1.9696e+04 total_spectral:2.2146e+00 L1_fnorm:4.9367e-01 L2_fnorm:5.0246e-01 L3_fnorm:5.0161e-01 L4_fnorm:5.0274e-01 L5_fnorm:5.0531e-01 L6_fnorm:5.0243e-01 L7_fnorm:5.0964e-01 L8_fnorm:5.0551e-01 L9_fnorm:5.1744e-01 L10_fnorm:5.3263e-01 L11_fnorm:5.3781e-01 L12_fnorm:5.2283e-01 L1_l1linf:6.3292e-01 L2_l1linf:5.8749e-01 L3_l1linf:6.4994e-01 L4_l1linf:6.5624e-01 L5_l1linf:5.6933e-01 L6_l1linf:6.2264e-01 L7_l1linf:6.2549e-01 L8_l1linf:5.5713e-01 L9_l1linf:5.4596e-01 L10_l1linf:5.4279e-01 L11_l1linf:5.5698e-01 L12_l1linf:5.7290e-01 L1_spectral:8.3857e-02 L2_spectral:8.0512e-02 L3_spectral:9.4440e-02 L4_spectral:9.7044e-02 L5_spectral:7.6652e-02 L6_spectral:8.4368e-02 L7_spectral:7.8218e-02 L8_spectral:7.5120e-02 L9_spectral:6.0610e-02 L10_spectral:4.6585e-02 L11_spectral:5.2897e-02 L12_spectral:6.7289e-02 v_norm:2.2146e+00 cos_v_-g_hvp:6.9593e-02 g_hvp_norm:4.2105e-01 cos_v_-g_t:7.9057e-02 g_t_norm:3.7303e-01 hv_norm:7.0990e-01 cos_v_hv:3.1062e-02 hg_norm:9.4472e+00 cos_g_hg:4.2200e-01 v_par:6.8824e-03 v_perp:2.2146e+00 L1_cos_v_neg_g:1.5908e-01 L1_v_norm:4.9367e-01 L2_cos_v_neg_g:4.7044e-02 L2_v_norm:5.0246e-01 L3_cos_v_neg_g:5.6248e-02 L3_v_norm:5.0161e-01 L4_cos_v_neg_g:5.6436e-02 L4_v_norm:5.0274e-01 L5_cos_v_neg_g:5.3447e-02 L5_v_norm:5.0531e-01 L6_cos_v_neg_g:6.3628e-02 L6_v_norm:5.0243e-01 L7_cos_v_neg_g:7.3853e-02 L7_v_norm:5.0964e-01 L8_cos_v_neg_g:6.8333e-02 L8_v_norm:5.0551e-01 L9_cos_v_neg_g:6.7109e-02 L9_v_norm:5.1744e-01 L10_cos_v_neg_g:7.6217e-02 L10_v_norm:5.3263e-01 L11_cos_v_neg_g:1.0197e-01 L11_v_norm:5.3781e-01 L12_cos_v_neg_g:1.3264e-01 L12_v_norm:5.2283e-01 +step:5500 train loss:3.526157 +step:5501 train loss:3.553909 +step:5502 train loss:3.531338 +step:5503 train loss:3.523402 +step:5504 train loss:3.601077 +step:5505 train loss:3.589960 +step:5506 train loss:3.571830 +step:5507 train loss:3.547672 +step:5508 train loss:3.598109 +step:5509 train loss:3.512427 +step:5510 train loss:3.627881 +step:5511 train loss:3.652025 +step:5512 train loss:3.569264 +step:5513 train loss:3.570815 +step:5514 train loss:3.590853 +step:5515 train loss:3.542910 +step:5516 train loss:3.475767 +step:5517 train loss:3.556516 +step:5518 train loss:3.538315 +step:5519 train loss:3.543530 +step:5520 train loss:3.574074 +step:5521 train loss:3.515127 +step:5522 train loss:3.544307 +step:5523 train loss:3.564650 +step:5524 train loss:3.547570 +step:5525 train loss:3.488934 +step:5526 train loss:3.584926 +step:5527 train loss:3.588157 +step:5528 train loss:3.622973 +step:5529 train loss:3.503377 +step:5530 train loss:3.593164 +step:5531 train loss:3.564125 +step:5532 train loss:3.555356 +step:5533 train loss:3.548866 +step:5534 train loss:3.601181 +step:5535 train loss:3.595817 +step:5536 train loss:3.714708 +step:5537 train loss:3.532860 +step:5538 train loss:3.585237 +step:5539 train loss:3.559722 
+step:5540 train loss:3.543171 +step:5541 train loss:3.573995 +step:5542 train loss:3.555070 +step:5543 train loss:3.535100 +step:5544 train loss:3.572267 +step:5545 train loss:3.603945 +step:5546 train loss:3.526614 +step:5547 train loss:3.617092 +step:5548 train loss:3.578025 +step:5549 train loss:3.615337 +step:5550 train loss:3.627415 +step:5551 train loss:3.573843 +step:5552 train loss:3.531024 +step:5553 train loss:3.583109 +step:5554 train loss:3.627903 +step:5555 train loss:3.581673 +step:5556 train loss:3.561565 +step:5557 train loss:3.579293 +step:5558 train loss:3.575837 +step:5559 train loss:3.559252 +step:5560 train loss:3.536470 +step:5561 train loss:3.564892 +step:5562 train loss:3.531064 +step:5563 train loss:3.573152 +step:5564 train loss:3.587204 +step:5565 train loss:3.590748 +step:5566 train loss:3.568462 +step:5567 train loss:3.576166 +step:5568 train loss:3.505193 +step:5569 train loss:3.533056 +step:5570 train loss:3.587639 +step:5571 train loss:3.550208 +step:5572 train loss:3.562935 +step:5573 train loss:3.595661 +step:5574 train loss:3.569053 +step:5575 train loss:3.562439 +step:5576 train loss:3.595564 +step:5577 train loss:3.527441 +step:5578 train loss:3.564949 +step:5579 train loss:3.573428 +step:5580 train loss:3.612617 +step:5581 train loss:3.521805 +step:5582 train loss:3.588824 +step:5583 train loss:3.526370 +step:5584 train loss:3.624272 +step:5585 train loss:3.711332 +step:5586 train loss:3.518297 +step:5587 train loss:3.548376 +step:5588 train loss:3.543047 +step:5589 train loss:3.536222 +step:5590 train loss:3.577519 +step:5591 train loss:3.552935 +step:5592 train loss:3.542901 +step:5593 train loss:3.530352 +step:5594 train loss:3.582704 +step:5595 train loss:3.554790 +step:5596 train loss:3.552177 +step:5597 train loss:3.515637 +step:5598 train loss:3.595015 +step:5599 train loss:3.609810 +step:5600 train loss:3.509944 +step:5601 train loss:3.513955 +step:5602 train loss:3.577482 +step:5603 train loss:3.574710 +step:5604 train loss:3.520321 +step:5605 train loss:3.669505 +step:5606 train loss:3.508235 +step:5607 train loss:3.549973 +step:5608 train loss:3.527670 +step:5609 train loss:3.587818 +step:5610 train loss:3.577213 +step:5611 train loss:3.578269 +step:5612 train loss:3.564888 +step:5613 train loss:3.516346 +step:5614 train loss:3.544770 +step:5615 train loss:3.637086 +step:5616 train loss:3.560610 +step:5617 train loss:3.595779 +step:5618 train loss:3.542623 +step:5619 train loss:3.505608 +step:5620 train loss:3.510109 +step:5621 train loss:3.567324 +step:5622 train loss:3.551172 +step:5623 train loss:3.501728 +step:5624 train loss:3.501829 +step:5625 train loss:3.506538 +step:5626 train loss:3.490497 +step:5627 train loss:3.561278 +step:5628 train loss:3.558533 +step:5629 train loss:3.539382 +step:5630 train loss:3.584750 +step:5631 train loss:3.550738 +step:5632 train loss:3.602607 +step:5633 train loss:3.572423 +step:5634 train loss:3.592095 +step:5635 train loss:3.554143 +step:5636 train loss:3.529780 +step:5637 train loss:3.529540 +step:5638 train loss:3.548645 +step:5639 train loss:3.458139 +step:5640 train loss:3.560387 +step:5641 train loss:3.515735 +step:5642 train loss:3.526622 +step:5643 train loss:3.566183 +step:5644 train loss:3.595622 +step:5645 train loss:3.565078 +step:5646 train loss:3.564944 +step:5647 train loss:3.563340 +step:5648 train loss:3.575664 +step:5649 train loss:3.569602 +step:5650 train loss:3.549639 +step:5651 train loss:3.532821 +step:5652 train loss:3.567094 +step:5653 train loss:3.573122 +step:5654 train 
loss:3.628934 +step:5655 train loss:3.531923 +step:5656 train loss:3.521634 +step:5657 train loss:3.605709 +step:5658 train loss:3.552925 +step:5659 train loss:3.606520 +step:5660 train loss:3.615633 +step:5661 train loss:3.539044 +step:5662 train loss:3.522341 +step:5663 train loss:3.566184 +step:5664 train loss:3.547720 +step:5665 train loss:3.564431 +step:5666 train loss:3.563490 +step:5667 train loss:3.675655 +step:5668 train loss:3.569715 +step:5669 train loss:3.571739 +step:5670 train loss:3.595257 +step:5671 train loss:3.600971 +step:5672 train loss:3.606688 +step:5673 train loss:3.576295 +step:5674 train loss:3.536304 +step:5675 train loss:3.568355 +step:5676 train loss:3.546389 +step:5677 train loss:3.561611 +step:5678 train loss:3.570802 +step:5679 train loss:3.654997 +step:5680 train loss:3.551985 +step:5681 train loss:3.576885 +step:5682 train loss:3.560946 +step:5683 train loss:3.568277 +step:5684 train loss:3.555475 +step:5685 train loss:3.574288 +step:5686 train loss:3.562322 +step:5687 train loss:3.517978 +step:5688 train loss:3.501412 +step:5689 train loss:3.492583 +step:5690 train loss:3.519650 +step:5691 train loss:3.598391 +step:5692 train loss:3.528415 +step:5693 train loss:3.483716 +step:5694 train loss:3.537127 +step:5695 train loss:3.682073 +step:5696 train loss:3.647031 +step:5697 train loss:3.537908 +step:5698 train loss:3.515637 +step:5699 train loss:3.591915 +step:5700 train loss:3.647637 +step:5701 train loss:3.557471 +step:5702 train loss:3.611520 +step:5703 train loss:3.661534 +step:5704 train loss:3.569798 +step:5705 train loss:3.602175 +step:5706 train loss:3.597270 +step:5707 train loss:3.536444 +step:5708 train loss:3.519167 +step:5709 train loss:3.544168 +step:5710 train loss:3.502964 +step:5711 train loss:3.560181 +step:5712 train loss:3.518082 +step:5713 train loss:3.539114 +step:5714 train loss:3.576003 +step:5715 train loss:3.613436 +step:5716 train loss:3.544046 +step:5717 train loss:3.621522 +step:5718 train loss:3.615332 +step:5719 train loss:3.552114 +step:5720 train loss:3.553533 +step:5721 train loss:3.523037 +step:5722 train loss:3.575274 +step:5723 train loss:3.521684 +step:5724 train loss:3.495177 +step:5725 train loss:3.550090 +step:5726 train loss:3.509444 +step:5727 train loss:3.626158 +step:5728 train loss:3.497448 +step:5729 train loss:3.563147 +step:5730 train loss:3.543606 +step:5731 train loss:3.444689 +step:5732 train loss:3.520894 +step:5733 train loss:3.501160 +step:5734 train loss:3.559679 +step:5735 train loss:3.459892 +step:5736 train loss:3.522540 +step:5737 train loss:3.513657 +step:5738 train loss:3.533121 +step:5739 train loss:3.570240 +step:5740 train loss:3.501238 +step:5741 train loss:3.508661 +step:5742 train loss:3.551954 +step:5743 train loss:3.508473 +step:5744 train loss:3.574354 +step:5745 train loss:3.552672 +step:5746 train loss:3.563619 +step:5747 train loss:3.631497 +step:5748 train loss:3.527915 +step:5749 train loss:3.531686 +step:5750 validation loss:3.510371 +step:5750 train loss:3.556264 +step:5751 train loss:3.539740 +step:5752 train loss:3.579032 +step:5753 train loss:3.570133 +step:5754 train loss:3.542808 +step:5755 train loss:3.523101 +step:5756 train loss:3.535263 +step:5757 train loss:3.589494 +step:5758 train loss:3.559676 +step:5759 train loss:3.579604 +step:5760 train loss:3.595566 +step:5761 train loss:3.584126 +step:5762 train loss:3.513858 +step:5763 train loss:3.479325 +step:5764 train loss:3.574092 +step:5765 train loss:3.539221 +step:5766 train loss:3.521094 +step:5767 train loss:3.541978 
+step:5768 train loss:3.527565 +step:5769 train loss:3.558314 +step:5770 train loss:3.605026 +step:5771 train loss:3.559522 +step:5772 train loss:3.542619 +step:5773 train loss:3.473112 +step:5774 train loss:3.513078 +step:5775 train loss:3.594980 +step:5776 train loss:3.600404 +step:5777 train loss:3.511579 +step:5778 train loss:3.526394 +step:5779 train loss:3.517041 +step:5780 train loss:3.554153 +step:5781 train loss:3.523178 +step:5782 train loss:3.578928 +step:5783 train loss:3.572285 +step:5784 train loss:3.556841 +step:5785 train loss:3.573518 +step:5786 train loss:3.538462 +step:5787 train loss:3.528047 +step:5788 train loss:3.513854 +step:5789 train loss:3.555392 +step:5790 train loss:3.502833 +step:5791 train loss:3.518324 +step:5792 train loss:3.564601 +step:5793 train loss:3.473996 +step:5794 train loss:3.563467 +step:5795 train loss:3.570284 +step:5796 train loss:3.519571 +step:5797 train loss:3.512956 +step:5798 train loss:3.527587 +step:5799 train loss:3.524231 +step:5800 train loss:3.577008 +step:5801 train loss:3.514354 +step:5802 train loss:3.580981 +step:5803 train loss:3.560595 +step:5804 train loss:3.477961 +step:5805 train loss:3.535405 +step:5806 train loss:3.525944 +step:5807 train loss:3.530251 +step:5808 train loss:3.520059 +step:5809 train loss:3.479251 +step:5810 train loss:3.519996 +step:5811 train loss:3.546179 +step:5812 train loss:3.554405 +step:5813 train loss:3.509798 +step:5814 train loss:3.477956 +step:5815 train loss:3.514576 +step:5816 train loss:3.527241 +step:5817 train loss:3.558064 +step:5818 train loss:3.532885 +step:5819 train loss:3.525039 +step:5820 train loss:3.556683 +step:5821 train loss:3.508164 +step:5822 train loss:3.574323 +step:5823 train loss:3.506965 +step:5824 train loss:3.507677 +step:5825 train loss:3.552562 +step:5826 train loss:3.565464 +step:5827 train loss:3.552485 +step:5828 train loss:3.515137 +step:5829 train loss:3.544482 +step:5830 train loss:3.530166 +step:5831 train loss:3.465965 +step:5832 train loss:3.519827 +step:5833 train loss:3.492943 +step:5834 train loss:3.551977 +step:5835 train loss:3.510183 +step:5836 train loss:3.546912 +step:5837 train loss:3.567031 +step:5838 train loss:3.555544 +step:5839 train loss:3.563527 +step:5840 train loss:3.534846 +step:5841 train loss:3.526481 +step:5842 train loss:3.549631 +step:5843 train loss:3.489454 +step:5844 train loss:3.523693 +step:5845 train loss:3.516870 +step:5846 train loss:3.525970 +step:5847 train loss:3.538679 +step:5848 train loss:3.557047 +step:5849 train loss:3.560765 +step:5850 train loss:3.535627 +step:5851 train loss:3.536676 +step:5852 train loss:3.608515 +step:5853 train loss:3.522583 +step:5854 train loss:3.505914 +step:5855 train loss:3.493826 +step:5856 train loss:3.490849 +step:5857 train loss:3.522051 +step:5858 train loss:3.515590 +step:5859 train loss:3.558463 +step:5860 train loss:3.520367 +step:5861 train loss:3.523351 +step:5862 train loss:3.598290 +step:5863 train loss:3.562016 +step:5864 train loss:3.605998 +step:5865 train loss:3.599299 +step:5866 train loss:3.491301 +step:5867 train loss:3.639002 +step:5868 train loss:3.553929 +step:5869 train loss:3.602271 +step:5870 train loss:3.524157 +step:5871 train loss:3.495002 +step:5872 train loss:3.565234 +step:5873 train loss:3.538832 +step:5874 train loss:3.519197 +step:5875 train loss:3.516014 +step:5876 train loss:3.548854 +step:5877 train loss:3.644301 +step:5878 train loss:3.497611 +step:5879 train loss:3.617348 +step:5880 train loss:3.865879 +step:5881 train loss:3.533304 +step:5882 train 
loss:3.589346 +step:5883 train loss:3.482980 +step:5884 train loss:3.509420 +step:5885 train loss:3.558585 +step:5886 train loss:3.548539 +step:5887 train loss:3.607685 +step:5888 train loss:3.570160 +step:5889 train loss:3.558625 +step:5890 train loss:3.508146 +step:5891 train loss:3.505440 +step:5892 train loss:3.511049 +step:5893 train loss:3.513311 +step:5894 train loss:3.575331 +step:5895 train loss:3.537665 +step:5896 train loss:3.519455 +step:5897 train loss:3.550817 +step:5898 train loss:3.588268 +step:5899 train loss:3.521956 +step:5900 train loss:3.559089 +step:5901 train loss:3.511783 +step:5902 train loss:3.514306 +step:5903 train loss:3.494010 +step:5904 train loss:3.522704 +step:5905 train loss:3.580323 +step:5906 train loss:3.482644 +step:5907 train loss:3.483028 +step:5908 train loss:3.543742 +step:5909 train loss:3.521551 +step:5910 train loss:3.540080 +step:5911 train loss:3.513297 +step:5912 train loss:3.503485 +step:5913 train loss:3.543998 +step:5914 train loss:3.521040 +step:5915 train loss:3.550915 +step:5916 train loss:3.506408 +step:5917 train loss:3.558324 +step:5918 train loss:3.554868 +step:5919 train loss:3.589704 +step:5920 train loss:3.537201 +step:5921 train loss:3.577453 +step:5922 train loss:3.491727 +step:5923 train loss:3.539129 +step:5924 train loss:3.502263 +step:5925 train loss:3.543254 +step:5926 train loss:3.566579 +step:5927 train loss:3.571304 +step:5928 train loss:3.511506 +step:5929 train loss:3.553847 +step:5930 train loss:3.467702 +step:5931 train loss:3.519365 +step:5932 train loss:3.460835 +step:5933 train loss:3.579404 +step:5934 train loss:3.530654 +step:5935 train loss:3.509621 +step:5936 train loss:3.496007 +step:5937 train loss:3.517323 +step:5938 train loss:3.554851 +step:5939 train loss:3.519454 +step:5940 train loss:3.495097 +step:5941 train loss:3.551959 +step:5942 train loss:3.518507 +step:5943 train loss:3.543530 +step:5944 train loss:3.503413 +step:5945 train loss:3.546300 +step:5946 train loss:3.508010 +step:5947 train loss:3.525352 +step:5948 train loss:3.619035 +step:5949 train loss:3.538328 +step:5950 train loss:3.477211 +step:5951 train loss:3.598362 +step:5952 train loss:3.485124 +step:5953 train loss:3.518710 +step:5954 train loss:3.511226 +step:5955 train loss:3.537071 +step:5956 train loss:3.503033 +step:5957 train loss:3.576075 +step:5958 train loss:3.453925 +step:5959 train loss:3.565834 +step:5960 train loss:3.473067 +step:5961 train loss:3.566867 +step:5962 train loss:3.492032 +step:5963 train loss:3.558416 +step:5964 train loss:3.516997 +step:5965 train loss:3.540740 +step:5966 train loss:3.493231 +step:5967 train loss:3.567136 +step:5968 train loss:3.476665 +step:5969 train loss:3.546152 +step:5970 train loss:3.527374 +step:5971 train loss:3.548753 +step:5972 train loss:3.539656 +step:5973 train loss:3.513536 +step:5974 train loss:3.504670 +step:5975 train loss:3.543337 +step:5976 train loss:3.530996 +step:5977 train loss:3.547511 +step:5978 train loss:3.492559 +step:5979 train loss:3.545702 +step:5980 train loss:3.522757 +step:5981 train loss:3.534944 +step:5982 train loss:3.549474 +step:5983 train loss:3.631955 +step:5984 train loss:3.550776 +step:5985 train loss:3.607893 +step:5986 train loss:3.547420 +step:5987 train loss:3.563677 +step:5988 train loss:3.507646 +step:5989 train loss:3.550071 +step:5990 train loss:3.507157 +step:5991 train loss:3.462463 +step:5992 train loss:3.502892 +step:5993 train loss:3.478763 +step:5994 train loss:3.543909 +step:5995 train loss:3.505166 +step:5996 train loss:3.536262 
+step:5997 train loss:3.527597 +step:5998 train loss:3.557077 +step:5999 train loss:3.478164 +step:6000 validation loss:3.503052 total_sharp:6.5831e-03 L1_sharp:1.3997e-02 L2_sharp:5.2490e-04 L3_sharp:1.1080e-03 L4_sharp:1.4555e-03 L5_sharp:9.5226e-04 L6_sharp:1.6434e-03 L7_sharp:1.5477e-03 L8_sharp:2.4918e-03 L9_sharp:1.8238e-03 L10_sharp:9.3420e-04 L11_sharp:8.6472e-04 L12_sharp:3.2599e-03 total_fnorm:2.2074e+00 total_l1_linf:1.9590e+04 total_spectral:2.2074e+00 L1_fnorm:4.8277e-01 L2_fnorm:4.9826e-01 L3_fnorm:4.9380e-01 L4_fnorm:4.9776e-01 L5_fnorm:4.9180e-01 L6_fnorm:5.0236e-01 L7_fnorm:5.0938e-01 L8_fnorm:5.0481e-01 L9_fnorm:5.1819e-01 L10_fnorm:5.3191e-01 L11_fnorm:5.3710e-01 L12_fnorm:5.2855e-01 L1_l1linf:6.4525e-01 L2_l1linf:6.0181e-01 L3_l1linf:6.0606e-01 L4_l1linf:5.8714e-01 L5_l1linf:5.1695e-01 L6_l1linf:5.8309e-01 L7_l1linf:5.7603e-01 L8_l1linf:6.0848e-01 L9_l1linf:5.4577e-01 L10_l1linf:5.5549e-01 L11_l1linf:5.6999e-01 L12_l1linf:5.7458e-01 L1_spectral:8.1351e-02 L2_spectral:7.9808e-02 L3_spectral:7.9845e-02 L4_spectral:8.6730e-02 L5_spectral:7.0044e-02 L6_spectral:8.3865e-02 L7_spectral:7.6769e-02 L8_spectral:7.2345e-02 L9_spectral:5.7688e-02 L10_spectral:4.6194e-02 L11_spectral:5.6393e-02 L12_spectral:8.5787e-02 v_norm:2.2074e+00 cos_v_-g_hvp:6.9704e-02 g_hvp_norm:3.5968e-01 cos_v_-g_t:8.4583e-02 g_t_norm:2.9861e-01 hv_norm:5.0584e-01 cos_v_hv:2.8727e-02 hg_norm:1.0451e+01 cos_g_hg:3.6288e-01 v_par:1.0222e-02 v_perp:2.2073e+00 L1_cos_v_neg_g:1.3534e-01 L1_v_norm:4.8277e-01 L2_cos_v_neg_g:6.8802e-02 L2_v_norm:4.9826e-01 L3_cos_v_neg_g:6.1102e-02 L3_v_norm:4.9380e-01 L4_cos_v_neg_g:5.8686e-02 L4_v_norm:4.9776e-01 L5_cos_v_neg_g:4.8021e-02 L5_v_norm:4.9180e-01 L6_cos_v_neg_g:5.7725e-02 L6_v_norm:5.0236e-01 L7_cos_v_neg_g:6.8064e-02 L7_v_norm:5.0938e-01 L8_cos_v_neg_g:6.6127e-02 L8_v_norm:5.0481e-01 L9_cos_v_neg_g:6.2220e-02 L9_v_norm:5.1819e-01 L10_cos_v_neg_g:7.6421e-02 L10_v_norm:5.3191e-01 L11_cos_v_neg_g:9.8998e-02 L11_v_norm:5.3710e-01 L12_cos_v_neg_g:1.3410e-01 L12_v_norm:5.2855e-01 +step:6000 train loss:3.584088 +step:6001 train loss:3.475860 +step:6002 train loss:3.555867 +step:6003 train loss:3.473709 +step:6004 train loss:3.534582 +step:6005 train loss:3.505139 +step:6006 train loss:3.534373 +step:6007 train loss:3.467580 +step:6008 train loss:3.514614 +step:6009 train loss:3.492839 +step:6010 train loss:3.486652 +step:6011 train loss:3.506237 +step:6012 train loss:3.569694 +step:6013 train loss:3.494022 +step:6014 train loss:3.576260 +step:6015 train loss:3.509041 +step:6016 train loss:3.513400 +step:6017 train loss:3.516898 +step:6018 train loss:3.489821 +step:6019 train loss:3.501141 +step:6020 train loss:3.453476 +step:6021 train loss:3.568734 +step:6022 train loss:3.534743 +step:6023 train loss:3.507932 +step:6024 train loss:3.536628 +step:6025 train loss:3.543732 +step:6026 train loss:3.549005 +step:6027 train loss:3.520334 +step:6028 train loss:3.582878 +step:6029 train loss:3.473960 +step:6030 train loss:3.511245 +step:6031 train loss:3.489689 +step:6032 train loss:3.505329 +step:6033 train loss:3.475353 +step:6034 train loss:3.566295 +step:6035 train loss:3.510821 +step:6036 train loss:3.559963 +step:6037 train loss:3.500528 +step:6038 train loss:3.525162 +step:6039 train loss:3.508207 +step:6040 train loss:3.505781 +step:6041 train loss:3.494472 +step:6042 train loss:3.530043 +step:6043 train loss:3.560675 +step:6044 train loss:3.537651 +step:6045 train loss:3.551667 +step:6046 train loss:3.563265 +step:6047 train loss:3.552026 +step:6048 train loss:3.532820 
+step:6049 train loss:3.568180 +step:6050 train loss:3.585406 +step:6051 train loss:3.579356 +step:6052 train loss:3.531784 +step:6053 train loss:3.506499 +step:6054 train loss:3.540413 +step:6055 train loss:3.605181 +step:6056 train loss:3.554894 +step:6057 train loss:3.568329 +step:6058 train loss:3.550184 +step:6059 train loss:3.486418 +step:6060 train loss:3.517565 +step:6061 train loss:3.541434 +step:6062 train loss:3.538910 +step:6063 train loss:3.505113 +step:6064 train loss:3.517420 +step:6065 train loss:3.522938 +step:6066 train loss:3.549443 +step:6067 train loss:3.592325 +step:6068 train loss:3.568037 +step:6069 train loss:3.568692 +step:6070 train loss:3.567507 +step:6071 train loss:3.455181 +step:6072 train loss:3.512936 +step:6073 train loss:3.528544 +step:6074 train loss:3.621514 +step:6075 train loss:3.452911 +step:6076 train loss:3.503721 +step:6077 train loss:3.470454 +step:6078 train loss:3.525114 +step:6079 train loss:3.532398 +step:6080 train loss:3.562232 +step:6081 train loss:3.500103 +step:6082 train loss:3.538165 +step:6083 train loss:3.562785 +step:6084 train loss:3.485728 +step:6085 train loss:3.547436 +step:6086 train loss:3.510967 +step:6087 train loss:3.553190 +step:6088 train loss:3.605006 +step:6089 train loss:3.548448 +step:6090 train loss:3.440983 +step:6091 train loss:3.536163 +step:6092 train loss:3.528694 +step:6093 train loss:3.580345 +step:6094 train loss:3.521023 +step:6095 train loss:3.529263 +step:6096 train loss:3.608037 +step:6097 train loss:3.766717 +step:6098 train loss:3.477745 +step:6099 train loss:3.542942 +step:6100 train loss:3.466040 +step:6101 train loss:3.531096 +step:6102 train loss:3.504726 +step:6103 train loss:3.554873 +step:6104 train loss:3.510810 +step:6105 train loss:3.589322 +step:6106 train loss:3.575750 +step:6107 train loss:3.537366 +step:6108 train loss:3.517385 +step:6109 train loss:3.483465 +step:6110 train loss:3.528964 +step:6111 train loss:3.500124 +step:6112 train loss:3.502012 +step:6113 train loss:3.532560 +step:6114 train loss:3.548099 +step:6115 train loss:3.578471 +step:6116 train loss:3.529908 +step:6117 train loss:3.557965 +step:6118 train loss:3.553003 +step:6119 train loss:3.535196 +step:6120 train loss:3.517445 +step:6121 train loss:3.586207 +step:6122 train loss:3.564719 +step:6123 train loss:3.529070 +step:6124 train loss:3.562135 +step:6125 train loss:3.539236 +step:6126 train loss:3.591132 +step:6127 train loss:3.608025 +step:6128 train loss:3.609847 +step:6129 train loss:3.621100 +step:6130 train loss:3.526988 +step:6131 train loss:3.561899 +step:6132 train loss:3.657899 +step:6133 train loss:3.495670 +step:6134 train loss:3.489618 +step:6135 train loss:3.528420 +step:6136 train loss:3.557938 +step:6137 train loss:3.518024 +step:6138 train loss:3.577307 +step:6139 train loss:3.588032 +step:6140 train loss:3.583535 +step:6141 train loss:3.594077 +step:6142 train loss:3.558407 +step:6143 train loss:3.540837 +step:6144 train loss:3.541081 +step:6145 train loss:3.580194 +step:6146 train loss:3.622764 +step:6147 train loss:3.532019 +step:6148 train loss:3.577919 +step:6149 train loss:3.631768 +step:6150 train loss:3.600844 +step:6151 train loss:3.481456 +step:6152 train loss:3.539243 +step:6153 train loss:3.479326 +step:6154 train loss:3.565995 +step:6155 train loss:3.539298 +step:6156 train loss:3.572940 +step:6157 train loss:3.487497 +step:6158 train loss:3.557126 +step:6159 train loss:3.704782 +step:6160 train loss:3.528175 +step:6161 train loss:3.550240 +step:6162 train loss:3.565703 +step:6163 train 
loss:3.548791 +step:6164 train loss:3.631339 +step:6165 train loss:3.561609 +step:6166 train loss:3.543918 +step:6167 train loss:3.555310 +step:6168 train loss:3.505768 +step:6169 train loss:3.450954 +step:6170 train loss:3.557786 +step:6171 train loss:3.477080 +step:6172 train loss:3.529029 +step:6173 train loss:3.547044 +step:6174 train loss:3.518232 +step:6175 train loss:3.547384 +step:6176 train loss:3.579046 +step:6177 train loss:3.541016 +step:6178 train loss:3.485023 +step:6179 train loss:3.560481 +step:6180 train loss:3.525784 +step:6181 train loss:3.517591 +step:6182 train loss:3.548313 +step:6183 train loss:3.547717 +step:6184 train loss:3.512857 +step:6185 train loss:3.547369 +step:6186 train loss:3.521817 +step:6187 train loss:3.510839 +step:6188 train loss:3.499406 +step:6189 train loss:3.522223 +step:6190 train loss:3.548653 +step:6191 train loss:3.518886 +step:6192 train loss:3.589178 +step:6193 train loss:3.512496 +step:6194 train loss:3.573891 +step:6195 train loss:3.594532 +step:6196 train loss:3.595756 +step:6197 train loss:3.523892 +step:6198 train loss:3.516607 +step:6199 train loss:3.546800 +step:6200 train loss:3.629982 +step:6201 train loss:3.577694 +step:6202 train loss:3.542935 +step:6203 train loss:3.575083 +step:6204 train loss:3.519360 +step:6205 train loss:3.585224 +step:6206 train loss:3.542083 +step:6207 train loss:3.541995 +step:6208 train loss:3.537728 +step:6209 train loss:3.567872 +step:6210 train loss:3.628391 +step:6211 train loss:3.584874 +step:6212 train loss:3.475550 +step:6213 train loss:3.530688 +step:6214 train loss:3.579976 +step:6215 train loss:3.610649 +step:6216 train loss:3.647509 +step:6217 train loss:3.576939 +step:6218 train loss:3.513756 +step:6219 train loss:3.532404 +step:6220 train loss:3.572918 +step:6221 train loss:3.521891 +step:6222 train loss:3.512675 +step:6223 train loss:3.575412 +step:6224 train loss:3.584039 +step:6225 train loss:3.525752 +step:6226 train loss:3.553440 +step:6227 train loss:3.556195 +step:6228 train loss:3.522517 +step:6229 train loss:3.504702 +step:6230 train loss:3.495378 +step:6231 train loss:3.537420 +step:6232 train loss:3.613732 +step:6233 train loss:3.482912 +step:6234 train loss:3.516806 +step:6235 train loss:3.546182 +step:6236 train loss:3.543087 +step:6237 train loss:3.509349 +step:6238 train loss:3.545335 +step:6239 train loss:3.571894 +step:6240 train loss:3.510044 +step:6241 train loss:3.533779 +step:6242 train loss:3.549546 +step:6243 train loss:3.548889 +step:6244 train loss:3.512687 +step:6245 train loss:3.589911 +step:6246 train loss:3.563695 +step:6247 train loss:3.518088 +step:6248 train loss:3.524590 +step:6249 train loss:3.524518 +step:6250 validation loss:3.493735 +step:6250 train loss:3.568952 +step:6251 train loss:3.588606 +step:6252 train loss:3.537647 +step:6253 train loss:3.524451 +step:6254 train loss:3.556752 +step:6255 train loss:3.543313 +step:6256 train loss:3.524573 +step:6257 train loss:3.548697 +step:6258 train loss:3.566869 +step:6259 train loss:3.552605 +step:6260 train loss:3.552530 +step:6261 train loss:3.555902 +step:6262 train loss:3.707272 +step:6263 train loss:3.590979 +step:6264 train loss:3.522332 +step:6265 train loss:3.480607 +step:6266 train loss:3.546369 +step:6267 train loss:3.520259 +step:6268 train loss:3.511173 +step:6269 train loss:3.502795 +step:6270 train loss:3.531505 +step:6271 train loss:3.506551 +step:6272 train loss:3.508415 +step:6273 train loss:3.519735 +step:6274 train loss:3.544260 +step:6275 train loss:3.556171 +step:6276 train loss:3.606493 
+step:6277 train loss:3.536648 +step:6278 train loss:3.566291 +step:6279 train loss:3.462544 +step:6280 train loss:3.463198 +step:6281 train loss:3.495352 +step:6282 train loss:3.531499 +step:6283 train loss:3.511797 +step:6284 train loss:3.556232 +step:6285 train loss:3.530817 +step:6286 train loss:3.626311 +step:6287 train loss:3.530294 +step:6288 train loss:3.499433 +step:6289 train loss:3.555602 +step:6290 train loss:3.516451 +step:6291 train loss:3.554404 +step:6292 train loss:3.505932 +step:6293 train loss:3.522663 +step:6294 train loss:3.509844 +step:6295 train loss:3.582213 +step:6296 train loss:3.514125 +step:6297 train loss:3.549049 +step:6298 train loss:3.545284 +step:6299 train loss:3.559148 +step:6300 train loss:3.525808 +step:6301 train loss:3.618872 +step:6302 train loss:3.526741 +step:6303 train loss:3.594413 +step:6304 train loss:3.524612 +step:6305 train loss:3.533286 +step:6306 train loss:3.530470 +step:6307 train loss:3.512928 +step:6308 train loss:3.524559 +step:6309 train loss:3.578318 +step:6310 train loss:3.620436 +step:6311 train loss:3.523912 +step:6312 train loss:3.590244 +step:6313 train loss:3.545836 +step:6314 train loss:3.541218 +step:6315 train loss:3.507330 +step:6316 train loss:3.590976 +step:6317 train loss:3.492516 +step:6318 train loss:3.514737 +step:6319 train loss:3.548610 +step:6320 train loss:3.634278 +step:6321 train loss:3.571170 +step:6322 train loss:3.521908 +step:6323 train loss:3.566045 +step:6324 train loss:3.641429 +step:6325 train loss:3.550165 +step:6326 train loss:3.517737 +step:6327 train loss:3.536159 +step:6328 train loss:3.519074 +step:6329 train loss:3.567490 +step:6330 train loss:3.604246 +step:6331 train loss:3.524610 +step:6332 train loss:3.598810 +step:6333 train loss:3.528101 +step:6334 train loss:3.519979 +step:6335 train loss:3.546546 +step:6336 train loss:3.560804 +step:6337 train loss:3.512030 +step:6338 train loss:3.526718 +step:6339 train loss:3.555262 +step:6340 train loss:3.494836 +step:6341 train loss:3.550389 +step:6342 train loss:3.575211 +step:6343 train loss:3.534389 +step:6344 train loss:3.524629 +step:6345 train loss:3.524938 +step:6346 train loss:3.522010 +step:6347 train loss:3.521070 +step:6348 train loss:3.559340 +step:6349 train loss:3.576077 +step:6350 train loss:3.561684 +step:6351 train loss:3.564716 +step:6352 train loss:3.534711 +step:6353 train loss:3.539977 +step:6354 train loss:3.510497 +step:6355 train loss:3.498837 +step:6356 train loss:3.526099 +step:6357 train loss:3.559008 +step:6358 train loss:3.538378 +step:6359 train loss:3.567842 +step:6360 train loss:3.554261 +step:6361 train loss:3.517229 +step:6362 train loss:3.589100 +step:6363 train loss:3.558559 +step:6364 train loss:3.581630 +step:6365 train loss:3.464904 +step:6366 train loss:3.539322 +step:6367 train loss:3.530479 +step:6368 train loss:3.543913 +step:6369 train loss:3.555979 +step:6370 train loss:3.557937 +step:6371 train loss:3.637968 +step:6372 train loss:3.498503 +step:6373 train loss:3.582195 +step:6374 train loss:3.539730 +step:6375 train loss:3.581346 +step:6376 train loss:3.518358 +step:6377 train loss:3.526909 +step:6378 train loss:3.501965 +step:6379 train loss:3.584596 +step:6380 train loss:3.502994 +step:6381 train loss:3.555571 +step:6382 train loss:3.549699 +step:6383 train loss:3.515360 +step:6384 train loss:3.526841 +step:6385 train loss:3.592224 +step:6386 train loss:3.536894 +step:6387 train loss:3.550583 +step:6388 train loss:3.489279 +step:6389 train loss:3.568651 +step:6390 train loss:3.512657 +step:6391 train 
loss:3.577366 +step:6392 train loss:3.536440 +step:6393 train loss:3.509588 +step:6394 train loss:3.524873 +step:6395 train loss:3.522651 +step:6396 train loss:3.572096 +step:6397 train loss:3.487170 +step:6398 train loss:3.561351 +step:6399 train loss:3.484103 +step:6400 train loss:3.498541 +step:6401 train loss:3.541579 +step:6402 train loss:3.567638 +step:6403 train loss:3.535139 +step:6404 train loss:3.528816 +step:6405 train loss:3.510850 +step:6406 train loss:3.552980 +step:6407 train loss:3.547004 +step:6408 train loss:3.494761 +step:6409 train loss:3.491154 +step:6410 train loss:3.444567 +step:6411 train loss:3.504005 +step:6412 train loss:3.467459 +step:6413 train loss:3.531950 +step:6414 train loss:3.541245 +step:6415 train loss:3.535118 +step:6416 train loss:3.576913 +step:6417 train loss:3.508032 +step:6418 train loss:3.556090 +step:6419 train loss:3.585338 +step:6420 train loss:3.586299 +step:6421 train loss:3.561079 +step:6422 train loss:3.619389 +step:6423 train loss:3.526432 +step:6424 train loss:3.562488 +step:6425 train loss:3.581801 +step:6426 train loss:3.501069 +step:6427 train loss:3.578563 +step:6428 train loss:3.548176 +step:6429 train loss:3.517002 +step:6430 train loss:3.538465 +step:6431 train loss:3.548330 +step:6432 train loss:3.502301 +step:6433 train loss:3.518452 +step:6434 train loss:3.498323 +step:6435 train loss:3.530156 +step:6436 train loss:3.554391 +step:6437 train loss:3.651859 +step:6438 train loss:3.559384 +step:6439 train loss:3.557635 +step:6440 train loss:3.594211 +step:6441 train loss:3.515436 +step:6442 train loss:3.479300 +step:6443 train loss:3.522862 +step:6444 train loss:3.525635 +step:6445 train loss:3.569935 +step:6446 train loss:3.523547 +step:6447 train loss:3.561844 +step:6448 train loss:3.495972 +step:6449 train loss:3.487105 +step:6450 train loss:3.550011 +step:6451 train loss:3.476513 +step:6452 train loss:3.529996 +step:6453 train loss:3.508518 +step:6454 train loss:3.517015 +step:6455 train loss:3.513172 +step:6456 train loss:3.510533 +step:6457 train loss:3.516505 +step:6458 train loss:3.513121 +step:6459 train loss:3.468759 +step:6460 train loss:3.475416 +step:6461 train loss:3.560349 +step:6462 train loss:3.564827 +step:6463 train loss:3.553461 +step:6464 train loss:3.569148 +step:6465 train loss:3.467989 +step:6466 train loss:3.489578 +step:6467 train loss:3.500707 +step:6468 train loss:3.553139 +step:6469 train loss:3.516296 +step:6470 train loss:3.502568 +step:6471 train loss:3.550611 +step:6472 train loss:3.512442 +step:6473 train loss:3.552408 +step:6474 train loss:3.535150 +step:6475 train loss:3.554034 +step:6476 train loss:3.550025 +step:6477 train loss:3.546403 +step:6478 train loss:3.534432 +step:6479 train loss:3.572727 +step:6480 train loss:3.480205 +step:6481 train loss:3.479462 +step:6482 train loss:3.511373 +step:6483 train loss:3.537847 +step:6484 train loss:3.504866 +step:6485 train loss:3.575598 +step:6486 train loss:3.565014 +step:6487 train loss:3.557155 +step:6488 train loss:3.592845 +step:6489 train loss:3.526084 +step:6490 train loss:3.624738 +step:6491 train loss:3.534311 +step:6492 train loss:3.519264 +step:6493 train loss:3.632026 +step:6494 train loss:3.558065 +step:6495 train loss:3.597232 +step:6496 train loss:3.575730 +step:6497 train loss:3.537317 +step:6498 train loss:3.567212 +step:6499 train loss:3.614915 +step:6500 validation loss:3.486370 total_sharp:6.4189e-03 L1_sharp:7.5991e-03 L2_sharp:5.4293e-04 L3_sharp:1.2627e-03 L4_sharp:1.5280e-03 L5_sharp:1.0295e-03 L6_sharp:1.3714e-03 
L7_sharp:1.2589e-03 L8_sharp:2.2937e-03 L9_sharp:2.0265e-03 L10_sharp:1.3528e-03 L11_sharp:1.0630e-03 L12_sharp:2.4910e-03 total_fnorm:2.2179e+00 total_l1_linf:1.9717e+04 total_spectral:2.2179e+00 L1_fnorm:4.9134e-01 L2_fnorm:5.0609e-01 L3_fnorm:4.9553e-01 L4_fnorm:4.9689e-01 L5_fnorm:4.9899e-01 L6_fnorm:5.0479e-01 L7_fnorm:5.0977e-01 L8_fnorm:5.0600e-01 L9_fnorm:5.2156e-01 L10_fnorm:5.3863e-01 L11_fnorm:5.4154e-01 L12_fnorm:5.3317e-01 L1_l1linf:6.0049e-01 L2_l1linf:5.9035e-01 L3_l1linf:6.9997e-01 L4_l1linf:6.4100e-01 L5_l1linf:6.0240e-01 L6_l1linf:6.2317e-01 L7_l1linf:6.0652e-01 L8_l1linf:5.6643e-01 L9_l1linf:5.5890e-01 L10_l1linf:5.6115e-01 L11_l1linf:5.7182e-01 L12_l1linf:5.9735e-01 L1_spectral:8.7899e-02 L2_spectral:8.0908e-02 L3_spectral:9.0754e-02 L4_spectral:9.2968e-02 L5_spectral:7.7406e-02 L6_spectral:8.7384e-02 L7_spectral:7.8329e-02 L8_spectral:7.3988e-02 L9_spectral:6.3592e-02 L10_spectral:4.7218e-02 L11_spectral:5.6265e-02 L12_spectral:7.2175e-02 v_norm:2.2179e+00 cos_v_-g_hvp:6.2194e-02 g_hvp_norm:4.1642e-01 cos_v_-g_t:7.2972e-02 g_t_norm:3.5761e-01 hv_norm:4.6194e-01 cos_v_hv:3.0818e-02 hg_norm:7.1173e+00 cos_g_hg:6.6141e-01 v_par:7.5243e-03 v_perp:2.2179e+00 L1_cos_v_neg_g:1.2286e-01 L1_v_norm:4.9134e-01 L2_cos_v_neg_g:5.8510e-02 L2_v_norm:5.0609e-01 L3_cos_v_neg_g:5.2136e-02 L3_v_norm:4.9553e-01 L4_cos_v_neg_g:5.5168e-02 L4_v_norm:4.9689e-01 L5_cos_v_neg_g:4.4112e-02 L5_v_norm:4.9899e-01 L6_cos_v_neg_g:5.4430e-02 L6_v_norm:5.0479e-01 L7_cos_v_neg_g:6.2911e-02 L7_v_norm:5.0977e-01 L8_cos_v_neg_g:5.6408e-02 L8_v_norm:5.0600e-01 L9_cos_v_neg_g:5.6754e-02 L9_v_norm:5.2156e-01 L10_cos_v_neg_g:6.6519e-02 L10_v_norm:5.3863e-01 L11_cos_v_neg_g:8.9280e-02 L11_v_norm:5.4154e-01 L12_cos_v_neg_g:1.1972e-01 L12_v_norm:5.3317e-01 +step:6500 train loss:3.542791 +step:6501 train loss:3.545090 +step:6502 train loss:3.607625 +step:6503 train loss:3.527823 +step:6504 train loss:3.537870 +step:6505 train loss:3.542648 +step:6506 train loss:3.572172 +step:6507 train loss:3.524348 +step:6508 train loss:3.509286 +step:6509 train loss:3.557977 +step:6510 train loss:3.542763 +step:6511 train loss:3.511522 +step:6512 train loss:3.506321 +step:6513 train loss:3.642371 +step:6514 train loss:3.613152 +step:6515 train loss:3.578859 +step:6516 train loss:3.562968 +step:6517 train loss:3.612736 +step:6518 train loss:3.533751 +step:6519 train loss:3.529370 +step:6520 train loss:3.513835 +step:6521 train loss:3.555442 +step:6522 train loss:3.561908 +step:6523 train loss:3.557317 +step:6524 train loss:3.610856 +step:6525 train loss:3.649094 +step:6526 train loss:3.598417 +step:6527 train loss:3.649737 +step:6528 train loss:3.549780 +step:6529 train loss:3.592844 +step:6530 train loss:3.518096 +step:6531 train loss:3.581207 +step:6532 train loss:3.533891 +step:6533 train loss:3.497317 +step:6534 train loss:3.516909 +step:6535 train loss:3.503040 +step:6536 train loss:3.586247 +step:6537 train loss:3.566923 +step:6538 train loss:3.611470 +step:6539 train loss:3.528997 +step:6540 train loss:3.601853 +step:6541 train loss:3.537004 +step:6542 train loss:3.537248 +step:6543 train loss:3.532940 +step:6544 train loss:3.525865 +step:6545 train loss:3.503372 +step:6546 train loss:3.530144 +step:6547 train loss:3.633248 +step:6548 train loss:3.609220 +step:6549 train loss:3.603947 +step:6550 train loss:3.498650 +step:6551 train loss:3.577422 +step:6552 train loss:3.538524 +step:6553 train loss:3.504813 +step:6554 train loss:3.528125 +step:6555 train loss:3.505836 +step:6556 train loss:3.510463 +step:6557 train 
loss:3.501428 +step:6558 train loss:3.510401 +step:6559 train loss:3.528652 +step:6560 train loss:3.504620 +step:6561 train loss:3.572180 +step:6562 train loss:3.571416 +step:6563 train loss:3.540437 +step:6564 train loss:3.558180 +step:6565 train loss:3.596535 +step:6566 train loss:3.682284 +step:6567 train loss:3.634945 +step:6568 train loss:3.520001 +step:6569 train loss:3.534785 +step:6570 train loss:3.544968 +step:6571 train loss:3.566782 +step:6572 train loss:3.611658 +step:6573 train loss:3.576831 +step:6574 train loss:3.520936 +step:6575 train loss:3.479659 +step:6576 train loss:3.556891 +step:6577 train loss:3.523722 +step:6578 train loss:3.589974 +step:6579 train loss:3.617620 +step:6580 train loss:3.643831 +step:6581 train loss:3.615607 +step:6582 train loss:3.664627 +step:6583 train loss:3.586921 +step:6584 train loss:3.625096 +step:6585 train loss:3.522163 +step:6586 train loss:3.588965 +step:6587 train loss:3.594684 +step:6588 train loss:3.549834 +step:6589 train loss:3.579994 +step:6590 train loss:3.559624 +step:6591 train loss:3.527355 +step:6592 train loss:3.579316 +step:6593 train loss:3.535685 +step:6594 train loss:3.546604 +step:6595 train loss:3.542404 +step:6596 train loss:3.562717 +step:6597 train loss:3.565440 +step:6598 train loss:3.551908 +step:6599 train loss:3.532854 +step:6600 train loss:3.527594 +step:6601 train loss:3.515160 +step:6602 train loss:3.567020 +step:6603 train loss:3.559067 +step:6604 train loss:3.594974 +step:6605 train loss:3.530944 +step:6606 train loss:3.572726 +step:6607 train loss:3.506761 +step:6608 train loss:3.568902 +step:6609 train loss:3.541766 +step:6610 train loss:3.544620 +step:6611 train loss:3.568192 +step:6612 train loss:3.579429 +step:6613 train loss:3.526843 +step:6614 train loss:3.605333 +step:6615 train loss:3.619417 +step:6616 train loss:3.558647 +step:6617 train loss:3.576501 +step:6618 train loss:3.522524 +step:6619 train loss:3.585604 +step:6620 train loss:3.503955 +step:6621 train loss:3.507754 +step:6622 train loss:3.535200 +step:6623 train loss:3.529590 +step:6624 train loss:3.574433 +step:6625 train loss:3.619500 +step:6626 train loss:3.516223 +step:6627 train loss:3.583345 +step:6628 train loss:3.524764 +step:6629 train loss:3.481013 +step:6630 train loss:3.515886 +step:6631 train loss:3.576740 +step:6632 train loss:3.498371 +step:6633 train loss:3.548376 +step:6634 train loss:3.612434 +step:6635 train loss:3.545146 +step:6636 train loss:3.571026 +step:6637 train loss:3.543738 +step:6638 train loss:3.501598 +step:6639 train loss:3.533832 +step:6640 train loss:3.565457 +step:6641 train loss:3.535017 +step:6642 train loss:3.464714 +step:6643 train loss:3.571106 +step:6644 train loss:3.511044 +step:6645 train loss:3.536642 +step:6646 train loss:3.530197 +step:6647 train loss:3.571941 +step:6648 train loss:3.534251 +step:6649 train loss:3.517537 +step:6650 train loss:3.538764 +step:6651 train loss:3.487344 +step:6652 train loss:3.571964 +step:6653 train loss:3.557439 +step:6654 train loss:3.525180 +step:6655 train loss:3.539822 +step:6656 train loss:3.533997 +step:6657 train loss:3.535071 +step:6658 train loss:3.517299 +step:6659 train loss:3.498713 +step:6660 train loss:3.510006 +step:6661 train loss:3.527873 +step:6662 train loss:3.592398 +step:6663 train loss:3.639315 +step:6664 train loss:3.592996 +step:6665 train loss:3.541496 +step:6666 train loss:3.492348 +step:6667 train loss:3.592846 +step:6668 train loss:3.537558 +step:6669 train loss:3.592817 +step:6670 train loss:3.505674 +step:6671 train loss:3.553357 
+step:6672 train loss:3.510276 +step:6673 train loss:3.495546 +step:6674 train loss:3.563722 +step:6675 train loss:3.523489 +step:6676 train loss:3.526850 +step:6677 train loss:3.513365 +step:6678 train loss:3.553772 +step:6679 train loss:3.515363 +step:6680 train loss:3.542437 +step:6681 train loss:3.461004 +step:6682 train loss:3.477506 +step:6683 train loss:3.487925 +step:6684 train loss:3.463471 +step:6685 train loss:3.497264 +step:6686 train loss:3.512886 +step:6687 train loss:3.514932 +step:6688 train loss:3.515778 +step:6689 train loss:3.570424 +step:6690 train loss:3.516690 +step:6691 train loss:3.574475 +step:6692 train loss:3.475044 +step:6693 train loss:3.513469 +step:6694 train loss:3.496948 +step:6695 train loss:3.538172 +step:6696 train loss:3.558573 +step:6697 train loss:3.618601 +step:6698 train loss:3.502759 +step:6699 train loss:3.520437 +step:6700 train loss:3.578285 +step:6701 train loss:3.498380 +step:6702 train loss:3.483980 +step:6703 train loss:3.544620 +step:6704 train loss:3.596130 +step:6705 train loss:3.437997 +step:6706 train loss:3.565420 +step:6707 train loss:3.536028 +step:6708 train loss:3.490888 +step:6709 train loss:3.597391 +step:6710 train loss:3.507826 +step:6711 train loss:3.491874 +step:6712 train loss:3.485663 +step:6713 train loss:3.534523 +step:6714 train loss:3.509807 +step:6715 train loss:3.492740 +step:6716 train loss:3.466854 +step:6717 train loss:3.472961 +step:6718 train loss:3.517169 +step:6719 train loss:3.454319 +step:6720 train loss:3.480751 +step:6721 train loss:3.523202 +step:6722 train loss:3.483890 +step:6723 train loss:3.517647 +step:6724 train loss:3.517481 +step:6725 train loss:3.473843 +step:6726 train loss:3.472563 +step:6727 train loss:3.538392 +step:6728 train loss:3.545889 +step:6729 train loss:3.554660 +step:6730 train loss:3.543801 +step:6731 train loss:3.506151 +step:6732 train loss:3.562932 +step:6733 train loss:3.513789 +step:6734 train loss:3.493825 +step:6735 train loss:3.539436 +step:6736 train loss:3.490546 +step:6737 train loss:3.472200 +step:6738 train loss:3.521019 +step:6739 train loss:3.490482 +step:6740 train loss:3.484312 +step:6741 train loss:3.541181 +step:6742 train loss:3.515752 +step:6743 train loss:3.563303 +step:6744 train loss:3.617945 +step:6745 train loss:3.596205 +step:6746 train loss:3.546888 +step:6747 train loss:3.518270 +step:6748 train loss:3.524262 +step:6749 train loss:3.519825 +step:6750 validation loss:3.477970 +step:6750 train loss:3.457617 +step:6751 train loss:3.485268 +step:6752 train loss:3.539524 +step:6753 train loss:3.470643 +step:6754 train loss:3.463250 +step:6755 train loss:3.491672 +step:6756 train loss:3.532449 +step:6757 train loss:3.512159 +step:6758 train loss:3.559469 +step:6759 train loss:3.448867 +step:6760 train loss:3.473052 +step:6761 train loss:3.540876 +step:6762 train loss:3.497701 +step:6763 train loss:3.458759 +step:6764 train loss:3.530298 +step:6765 train loss:3.517293 +step:6766 train loss:3.495383 +step:6767 train loss:3.500756 +step:6768 train loss:3.493901 +step:6769 train loss:3.436337 +step:6770 train loss:3.566446 +step:6771 train loss:3.480919 +step:6772 train loss:3.568194 +step:6773 train loss:3.567871 +step:6774 train loss:3.589168 +step:6775 train loss:3.466305 +step:6776 train loss:3.520824 +step:6777 train loss:3.522081 +step:6778 train loss:3.489082 +step:6779 train loss:3.508375 +step:6780 train loss:3.510835 +step:6781 train loss:3.494982 +step:6782 train loss:3.569569 +step:6783 train loss:3.486684 +step:6784 train loss:3.473282 +step:6785 
train loss:3.555990 +step:6786 train loss:3.539751 +step:6787 train loss:3.477268 +step:6788 train loss:3.553507 +step:6789 train loss:3.473447 +step:6790 train loss:3.478782 +step:6791 train loss:3.516683 +step:6792 train loss:3.425541 +step:6793 train loss:3.530723 +step:6794 train loss:3.519895 +step:6795 train loss:3.560176 +step:6796 train loss:3.451457 +step:6797 train loss:3.504441 +step:6798 train loss:3.489360 +step:6799 train loss:3.513898 +step:6800 train loss:3.565737 +step:6801 train loss:3.482227 +step:6802 train loss:3.466766 +step:6803 train loss:3.501209 +step:6804 train loss:3.553992 +step:6805 train loss:3.471325 +step:6806 train loss:3.489730 +step:6807 train loss:3.578327 +step:6808 train loss:3.511940 +step:6809 train loss:3.510746 +step:6810 train loss:3.499800 +step:6811 train loss:3.510114 +step:6812 train loss:3.492799 +step:6813 train loss:3.465819 +step:6814 train loss:3.490977 +step:6815 train loss:3.513181 +step:6816 train loss:3.559671 +step:6817 train loss:3.532833 +step:6818 train loss:3.503724 +step:6819 train loss:3.488652 +step:6820 train loss:3.472517 +step:6821 train loss:3.536655 +step:6822 train loss:3.507443 +step:6823 train loss:3.618858 +step:6824 train loss:3.758170 +step:6825 train loss:3.536166 +step:6826 train loss:3.501242 +step:6827 train loss:3.504454 +step:6828 train loss:3.551752 +step:6829 train loss:3.502259 +step:6830 train loss:3.476478 +step:6831 train loss:3.511013 +step:6832 train loss:3.482595 +step:6833 train loss:3.472650 +step:6834 train loss:3.487789 +step:6835 train loss:3.505774 +step:6836 train loss:3.542775 +step:6837 train loss:3.527929 +step:6838 train loss:3.500072 +step:6839 train loss:3.452090 +step:6840 train loss:3.528790 +step:6841 train loss:3.498133 +step:6842 train loss:3.444566 +step:6843 train loss:3.500425 +step:6844 train loss:3.484855 +step:6845 train loss:3.440796 +step:6846 train loss:3.509814 +step:6847 train loss:3.477203 +step:6848 train loss:3.481860 +step:6849 train loss:3.462784 +step:6850 train loss:3.508024 +step:6851 train loss:3.415025 +step:6852 train loss:3.484375 +step:6853 train loss:3.537331 +step:6854 train loss:3.501944 +step:6855 train loss:3.486629 +step:6856 train loss:3.556811 +step:6857 train loss:3.591149 +step:6858 train loss:3.463219 +step:6859 train loss:3.527077 +step:6860 train loss:3.479720 +step:6861 train loss:3.633075 +step:6862 train loss:3.511428 +step:6863 train loss:3.526637 +step:6864 train loss:3.543409 +step:6865 train loss:3.553648 +step:6866 train loss:3.461350 +step:6867 train loss:3.531393 +step:6868 train loss:3.494511 +step:6869 train loss:3.575063 +step:6870 train loss:3.488889 +step:6871 train loss:3.481046 +step:6872 train loss:3.471840 +step:6873 train loss:3.494794 +step:6874 train loss:3.506502 +step:6875 train loss:3.523137 +step:6876 train loss:3.529769 +step:6877 train loss:3.529716 +step:6878 train loss:3.508137 +step:6879 train loss:3.521917 +step:6880 train loss:3.498875 +step:6881 train loss:3.537425 +step:6882 train loss:3.495582 +step:6883 train loss:3.549538 +step:6884 train loss:3.491660 +step:6885 train loss:3.560108 +step:6886 train loss:3.441967 +step:6887 train loss:3.454539 +step:6888 train loss:3.438672 +step:6889 train loss:3.512467 +step:6890 train loss:3.480755 +step:6891 train loss:3.519978 +step:6892 train loss:3.485903 +step:6893 train loss:3.516979 +step:6894 train loss:3.491795 +step:6895 train loss:3.525427 +step:6896 train loss:3.540293 +step:6897 train loss:3.549753 +step:6898 train loss:3.523087 +step:6899 train loss:3.509376 
+step:6900 train loss:3.519479 +step:6901 train loss:3.505033 +step:6902 train loss:3.479305 +step:6903 train loss:3.487272 +step:6904 train loss:3.472984 +step:6905 train loss:3.561167 +step:6906 train loss:3.573637 +step:6907 train loss:3.430782 +step:6908 train loss:3.467253 +step:6909 train loss:3.485293 +step:6910 train loss:3.459857 +step:6911 train loss:3.548280 +step:6912 train loss:3.486217 +step:6913 train loss:3.463297 +step:6914 train loss:3.469402 +step:6915 train loss:3.512022 +step:6916 train loss:3.467268 +step:6917 train loss:3.532199 +step:6918 train loss:3.451617 +step:6919 train loss:3.424967 +step:6920 train loss:3.522575 +step:6921 train loss:3.435989 +step:6922 train loss:3.514922 +step:6923 train loss:3.471559 +step:6924 train loss:3.513458 +step:6925 train loss:3.536362 +step:6926 train loss:3.500740 +step:6927 train loss:3.442904 +step:6928 train loss:3.508200 +step:6929 train loss:3.446493 +step:6930 train loss:3.537549 +step:6931 train loss:3.456570 +step:6932 train loss:3.480131 +step:6933 train loss:3.487625 +step:6934 train loss:3.420854 +step:6935 train loss:3.495562 +step:6936 train loss:3.513057 +step:6937 train loss:3.547887 +step:6938 train loss:3.509926 +step:6939 train loss:3.497018 +step:6940 train loss:3.472011 +step:6941 train loss:3.467569 +step:6942 train loss:3.510126 +step:6943 train loss:3.430495 +step:6944 train loss:3.515258 +step:6945 train loss:3.490891 +step:6946 train loss:3.502389 +step:6947 train loss:3.464011 +step:6948 train loss:3.594345 +step:6949 train loss:3.704338 +step:6950 train loss:3.524305 +step:6951 train loss:3.518986 +step:6952 train loss:3.559796 +step:6953 train loss:3.473727 +step:6954 train loss:3.498846 +step:6955 train loss:3.475302 +step:6956 train loss:3.538488 +step:6957 train loss:3.477765 +step:6958 train loss:3.501013 +step:6959 train loss:3.501753 +step:6960 train loss:3.414820 +step:6961 train loss:3.490606 +step:6962 train loss:3.467738 +step:6963 train loss:3.506132 +step:6964 train loss:3.485379 +step:6965 train loss:3.497579 +step:6966 train loss:3.477889 +step:6967 train loss:3.448735 +step:6968 train loss:3.439930 +step:6969 train loss:3.492302 +step:6970 train loss:3.493974 +step:6971 train loss:3.467713 +step:6972 train loss:3.508885 +step:6973 train loss:3.543293 +step:6974 train loss:3.548507 +step:6975 train loss:3.483889 +step:6976 train loss:3.538183 +step:6977 train loss:3.531714 +step:6978 train loss:3.471013 +step:6979 train loss:3.461122 +step:6980 train loss:3.479447 +step:6981 train loss:3.593839 +step:6982 train loss:3.527815 +step:6983 train loss:3.482879 +step:6984 train loss:3.504692 +step:6985 train loss:3.485481 +step:6986 train loss:3.538150 +step:6987 train loss:3.513683 +step:6988 train loss:3.454069 +step:6989 train loss:3.535644 +step:6990 train loss:3.458518 +step:6991 train loss:3.530733 +step:6992 train loss:3.550863 +step:6993 train loss:3.585412 +step:6994 train loss:3.507545 +step:6995 train loss:3.523878 +step:6996 train loss:3.460286 +step:6997 train loss:3.524360 +step:6998 train loss:3.556646 +step:6999 train loss:3.465628 +step:7000 validation loss:3.471701 total_sharp:4.9709e-03 L1_sharp:5.5067e-03 L2_sharp:5.2881e-04 L3_sharp:1.1815e-03 L4_sharp:1.1526e-03 L5_sharp:7.1209e-04 L6_sharp:1.0622e-03 L7_sharp:1.1662e-03 L8_sharp:1.9245e-03 L9_sharp:1.5654e-03 L10_sharp:7.8842e-04 L11_sharp:7.5244e-04 L12_sharp:2.0777e-03 total_fnorm:2.2360e+00 total_l1_linf:1.9887e+04 total_spectral:2.2360e+00 L1_fnorm:5.0775e-01 L2_fnorm:5.1361e-01 L3_fnorm:5.0557e-01 
L4_fnorm:5.0681e-01 L5_fnorm:5.0053e-01 L6_fnorm:5.0556e-01 L7_fnorm:5.1448e-01 L8_fnorm:5.1296e-01 L9_fnorm:5.2658e-01 L10_fnorm:5.3993e-01 L11_fnorm:5.4968e-01 L12_fnorm:5.4040e-01 L1_l1linf:6.6151e-01 L2_l1linf:6.0134e-01 L3_l1linf:6.8291e-01 L4_l1linf:6.1610e-01 L5_l1linf:5.6894e-01 L6_l1linf:6.0565e-01 L7_l1linf:5.9804e-01 L8_l1linf:5.6355e-01 L9_l1linf:5.4033e-01 L10_l1linf:5.6168e-01 L11_l1linf:5.9063e-01 L12_l1linf:5.7146e-01 L1_spectral:8.7680e-02 L2_spectral:8.1894e-02 L3_spectral:8.9106e-02 L4_spectral:9.0647e-02 L5_spectral:7.0349e-02 L6_spectral:8.2899e-02 L7_spectral:7.7352e-02 L8_spectral:7.1096e-02 L9_spectral:5.3944e-02 L10_spectral:4.4303e-02 L11_spectral:5.4699e-02 L12_spectral:6.9765e-02 v_norm:2.2360e+00 cos_v_-g_hvp:6.1537e-02 g_hvp_norm:3.8346e-01 cos_v_-g_t:7.4015e-02 g_t_norm:3.2041e-01 hv_norm:4.2924e-01 cos_v_hv:2.5894e-02 hg_norm:6.2317e+00 cos_g_hg:6.7526e-01 v_par:8.6032e-03 v_perp:2.2360e+00 L1_cos_v_neg_g:1.1965e-01 L1_v_norm:5.0775e-01 L2_cos_v_neg_g:5.4826e-02 L2_v_norm:5.1361e-01 L3_cos_v_neg_g:5.2792e-02 L3_v_norm:5.0557e-01 L4_cos_v_neg_g:5.1640e-02 L4_v_norm:5.0681e-01 L5_cos_v_neg_g:4.2548e-02 L5_v_norm:5.0053e-01 L6_cos_v_neg_g:5.4643e-02 L6_v_norm:5.0556e-01 L7_cos_v_neg_g:6.1266e-02 L7_v_norm:5.1448e-01 L8_cos_v_neg_g:5.7007e-02 L8_v_norm:5.1296e-01 L9_cos_v_neg_g:5.5029e-02 L9_v_norm:5.2658e-01 L10_cos_v_neg_g:6.5390e-02 L10_v_norm:5.3993e-01 L11_cos_v_neg_g:8.6987e-02 L11_v_norm:5.4968e-01 L12_cos_v_neg_g:1.1657e-01 L12_v_norm:5.4040e-01 +step:7000 train loss:3.476124 +step:7001 train loss:3.467833 +step:7002 train loss:3.520874 +step:7003 train loss:3.437572 +step:7004 train loss:3.441165 +step:7005 train loss:3.434685 +step:7006 train loss:3.538598 +step:7007 train loss:3.475209 +step:7008 train loss:3.501850 +step:7009 train loss:3.470605 +step:7010 train loss:3.515956 +step:7011 train loss:3.555212 +step:7012 train loss:3.527196 +step:7013 train loss:3.518342 +step:7014 train loss:3.535167 +step:7015 train loss:3.543533 +step:7016 train loss:3.516216 +step:7017 train loss:3.514336 +step:7018 train loss:3.495289 +step:7019 train loss:3.656939 +step:7020 train loss:3.432691 +step:7021 train loss:3.510253 +step:7022 train loss:3.500103 +step:7023 train loss:3.490354 +step:7024 train loss:3.527447 +step:7025 train loss:3.526302 +step:7026 train loss:3.489982 +step:7027 train loss:3.584174 +step:7028 train loss:3.493301 +step:7029 train loss:3.463121 +step:7030 train loss:3.489118 +step:7031 train loss:3.483816 +step:7032 train loss:3.520835 +step:7033 train loss:3.506144 +step:7034 train loss:3.534832 +step:7035 train loss:3.564439 +step:7036 train loss:3.480882 +step:7037 train loss:3.554096 +step:7038 train loss:3.476529 +step:7039 train loss:3.524557 +step:7040 train loss:3.509013 +step:7041 train loss:3.546302 +step:7042 train loss:3.520947 +step:7043 train loss:3.570166 +step:7044 train loss:3.558109 +step:7045 train loss:3.624454 +step:7046 train loss:3.477605 +step:7047 train loss:3.495831 +step:7048 train loss:3.560528 +step:7049 train loss:3.423050 +step:7050 train loss:3.504666 +step:7051 train loss:3.496587 +step:7052 train loss:3.560810 +step:7053 train loss:3.418096 +step:7054 train loss:3.547634 +step:7055 train loss:3.528834 +step:7056 train loss:3.502341 +step:7057 train loss:3.498613 +step:7058 train loss:3.550211 +step:7059 train loss:3.540661 +step:7060 train loss:3.564467 +step:7061 train loss:3.510603 +step:7062 train loss:3.540592 +step:7063 train loss:3.627328 +step:7064 train loss:3.607513 +step:7065 train loss:3.470375 
+step:7066 train loss:3.528170 +step:7067 train loss:3.549579 +step:7068 train loss:3.568490 +step:7069 train loss:3.529542 +step:7070 train loss:3.548600 +step:7071 train loss:3.574569 +step:7072 train loss:3.515512 +step:7073 train loss:3.552811 +step:7074 train loss:3.585595 +step:7075 train loss:3.572815 +step:7076 train loss:3.542671 +step:7077 train loss:3.493713 +step:7078 train loss:3.521102 +step:7079 train loss:3.519384 +step:7080 train loss:3.627243 +step:7081 train loss:3.557952 +step:7082 train loss:3.540530 +step:7083 train loss:3.585629 +step:7084 train loss:3.520019 +step:7085 train loss:3.584638 +step:7086 train loss:3.563895 +step:7087 train loss:3.567729 +step:7088 train loss:3.528525 +step:7089 train loss:3.523643 +step:7090 train loss:3.496385 +step:7091 train loss:3.537341 +step:7092 train loss:3.543376 +step:7093 train loss:3.514845 +step:7094 train loss:3.501797 +step:7095 train loss:3.500206 +step:7096 train loss:3.513614 +step:7097 train loss:3.525868 +step:7098 train loss:3.520535 +step:7099 train loss:3.502680 +step:7100 train loss:3.557084 +step:7101 train loss:3.543062 +step:7102 train loss:3.547318 +step:7103 train loss:3.705217 +step:7104 train loss:3.503580 +step:7105 train loss:3.559440 +step:7106 train loss:3.585554 +step:7107 train loss:3.540267 +step:7108 train loss:3.508797 +step:7109 train loss:3.516216 +step:7110 train loss:3.525242 +step:7111 train loss:3.610292 +step:7112 train loss:3.515854 +step:7113 train loss:3.521875 +step:7114 train loss:3.514759 +step:7115 train loss:3.555182 +step:7116 train loss:3.498595 +step:7117 train loss:3.509587 +step:7118 train loss:3.586486 +step:7119 train loss:3.537507 +step:7120 train loss:3.502472 +step:7121 train loss:3.555725 +step:7122 train loss:3.592013 +step:7123 train loss:3.528418 +step:7124 train loss:3.563491 +step:7125 train loss:3.521161 +step:7126 train loss:3.572497 +step:7127 train loss:3.539480 +step:7128 train loss:3.547482 +step:7129 train loss:3.570292 +step:7130 train loss:3.522770 +step:7131 train loss:3.520311 +step:7132 train loss:3.531718 +step:7133 train loss:3.543456 +step:7134 train loss:3.527735 +step:7135 train loss:3.529010 +step:7136 train loss:3.520857 +step:7137 train loss:3.561647 +step:7138 train loss:3.525999 +step:7139 train loss:3.509475 +step:7140 train loss:3.489668 +step:7141 train loss:3.505457 +step:7142 train loss:3.520879 +step:7143 train loss:3.508923 +step:7144 train loss:3.495571 +step:7145 train loss:3.568868 +step:7146 train loss:3.517326 +step:7147 train loss:3.490254 +step:7148 train loss:3.486554 +step:7149 train loss:3.533779 +step:7150 train loss:3.656971 +step:7151 train loss:3.518189 +step:7152 train loss:3.473586 +step:7153 train loss:3.533831 +step:7154 train loss:3.574782 +step:7155 train loss:3.512046 +step:7156 train loss:3.557262 +step:7157 train loss:3.599211 +step:7158 train loss:3.548115 +step:7159 train loss:3.497917 +step:7160 train loss:3.547526 +step:7161 train loss:3.578854 +step:7162 train loss:3.557320 +step:7163 train loss:3.516462 +step:7164 train loss:3.523459 +step:7165 train loss:3.551460 +step:7166 train loss:3.519785 +step:7167 train loss:3.558331 +step:7168 train loss:3.559139 +step:7169 train loss:3.543778 +step:7170 train loss:3.532712 +step:7171 train loss:3.559986 +step:7172 train loss:3.554163 +step:7173 train loss:3.552063 +step:7174 train loss:3.524367 +step:7175 train loss:3.522750 +step:7176 train loss:3.512418 +step:7177 train loss:3.467873 +step:7178 train loss:3.563261 +step:7179 train loss:3.570231 +step:7180 train 
loss:3.527763 +step:7181 train loss:3.512777 +step:7182 train loss:3.518256 +step:7183 train loss:3.640385 +step:7184 train loss:3.561564 +step:7185 train loss:3.498888 +step:7186 train loss:3.528267 +step:7187 train loss:3.530803 +step:7188 train loss:3.543986 +step:7189 train loss:3.496273 +step:7190 train loss:3.508185 +step:7191 train loss:3.481129 +step:7192 train loss:3.537071 +step:7193 train loss:3.522338 +step:7194 train loss:3.506757 +step:7195 train loss:3.506748 +step:7196 train loss:3.510813 +step:7197 train loss:3.545504 +step:7198 train loss:3.549180 +step:7199 train loss:3.544724 +step:7200 train loss:3.573333 +step:7201 train loss:3.485459 +step:7202 train loss:3.486114 +step:7203 train loss:3.489192 +step:7204 train loss:3.495347 +step:7205 train loss:3.557444 +step:7206 train loss:3.542268 +step:7207 train loss:3.540713 +step:7208 train loss:3.490389 +step:7209 train loss:3.470992 +step:7210 train loss:3.506485 +step:7211 train loss:3.576637 +step:7212 train loss:3.596836 +step:7213 train loss:3.542278 +step:7214 train loss:3.610026 +step:7215 train loss:3.562346 +step:7216 train loss:3.586630 +step:7217 train loss:3.539392 +step:7218 train loss:3.494997 +step:7219 train loss:3.540779 +step:7220 train loss:3.534944 +step:7221 train loss:3.574957 +step:7222 train loss:3.594664 +step:7223 train loss:3.584023 +step:7224 train loss:3.539910 +step:7225 train loss:3.605854 +step:7226 train loss:3.572419 +step:7227 train loss:3.531129 +step:7228 train loss:3.557587 +step:7229 train loss:3.529175 +step:7230 train loss:3.514225 +step:7231 train loss:3.556772 +step:7232 train loss:3.510396 +step:7233 train loss:3.506242 +step:7234 train loss:3.562992 +step:7235 train loss:3.559606 +step:7236 train loss:3.489909 +step:7237 train loss:3.584707 +step:7238 train loss:3.506906 +step:7239 train loss:3.491938 +step:7240 train loss:3.516865 +step:7241 train loss:3.551380 +step:7242 train loss:3.577618 +step:7243 train loss:3.591107 +step:7244 train loss:3.562897 +step:7245 train loss:3.546836 +step:7246 train loss:3.472525 +step:7247 train loss:3.495241 +step:7248 train loss:3.464185 +step:7249 train loss:3.511987 +step:7250 validation loss:3.465148 +step:7250 train loss:3.538877 +step:7251 train loss:3.446721 +step:7252 train loss:3.475700 +step:7253 train loss:3.441978 +step:7254 train loss:3.544030 +step:7255 train loss:3.467101 +step:7256 train loss:3.506969 +step:7257 train loss:3.534640 +step:7258 train loss:3.584294 +step:7259 train loss:3.510377 +step:7260 train loss:3.462965 +step:7261 train loss:3.470244 +step:7262 train loss:3.496281 +step:7263 train loss:3.509534 +step:7264 train loss:3.491606 +step:7265 train loss:3.504706 +step:7266 train loss:3.474842 +step:7267 train loss:3.492158 +step:7268 train loss:3.464704 +step:7269 train loss:3.484864 +step:7270 train loss:3.527480 +step:7271 train loss:3.516294 +step:7272 train loss:3.603529 +step:7273 train loss:3.517702 +step:7274 train loss:3.495718 +step:7275 train loss:3.527126 +step:7276 train loss:3.464045 +step:7277 train loss:3.482413 +step:7278 train loss:3.552461 +step:7279 train loss:3.591114 +step:7280 train loss:3.548528 +step:7281 train loss:3.430346 +step:7282 train loss:3.529040 +step:7283 train loss:3.488841 +step:7284 train loss:3.448654 +step:7285 train loss:3.480505 +step:7286 train loss:3.457178 +step:7287 train loss:3.542336 +step:7288 train loss:3.467129 +step:7289 train loss:3.528157 +step:7290 train loss:3.455686 +step:7291 train loss:3.507329 +step:7292 train loss:3.492352 +step:7293 train loss:3.445679 
+step:7294 train loss:3.474230 +step:7295 train loss:3.532351 +step:7296 train loss:3.465032 +step:7297 train loss:3.579054 +step:7298 train loss:3.518188 +step:7299 train loss:3.544516 +step:7300 train loss:3.516700 +step:7301 train loss:3.508304 +step:7302 train loss:3.533969 +step:7303 train loss:3.424228 +step:7304 train loss:3.473409 +step:7305 train loss:3.488841 +step:7306 train loss:3.527826 +step:7307 train loss:3.556883 +step:7308 train loss:3.459985 +step:7309 train loss:3.500946 +step:7310 train loss:3.528635 +step:7311 train loss:3.467448 +step:7312 train loss:3.588533 +step:7313 train loss:3.504952 +step:7314 train loss:3.482553 +step:7315 train loss:3.520218 +step:7316 train loss:3.490162 +step:7317 train loss:3.476164 +step:7318 train loss:3.443978 +step:7319 train loss:3.503361 +step:7320 train loss:3.512692 +step:7321 train loss:3.489008 +step:7322 train loss:3.543317 +step:7323 train loss:3.542101 +step:7324 train loss:3.537567 +step:7325 train loss:3.537679 +step:7326 train loss:3.448570 +step:7327 train loss:3.463576 +step:7328 train loss:3.469548 +step:7329 train loss:3.516438 +step:7330 train loss:3.457795 +step:7331 train loss:3.495616 +step:7332 train loss:3.523183 +step:7333 train loss:3.542950 +step:7334 train loss:3.457521 +step:7335 train loss:3.485688 +step:7336 train loss:3.462854 +step:7337 train loss:3.513976 +step:7338 train loss:3.450436 +step:7339 train loss:3.482306 +step:7340 train loss:3.501196 +step:7341 train loss:3.423163 +step:7342 train loss:3.496289 +step:7343 train loss:3.463762 +step:7344 train loss:3.504199 +step:7345 train loss:3.529191 +step:7346 train loss:3.452587 +step:7347 train loss:3.503766 +step:7348 train loss:3.557427 +step:7349 train loss:3.496563 +step:7350 train loss:3.473962 +step:7351 train loss:3.528258 +step:7352 train loss:3.530661 +step:7353 train loss:3.465231 +step:7354 train loss:3.482141 +step:7355 train loss:3.537189 +step:7356 train loss:3.417524 +step:7357 train loss:3.579322 +step:7358 train loss:3.457075 +step:7359 train loss:3.613789 +step:7360 train loss:3.541870 +step:7361 train loss:3.539296 +step:7362 train loss:3.481378 +step:7363 train loss:3.505478 +step:7364 train loss:3.452314 +step:7365 train loss:3.489446 +step:7366 train loss:3.457258 +step:7367 train loss:3.501617 +step:7368 train loss:3.519181 +step:7369 train loss:3.518388 +step:7370 train loss:3.557401 +step:7371 train loss:3.561715 +step:7372 train loss:3.522260 +step:7373 train loss:3.526476 +step:7374 train loss:3.484937 +step:7375 train loss:3.561347 +step:7376 train loss:3.527806 +step:7377 train loss:3.417872 +step:7378 train loss:3.513700 +step:7379 train loss:3.513591 +step:7380 train loss:3.455794 +step:7381 train loss:3.465537 +step:7382 train loss:3.456553 +step:7383 train loss:3.536232 +step:7384 train loss:3.513213 +step:7385 train loss:3.412329 +step:7386 train loss:3.491068 +step:7387 train loss:3.481975 +step:7388 train loss:3.504304 +step:7389 train loss:3.486341 +step:7390 train loss:3.434551 +step:7391 train loss:3.498010 +step:7392 train loss:3.513974 +step:7393 train loss:3.462150 +step:7394 train loss:3.527694 +step:7395 train loss:3.640115 +step:7396 train loss:3.484577 +step:7397 train loss:3.493716 +step:7398 train loss:3.529239 +step:7399 train loss:3.499010 +step:7400 train loss:3.495402 +step:7401 train loss:3.552704 +step:7402 train loss:3.413165 +step:7403 train loss:3.522125 +step:7404 train loss:3.501961 +step:7405 train loss:3.459887 +step:7406 train loss:3.536804 +step:7407 train loss:3.534527 +step:7408 train 
loss:3.426555 +step:7409 train loss:3.501315 +step:7410 train loss:3.475440 +step:7411 train loss:3.494844 +step:7412 train loss:3.505916 +step:7413 train loss:3.500343 +step:7414 train loss:3.465848 +step:7415 train loss:3.447749 +step:7416 train loss:3.560935 +step:7417 train loss:3.597217 +step:7418 train loss:3.517163 +step:7419 train loss:3.589976 +step:7420 train loss:3.460744 +step:7421 train loss:3.484654 +step:7422 train loss:3.514540 +step:7423 train loss:3.489045 +step:7424 train loss:3.544050 +step:7425 train loss:3.412930 +step:7426 train loss:3.447740 +step:7427 train loss:3.448931 +step:7428 train loss:3.464860 +step:7429 train loss:3.571215 +step:7430 train loss:3.522913 +step:7431 train loss:3.470656 +step:7432 train loss:3.538945 +step:7433 train loss:3.490469 +step:7434 train loss:3.490269 +step:7435 train loss:3.485851 +step:7436 train loss:3.453908 +step:7437 train loss:3.531327 +step:7438 train loss:3.463387 +step:7439 train loss:3.546258 +step:7440 train loss:3.548219 +step:7441 train loss:3.507746 +step:7442 train loss:3.526839 +step:7443 train loss:3.532980 +step:7444 train loss:3.511546 +step:7445 train loss:3.507786 +step:7446 train loss:3.444856 +step:7447 train loss:3.515374 +step:7448 train loss:3.495323 +step:7449 train loss:3.514572 +step:7450 train loss:3.506407 +step:7451 train loss:3.526720 +step:7452 train loss:3.531361 +step:7453 train loss:3.547774 +step:7454 train loss:3.491495 +step:7455 train loss:3.461546 +step:7456 train loss:3.547312 +step:7457 train loss:3.520455 +step:7458 train loss:3.474340 +step:7459 train loss:3.550596 +step:7460 train loss:3.520803 +step:7461 train loss:3.464776 +step:7462 train loss:3.505156 +step:7463 train loss:3.657417 +step:7464 train loss:3.484640 +step:7465 train loss:3.482418 +step:7466 train loss:3.481722 +step:7467 train loss:3.545460 +step:7468 train loss:3.476173 +step:7469 train loss:3.557258 +step:7470 train loss:3.481163 +step:7471 train loss:3.460488 +step:7472 train loss:3.555465 +step:7473 train loss:3.480068 +step:7474 train loss:3.510316 +step:7475 train loss:3.485210 +step:7476 train loss:3.477108 +step:7477 train loss:3.486156 +step:7478 train loss:3.476670 +step:7479 train loss:3.534422 +step:7480 train loss:3.553567 +step:7481 train loss:3.472387 +step:7482 train loss:3.555730 +step:7483 train loss:3.517061 +step:7484 train loss:3.513923 +step:7485 train loss:3.533923 +step:7486 train loss:3.533377 +step:7487 train loss:3.475420 +step:7488 train loss:3.464722 +step:7489 train loss:3.470104 +step:7490 train loss:3.475190 +step:7491 train loss:3.538372 +step:7492 train loss:3.499854 +step:7493 train loss:3.508333 +step:7494 train loss:3.501421 +step:7495 train loss:3.505130 +step:7496 train loss:3.482488 +step:7497 train loss:3.503041 +step:7498 train loss:3.547097 +step:7499 train loss:3.510235 +step:7500 validation loss:3.460975 total_sharp:7.2012e-03 L1_sharp:5.3006e-03 L2_sharp:8.9958e-04 L3_sharp:1.1925e-03 L4_sharp:1.8176e-03 L5_sharp:9.7964e-04 L6_sharp:1.6753e-03 L7_sharp:1.7200e-03 L8_sharp:3.4997e-03 L9_sharp:2.4425e-03 L10_sharp:1.0852e-03 L11_sharp:8.5798e-04 L12_sharp:2.9996e-03 total_fnorm:2.2471e+00 total_l1_linf:1.9990e+04 total_spectral:2.2471e+00 L1_fnorm:5.1120e-01 L2_fnorm:5.1577e-01 L3_fnorm:5.0494e-01 L4_fnorm:5.1617e-01 L5_fnorm:5.1089e-01 L6_fnorm:5.1415e-01 L7_fnorm:5.1954e-01 L8_fnorm:5.1576e-01 L9_fnorm:5.2513e-01 L10_fnorm:5.4165e-01 L11_fnorm:5.5155e-01 L12_fnorm:5.4227e-01 L1_l1linf:5.8511e-01 L2_l1linf:5.8840e-01 L3_l1linf:5.9135e-01 L4_l1linf:6.2303e-01 
L5_l1linf:5.6852e-01 L6_l1linf:6.3586e-01 L7_l1linf:5.8166e-01 L8_l1linf:6.0493e-01 L9_l1linf:5.8537e-01 L10_l1linf:5.5187e-01 L11_l1linf:5.7140e-01 L12_l1linf:5.6142e-01 L1_spectral:8.7381e-02 L2_spectral:8.1098e-02 L3_spectral:8.3672e-02 L4_spectral:9.3819e-02 L5_spectral:7.7987e-02 L6_spectral:8.4336e-02 L7_spectral:8.2872e-02 L8_spectral:8.4001e-02 L9_spectral:6.7917e-02 L10_spectral:4.7493e-02 L11_spectral:5.6538e-02 L12_spectral:7.8311e-02 v_norm:2.2471e+00 cos_v_-g_hvp:6.2187e-02 g_hvp_norm:4.0752e-01 cos_v_-g_t:7.1432e-02 g_t_norm:3.5980e-01 hv_norm:6.4250e-01 cos_v_hv:2.5186e-02 hg_norm:7.0561e+00 cos_g_hg:6.8076e-01 v_par:7.3448e-03 v_perp:2.2471e+00 L1_cos_v_neg_g:1.2437e-01 L1_v_norm:5.1120e-01 L2_cos_v_neg_g:6.2397e-02 L2_v_norm:5.1577e-01 L3_cos_v_neg_g:5.8298e-02 L3_v_norm:5.0494e-01 L4_cos_v_neg_g:5.9755e-02 L4_v_norm:5.1617e-01 L5_cos_v_neg_g:4.5335e-02 L5_v_norm:5.1089e-01 L6_cos_v_neg_g:5.5101e-02 L6_v_norm:5.1415e-01 L7_cos_v_neg_g:6.2439e-02 L7_v_norm:5.1954e-01 L8_cos_v_neg_g:5.9781e-02 L8_v_norm:5.1576e-01 L9_cos_v_neg_g:5.4895e-02 L9_v_norm:5.2513e-01 L10_cos_v_neg_g:6.2218e-02 L10_v_norm:5.4165e-01 L11_cos_v_neg_g:8.5213e-02 L11_v_norm:5.5155e-01 L12_cos_v_neg_g:1.2203e-01 L12_v_norm:5.4227e-01 +step:7500 train loss:3.458631 +step:7501 train loss:3.488771 +step:7502 train loss:3.490516 +step:7503 train loss:3.453931 +step:7504 train loss:3.499025 +step:7505 train loss:3.525636 +step:7506 train loss:3.547609 +step:7507 train loss:3.496882 +step:7508 train loss:3.473941 +step:7509 train loss:3.518202 +step:7510 train loss:3.516039 +step:7511 train loss:3.462336 +step:7512 train loss:3.542355 +step:7513 train loss:3.506504 +step:7514 train loss:3.565030 +step:7515 train loss:3.552869 +step:7516 train loss:3.517504 +step:7517 train loss:3.547543 +step:7518 train loss:3.494725 +step:7519 train loss:3.516659 +step:7520 train loss:3.513538 +step:7521 train loss:3.533960 +step:7522 train loss:3.511176 +step:7523 train loss:3.492303 +step:7524 train loss:3.468566 +step:7525 train loss:3.490022 +step:7526 train loss:3.494601 +step:7527 train loss:3.548971 +step:7528 train loss:3.491913 +step:7529 train loss:3.457686 +step:7530 train loss:3.581790 +step:7531 train loss:3.501731 +step:7532 train loss:3.568769 +step:7533 train loss:3.519767 +step:7534 train loss:3.452376 +step:7535 train loss:3.508174 +step:7536 train loss:3.505019 +step:7537 train loss:3.575852 +step:7538 train loss:3.509989 +step:7539 train loss:3.504944 +step:7540 train loss:3.555088 +step:7541 train loss:3.448951 +step:7542 train loss:3.450474 +step:7543 train loss:3.522883 +step:7544 train loss:3.522571 +step:7545 train loss:3.460901 +step:7546 train loss:3.550674 +step:7547 train loss:3.635048 +step:7548 train loss:3.498673 +step:7549 train loss:3.469359 +step:7550 train loss:3.530396 +step:7551 train loss:3.463067 +step:7552 train loss:3.520275 +step:7553 train loss:3.511656 +step:7554 train loss:3.484230 +step:7555 train loss:3.495481 +step:7556 train loss:3.477507 +step:7557 train loss:3.507613 +step:7558 train loss:3.470469 +step:7559 train loss:3.517556 +step:7560 train loss:3.533243 +step:7561 train loss:3.499087 +step:7562 train loss:3.472292 +step:7563 train loss:3.537657 +step:7564 train loss:3.478794 +step:7565 train loss:3.566854 +step:7566 train loss:3.535724 +step:7567 train loss:3.469166 +step:7568 train loss:3.473003 +step:7569 train loss:3.463383 +step:7570 train loss:3.504532 +step:7571 train loss:3.462759 +step:7572 train loss:3.449690 +step:7573 train loss:3.542412 +step:7574 train 
loss:3.519842 +step:7575 train loss:3.484390 +step:7576 train loss:3.536112 +step:7577 train loss:3.549289 +step:7578 train loss:3.532767 +step:7579 train loss:3.592316 +step:7580 train loss:3.442738 +step:7581 train loss:3.408546 +step:7582 train loss:3.442166 +step:7583 train loss:3.484854 +step:7584 train loss:3.519403 +step:7585 train loss:3.462646 +step:7586 train loss:3.554895 +step:7587 train loss:3.507962 +step:7588 train loss:3.445736 +step:7589 train loss:3.505262 +step:7590 train loss:3.492793 +step:7591 train loss:3.527986 +step:7592 train loss:3.510198 +step:7593 train loss:3.504661 +step:7594 train loss:3.561772 +step:7595 train loss:3.546622 +step:7596 train loss:3.467186 +step:7597 train loss:3.510813 +step:7598 train loss:3.466134 +step:7599 train loss:3.492794 +step:7600 train loss:3.507282 +step:7601 train loss:3.498848 +step:7602 train loss:3.533744 +step:7603 train loss:3.484072 +step:7604 train loss:3.497637 +step:7605 train loss:3.523206 +step:7606 train loss:3.517911 +step:7607 train loss:3.553875 +step:7608 train loss:3.528406 +step:7609 train loss:3.467171 +step:7610 train loss:3.556367 +step:7611 train loss:3.670619 +step:7612 train loss:3.464726 +step:7613 train loss:3.530860 +step:7614 train loss:3.504131 +step:7615 train loss:3.525118 +step:7616 train loss:3.539497 +step:7617 train loss:3.540566 +step:7618 train loss:3.548125 +step:7619 train loss:3.532043 +step:7620 train loss:3.479349 +step:7621 train loss:3.495664 +step:7622 train loss:3.529273 +step:7623 train loss:3.486226 +step:7624 train loss:3.532923 +step:7625 train loss:3.535109 +step:7626 train loss:3.625369 +step:7627 train loss:3.496093 +step:7628 train loss:3.436533 +step:7629 train loss:3.453006 +step:7630 train loss:3.457156 +step:7631 train loss:3.470263 +step:7632 train loss:3.511236 +step:7633 train loss:3.475800 +step:7634 train loss:3.465966 +step:7635 train loss:3.485814 +step:7636 train loss:3.483493 +step:7637 train loss:3.517496 +step:7638 train loss:3.477491 +step:7639 train loss:3.530936 +step:7640 train loss:3.477573 +step:7641 train loss:3.583131 +step:7642 train loss:3.488456 +step:7643 train loss:3.436169 +step:7644 train loss:3.483052 +step:7645 train loss:3.506019 +step:7646 train loss:3.489709 +step:7647 train loss:3.517354 +step:7648 train loss:3.497516 +step:7649 train loss:3.475965 +step:7650 train loss:3.468492 +step:7651 train loss:3.567826 +step:7652 train loss:3.489988 +step:7653 train loss:3.472616 +step:7654 train loss:3.518715 +step:7655 train loss:3.502368 +step:7656 train loss:3.494770 +step:7657 train loss:3.455171 +step:7658 train loss:3.488981 +step:7659 train loss:3.446663 +step:7660 train loss:3.453910 +step:7661 train loss:3.444939 +step:7662 train loss:3.413058 +step:7663 train loss:3.439338 +step:7664 train loss:3.495263 +step:7665 train loss:3.482256 +step:7666 train loss:3.475979 +step:7667 train loss:3.538759 +step:7668 train loss:3.556139 +step:7669 train loss:3.595472 +step:7670 train loss:3.474590 +step:7671 train loss:3.537007 +step:7672 train loss:3.444186 +step:7673 train loss:3.510541 +step:7674 train loss:3.466321 +step:7675 train loss:3.505892 +step:7676 train loss:3.522323 +step:7677 train loss:3.430393 +step:7678 train loss:3.456401 +step:7679 train loss:3.420745 +step:7680 train loss:3.482621 +step:7681 train loss:3.423057 +step:7682 train loss:3.481558 +step:7683 train loss:3.439240 +step:7684 train loss:3.485932 +step:7685 train loss:3.455239 +step:7686 train loss:3.508848 +step:7687 train loss:3.480488 +step:7688 train loss:3.500630 
+step:7689 train loss:3.493807 +step:7690 train loss:3.432683 +step:7691 train loss:3.445982 +step:7692 train loss:3.486443 +step:7693 train loss:3.492285 +step:7694 train loss:3.495090 +step:7695 train loss:3.441612 +step:7696 train loss:3.582275 +step:7697 train loss:3.460300 +step:7698 train loss:3.520683 +step:7699 train loss:3.484579 +step:7700 train loss:3.456816 +step:7701 train loss:3.499736 +step:7702 train loss:3.475509 +step:7703 train loss:3.471522 +step:7704 train loss:3.480536 +step:7705 train loss:3.511259 +step:7706 train loss:3.417218 +step:7707 train loss:3.400014 +step:7708 train loss:3.466354 +step:7709 train loss:3.494097 +step:7710 train loss:3.468805 +step:7711 train loss:3.470894 +step:7712 train loss:3.500460 +step:7713 train loss:3.515856 +step:7714 train loss:3.495604 +step:7715 train loss:3.443056 +step:7716 train loss:3.518727 +step:7717 train loss:3.500669 +step:7718 train loss:3.525376 +step:7719 train loss:3.429029 +step:7720 train loss:3.460863 +step:7721 train loss:3.473203 +step:7722 train loss:3.446385 +step:7723 train loss:3.502481 +step:7724 train loss:3.447995 +step:7725 train loss:3.495730 +step:7726 train loss:3.442922 +step:7727 train loss:3.464567 +step:7728 train loss:3.428875 +step:7729 train loss:3.420993 +step:7730 train loss:3.441125 +step:7731 train loss:3.460602 +step:7732 train loss:3.458045 +step:7733 train loss:3.450788 +step:7734 train loss:3.408921 +step:7735 train loss:3.441160 +step:7736 train loss:3.503301 +step:7737 train loss:3.471485 +step:7738 train loss:3.484458 +step:7739 train loss:3.495532 +step:7740 train loss:3.497420 +step:7741 train loss:3.444973 +step:7742 train loss:3.464160 +step:7743 train loss:3.467685 +step:7744 train loss:3.464708 +step:7745 train loss:3.449791 +step:7746 train loss:3.484870 +step:7747 train loss:3.463084 +step:7748 train loss:3.473267 +step:7749 train loss:3.411684 +step:7750 validation loss:3.454671 +step:7750 train loss:3.512379 +step:7751 train loss:3.506920 +step:7752 train loss:3.501781 +step:7753 train loss:3.531348 +step:7754 train loss:3.550547 +step:7755 train loss:3.507138 +step:7756 train loss:3.489217 +step:7757 train loss:3.489302 +step:7758 train loss:3.408649 +step:7759 train loss:3.490524 +step:7760 train loss:3.428724 +step:7761 train loss:3.501398 +step:7762 train loss:3.426814 +step:7763 train loss:3.459371 +step:7764 train loss:3.494668 +step:7765 train loss:3.447369 +step:7766 train loss:3.484187 +step:7767 train loss:3.424479 +step:7768 train loss:3.501376 +step:7769 train loss:3.442448 +step:7770 train loss:3.463345 +step:7771 train loss:3.460644 +step:7772 train loss:3.460932 +step:7773 train loss:3.428895 +step:7774 train loss:3.493178 +step:7775 train loss:3.417911 +step:7776 train loss:3.465029 +step:7777 train loss:3.455722 +step:7778 train loss:3.485547 +step:7779 train loss:3.540999 +step:7780 train loss:3.400233 +step:7781 train loss:3.466722 +step:7782 train loss:3.532197 +step:7783 train loss:3.536516 +step:7784 train loss:3.458824 +step:7785 train loss:3.490425 +step:7786 train loss:3.473242 +step:7787 train loss:3.497875 +step:7788 train loss:3.530447 +step:7789 train loss:3.453582 +step:7790 train loss:3.444838 +step:7791 train loss:3.417197 +step:7792 train loss:3.466425 +step:7793 train loss:3.481724 +step:7794 train loss:3.512437 +step:7795 train loss:3.460473 +step:7796 train loss:3.484941 +step:7797 train loss:3.393113 +step:7798 train loss:3.515213 +step:7799 train loss:3.484473 +step:7800 train loss:3.496557 +step:7801 train loss:3.432527 +step:7802 
train loss:3.432296 +step:7803 train loss:3.490193 +step:7804 train loss:3.469183 +step:7805 train loss:3.491349 +step:7806 train loss:3.424743 +step:7807 train loss:3.527764 +step:7808 train loss:3.479187 +step:7809 train loss:3.534221 +step:7810 train loss:3.433873 +step:7811 train loss:3.545304 +step:7812 train loss:3.429093 +step:7813 train loss:3.463226 +step:7814 train loss:3.439935 +step:7815 train loss:3.477173 +step:7816 train loss:3.467660 +step:7817 train loss:3.514549 +step:7818 train loss:3.481614 +step:7819 train loss:3.469136 +step:7820 train loss:3.484653 +step:7821 train loss:3.475135 +step:7822 train loss:3.474515 +step:7823 train loss:3.507435 +step:7824 train loss:3.448225 +step:7825 train loss:3.513182 +step:7826 train loss:3.497114 +step:7827 train loss:3.498375 +step:7828 train loss:3.492962 +step:7829 train loss:3.542332 +step:7830 train loss:3.453824 +step:7831 train loss:3.542571 +step:7832 train loss:3.456604 +step:7833 train loss:3.410644 +step:7834 train loss:3.556691 +step:7835 train loss:3.411671 +step:7836 train loss:3.535399 +step:7837 train loss:3.502350 +step:7838 train loss:3.455519 +step:7839 train loss:3.468372 +step:7840 train loss:3.422527 +step:7841 train loss:3.605302 +step:7842 train loss:3.456829 +step:7843 train loss:3.473031 +step:7844 train loss:3.537111 +step:7845 train loss:3.508477 +step:7846 train loss:3.521381 +step:7847 train loss:3.471973 +step:7848 train loss:3.524180 +step:7849 train loss:3.453746 +step:7850 train loss:3.458818 +step:7851 train loss:3.517836 +step:7852 train loss:3.456567 +step:7853 train loss:3.484232 +step:7854 train loss:3.439166 +step:7855 train loss:3.488573 +step:7856 train loss:3.502984 +step:7857 train loss:3.453710 +step:7858 train loss:3.484595 +step:7859 train loss:3.456407 +step:7860 train loss:3.511344 +step:7861 train loss:3.487808 +step:7862 train loss:3.513301 +step:7863 train loss:3.461750 +step:7864 train loss:3.435013 +step:7865 train loss:3.474499 +step:7866 train loss:3.477259 +step:7867 train loss:3.494142 +step:7868 train loss:3.490209 +step:7869 train loss:3.469524 +step:7870 train loss:3.480769 +step:7871 train loss:3.488116 +step:7872 train loss:3.499309 +step:7873 train loss:3.489980 +step:7874 train loss:3.424003 +step:7875 train loss:3.474874 +step:7876 train loss:3.482590 +step:7877 train loss:3.512211 +step:7878 train loss:3.517726 +step:7879 train loss:3.434645 +step:7880 train loss:3.455009 +step:7881 train loss:3.468927 +step:7882 train loss:3.498318 +step:7883 train loss:3.436372 +step:7884 train loss:3.446530 +step:7885 train loss:3.464921 +step:7886 train loss:3.439949 +step:7887 train loss:3.476867 +step:7888 train loss:3.463490 +step:7889 train loss:3.485403 +step:7890 train loss:3.437752 +step:7891 train loss:3.439183 +step:7892 train loss:3.463313 +step:7893 train loss:3.459181 +step:7894 train loss:3.490396 +step:7895 train loss:3.449491 +step:7896 train loss:3.525649 +step:7897 train loss:3.481978 +step:7898 train loss:3.515966 +step:7899 train loss:3.533665 +step:7900 train loss:3.429707 +step:7901 train loss:3.473661 +step:7902 train loss:3.416982 +step:7903 train loss:3.499517 +step:7904 train loss:3.475062 +step:7905 train loss:3.439005 +step:7906 train loss:3.531336 +step:7907 train loss:3.478442 +step:7908 train loss:3.475346 +step:7909 train loss:3.658064 +step:7910 train loss:3.437645 +step:7911 train loss:3.531579 +step:7912 train loss:3.448423 +step:7913 train loss:3.462112 +step:7914 train loss:3.507893 +step:7915 train loss:3.435479 +step:7916 train loss:3.520204 
+step:7917 train loss:3.446880 +step:7918 train loss:3.487196 +step:7919 train loss:3.453694 +step:7920 train loss:3.549126 +step:7921 train loss:3.448921 +step:7922 train loss:3.371577 +step:7923 train loss:3.492331 +step:7924 train loss:3.519985 +step:7925 train loss:3.519422 +step:7926 train loss:3.474510 +step:7927 train loss:3.428058 +step:7928 train loss:3.477336 +step:7929 train loss:3.443321 +step:7930 train loss:3.484517 +step:7931 train loss:3.508261 +step:7932 train loss:3.461935 +step:7933 train loss:3.490695 +step:7934 train loss:3.464983 +step:7935 train loss:3.502537 +step:7936 train loss:3.470017 +step:7937 train loss:3.473624 +step:7938 train loss:3.503219 +step:7939 train loss:3.442277 +step:7940 train loss:3.513689 +step:7941 train loss:3.414980 +step:7942 train loss:3.467353 +step:7943 train loss:3.482062 +step:7944 train loss:3.448802 +step:7945 train loss:3.498296 +step:7946 train loss:3.440772 +step:7947 train loss:3.504267 +step:7948 train loss:3.451471 +step:7949 train loss:3.449318 +step:7950 train loss:3.526875 +step:7951 train loss:3.417482 +step:7952 train loss:3.502501 +step:7953 train loss:3.511685 +step:7954 train loss:3.554268 +step:7955 train loss:3.472468 +step:7956 train loss:3.534324 +step:7957 train loss:3.496097 +step:7958 train loss:3.417543 +step:7959 train loss:3.461000 +step:7960 train loss:3.524865 +step:7961 train loss:3.428304 +step:7962 train loss:3.464736 +step:7963 train loss:3.407567 +step:7964 train loss:3.490714 +step:7965 train loss:3.481277 +step:7966 train loss:3.456206 +step:7967 train loss:3.518854 +step:7968 train loss:3.423144 +step:7969 train loss:3.496011 +step:7970 train loss:3.449186 +step:7971 train loss:3.447913 +step:7972 train loss:3.548623 +step:7973 train loss:3.459058 +step:7974 train loss:3.507562 +step:7975 train loss:3.460967 +step:7976 train loss:3.458707 +step:7977 train loss:3.474351 +step:7978 train loss:3.488672 +step:7979 train loss:3.547669 +step:7980 train loss:3.455240 +step:7981 train loss:3.457223 +step:7982 train loss:3.454835 +step:7983 train loss:3.507435 +step:7984 train loss:3.518167 +step:7985 train loss:3.437630 +step:7986 train loss:3.499687 +step:7987 train loss:3.508420 +step:7988 train loss:3.447642 +step:7989 train loss:3.506547 +step:7990 train loss:3.460328 +step:7991 train loss:3.505699 +step:7992 train loss:3.485078 +step:7993 train loss:3.449612 +step:7994 train loss:3.496073 +step:7995 train loss:3.475206 +step:7996 train loss:3.573581 +step:7997 train loss:3.467864 +step:7998 train loss:3.410073 +step:7999 train loss:3.503475 +step:8000 validation loss:3.447813 total_sharp:4.9698e-03 L1_sharp:3.6449e-03 L2_sharp:3.5086e-04 L3_sharp:1.1735e-03 L4_sharp:1.1768e-03 L5_sharp:8.4437e-04 L6_sharp:1.1897e-03 L7_sharp:1.1927e-03 L8_sharp:2.0608e-03 L9_sharp:1.5364e-03 L10_sharp:8.3074e-04 L11_sharp:7.3303e-04 L12_sharp:2.2187e-03 total_fnorm:2.2179e+00 total_l1_linf:1.9719e+04 total_spectral:2.2179e+00 L1_fnorm:4.9148e-01 L2_fnorm:5.0909e-01 L3_fnorm:5.0303e-01 L4_fnorm:5.0283e-01 L5_fnorm:4.9613e-01 L6_fnorm:5.0536e-01 L7_fnorm:5.1274e-01 L8_fnorm:5.0808e-01 L9_fnorm:5.1908e-01 L10_fnorm:5.3432e-01 L11_fnorm:5.4382e-01 L12_fnorm:5.3796e-01 L1_l1linf:5.7536e-01 L2_l1linf:5.8259e-01 L3_l1linf:6.7496e-01 L4_l1linf:6.3985e-01 L5_l1linf:5.7791e-01 L6_l1linf:6.3405e-01 L7_l1linf:5.5796e-01 L8_l1linf:5.4943e-01 L9_l1linf:5.3612e-01 L10_l1linf:5.4469e-01 L11_l1linf:5.5450e-01 L12_l1linf:5.7249e-01 L1_spectral:8.3566e-02 L2_spectral:8.3723e-02 L3_spectral:8.9703e-02 L4_spectral:9.4234e-02 
L5_spectral:7.4669e-02 L6_spectral:8.2072e-02 L7_spectral:7.2829e-02 L8_spectral:7.0446e-02 L9_spectral:5.3566e-02 L10_spectral:4.4932e-02 L11_spectral:5.4641e-02 L12_spectral:7.5592e-02 v_norm:2.2179e+00 cos_v_-g_hvp:6.1620e-02 g_hvp_norm:3.6756e-01 cos_v_-g_t:7.6725e-02 g_t_norm:2.9620e-01 hv_norm:4.6440e-01 cos_v_hv:2.3735e-02 hg_norm:6.5779e+00 cos_g_hg:6.4064e-01 v_par:7.4723e-03 v_perp:2.2178e+00 L1_cos_v_neg_g:1.1219e-01 L1_v_norm:4.9148e-01 L2_cos_v_neg_g:5.6466e-02 L2_v_norm:5.0909e-01 L3_cos_v_neg_g:5.2053e-02 L3_v_norm:5.0303e-01 L4_cos_v_neg_g:5.9267e-02 L4_v_norm:5.0283e-01 L5_cos_v_neg_g:4.2971e-02 L5_v_norm:4.9613e-01 L6_cos_v_neg_g:5.2424e-02 L6_v_norm:5.0536e-01 L7_cos_v_neg_g:5.9359e-02 L7_v_norm:5.1274e-01 L8_cos_v_neg_g:5.6299e-02 L8_v_norm:5.0808e-01 L9_cos_v_neg_g:5.4058e-02 L9_v_norm:5.1908e-01 L10_cos_v_neg_g:6.4404e-02 L10_v_norm:5.3432e-01 L11_cos_v_neg_g:8.5559e-02 L11_v_norm:5.4382e-01 L12_cos_v_neg_g:1.2185e-01 L12_v_norm:5.3796e-01 +step:8000 train loss:3.424620 +step:8001 train loss:3.431521 +step:8002 train loss:3.585897 +step:8003 train loss:3.487001 +step:8004 train loss:3.469112 +step:8005 train loss:3.498246 +step:8006 train loss:3.447105 +step:8007 train loss:3.440760 +step:8008 train loss:3.412061 +step:8009 train loss:3.544654 +step:8010 train loss:3.475474 +step:8011 train loss:3.505457 +step:8012 train loss:3.667922 +step:8013 train loss:3.546630 +step:8014 train loss:3.481810 +step:8015 train loss:3.456409 +step:8016 train loss:3.500137 +step:8017 train loss:3.493755 +step:8018 train loss:3.536405 +step:8019 train loss:3.506769 +step:8020 train loss:3.512910 +step:8021 train loss:3.462125 +step:8022 train loss:3.515024 +step:8023 train loss:3.571358 +step:8024 train loss:3.516859 +step:8025 train loss:3.499556 +step:8026 train loss:3.562478 +step:8027 train loss:3.510516 +step:8028 train loss:3.483395 +step:8029 train loss:3.523378 +step:8030 train loss:3.499253 +step:8031 train loss:3.515177 +step:8032 train loss:3.508484 +step:8033 train loss:3.534135 +step:8034 train loss:3.434947 +step:8035 train loss:3.497713 +step:8036 train loss:3.471370 +step:8037 train loss:3.507659 +step:8038 train loss:3.454613 +step:8039 train loss:3.402097 +step:8040 train loss:3.516933 +step:8041 train loss:3.470130 +step:8042 train loss:3.487249 +step:8043 train loss:3.554000 +step:8044 train loss:3.489761 +step:8045 train loss:3.530060 +step:8046 train loss:3.542886 +step:8047 train loss:3.524379 +step:8048 train loss:3.555675 +step:8049 train loss:3.496034 +step:8050 train loss:3.535633 +step:8051 train loss:3.528419 +step:8052 train loss:3.517853 +step:8053 train loss:3.465159 +step:8054 train loss:3.499911 +step:8055 train loss:3.493938 +step:8056 train loss:3.501940 +step:8057 train loss:3.550345 +step:8058 train loss:3.553591 +step:8059 train loss:3.518985 +step:8060 train loss:3.531376 +step:8061 train loss:3.499310 +step:8062 train loss:3.499311 +step:8063 train loss:3.455160 +step:8064 train loss:3.521717 +step:8065 train loss:3.484336 +step:8066 train loss:3.482494 +step:8067 train loss:3.466351 +step:8068 train loss:3.530636 +step:8069 train loss:3.547740 +step:8070 train loss:3.472962 +step:8071 train loss:3.486326 +step:8072 train loss:3.473452 +step:8073 train loss:3.506446 +step:8074 train loss:3.488122 +step:8075 train loss:3.494271 +step:8076 train loss:3.435482 +step:8077 train loss:3.509425 +step:8078 train loss:3.452440 +step:8079 train loss:3.500459 +step:8080 train loss:3.548946 +step:8081 train loss:3.582902 +step:8082 train loss:3.688075 
+step:8083 train loss:3.500805 +step:8084 train loss:3.515883 +step:8085 train loss:3.595511 +step:8086 train loss:3.460842 +step:8087 train loss:3.481108 +step:8088 train loss:3.534752 +step:8089 train loss:3.513838 +step:8090 train loss:3.483018 +step:8091 train loss:3.506349 +step:8092 train loss:3.528989 +step:8093 train loss:3.522507 +step:8094 train loss:3.440980 +step:8095 train loss:3.516088 +step:8096 train loss:3.465308 +step:8097 train loss:3.476532 +step:8098 train loss:3.516050 +step:8099 train loss:3.481295 +step:8100 train loss:3.521294 +step:8101 train loss:3.491202 +step:8102 train loss:3.544060 +step:8103 train loss:3.505172 +step:8104 train loss:3.472212 +step:8105 train loss:3.494684 +step:8106 train loss:3.560113 +step:8107 train loss:3.475809 +step:8108 train loss:3.461618 +step:8109 train loss:3.544182 +step:8110 train loss:3.429426 +step:8111 train loss:3.500365 +step:8112 train loss:3.463723 +step:8113 train loss:3.483914 +step:8114 train loss:3.468083 +step:8115 train loss:3.518048 +step:8116 train loss:3.488681 +step:8117 train loss:3.465477 +step:8118 train loss:3.451901 +step:8119 train loss:3.475878 +step:8120 train loss:3.531280 +step:8121 train loss:3.480947 +step:8122 train loss:3.550915 +step:8123 train loss:3.524508 +step:8124 train loss:3.514289 +step:8125 train loss:3.540808 +step:8126 train loss:3.471051 +step:8127 train loss:3.491045 +step:8128 train loss:3.493395 +step:8129 train loss:3.537270 +step:8130 train loss:3.539220 +step:8131 train loss:3.551876 +step:8132 train loss:3.512830 +step:8133 train loss:3.476987 +step:8134 train loss:3.520408 +step:8135 train loss:3.479267 +step:8136 train loss:3.527017 +step:8137 train loss:3.523187 +step:8138 train loss:3.486366 +step:8139 train loss:3.483746 +step:8140 train loss:3.441985 +step:8141 train loss:3.527271 +step:8142 train loss:3.505392 +step:8143 train loss:3.469157 +step:8144 train loss:3.498304 +step:8145 train loss:3.487429 +step:8146 train loss:3.463049 +step:8147 train loss:3.483332 +step:8148 train loss:3.523318 +step:8149 train loss:3.479405 +step:8150 train loss:3.435535 +step:8151 train loss:3.471600 +step:8152 train loss:3.525849 +step:8153 train loss:3.525826 +step:8154 train loss:3.419395 +step:8155 train loss:3.435918 +step:8156 train loss:3.554504 +step:8157 train loss:3.482078 +step:8158 train loss:3.518116 +step:8159 train loss:3.509959 +step:8160 train loss:3.498012 +step:8161 train loss:3.509867 +step:8162 train loss:3.509123 +step:8163 train loss:3.517574 +step:8164 train loss:3.439747 +step:8165 train loss:3.526819 +step:8166 train loss:3.470060 +step:8167 train loss:3.556685 +step:8168 train loss:3.484697 +step:8169 train loss:3.492362 +step:8170 train loss:3.546959 +step:8171 train loss:3.454427 +step:8172 train loss:3.479606 +step:8173 train loss:3.446267 +step:8174 train loss:3.481153 +step:8175 train loss:3.462157 +step:8176 train loss:3.488373 +step:8177 train loss:3.522551 +step:8178 train loss:3.488714 +step:8179 train loss:3.519031 +step:8180 train loss:3.502700 +step:8181 train loss:3.476981 +step:8182 train loss:3.432958 +step:8183 train loss:3.522695 +step:8184 train loss:3.522608 +step:8185 train loss:3.456147 +step:8186 train loss:3.474180 +step:8187 train loss:3.522895 +step:8188 train loss:3.508817 +step:8189 train loss:3.523556 +step:8190 train loss:3.505290 +step:8191 train loss:3.499937 +step:8192 train loss:3.531427 +step:8193 train loss:3.518520 +step:8194 train loss:3.505274 +step:8195 train loss:3.483161 +step:8196 train loss:3.478070 +step:8197 train 
loss:3.441767 +step:8198 train loss:3.516885 +step:8199 train loss:3.487658 +step:8200 train loss:3.444561 +step:8201 train loss:3.508356 +step:8202 train loss:3.463378 +step:8203 train loss:3.515957 +step:8204 train loss:3.529481 +step:8205 train loss:3.554010 +step:8206 train loss:3.512361 +step:8207 train loss:3.539856 +step:8208 train loss:3.506561 +step:8209 train loss:3.507830 +step:8210 train loss:3.490075 +step:8211 train loss:3.481411 +step:8212 train loss:3.535253 +step:8213 train loss:3.494915 +step:8214 train loss:3.481419 +step:8215 train loss:3.496554 +step:8216 train loss:3.514022 +step:8217 train loss:3.529698 +step:8218 train loss:3.498783 +step:8219 train loss:3.477582 +step:8220 train loss:3.485434 +step:8221 train loss:3.502817 +step:8222 train loss:3.483026 +step:8223 train loss:3.531850 +step:8224 train loss:3.494301 +step:8225 train loss:3.518828 +step:8226 train loss:3.511638 +step:8227 train loss:3.545563 +step:8228 train loss:3.498413 +step:8229 train loss:3.483474 +step:8230 train loss:3.455701 +step:8231 train loss:3.473348 +step:8232 train loss:3.467914 +step:8233 train loss:3.550895 +step:8234 train loss:3.509857 +step:8235 train loss:3.522252 +step:8236 train loss:3.546703 +step:8237 train loss:3.521849 +step:8238 train loss:3.544001 +step:8239 train loss:3.475119 +step:8240 train loss:3.482374 +step:8241 train loss:3.517363 +step:8242 train loss:3.506267 +step:8243 train loss:3.435635 +step:8244 train loss:3.500677 +step:8245 train loss:3.551094 +step:8246 train loss:3.516572 +step:8247 train loss:3.451622 +step:8248 train loss:3.490357 +step:8249 train loss:3.524110 +step:8250 validation loss:3.434456 +step:8250 train loss:3.492428 +step:8251 train loss:3.448967 +step:8252 train loss:3.520331 +step:8253 train loss:3.476310 +step:8254 train loss:3.540438 +step:8255 train loss:3.512150 +step:8256 train loss:3.505618 +step:8257 train loss:3.533917 +step:8258 train loss:3.492272 +step:8259 train loss:3.579085 +step:8260 train loss:3.527018 +step:8261 train loss:3.482925 +step:8262 train loss:3.522965 +step:8263 train loss:3.533711 +step:8264 train loss:3.489258 +step:8265 train loss:3.510404 +step:8266 train loss:3.550140 +step:8267 train loss:3.557522 +step:8268 train loss:3.537553 +step:8269 train loss:3.498508 +step:8270 train loss:3.496786 +step:8271 train loss:3.540916 +step:8272 train loss:3.491296 +step:8273 train loss:3.520675 +step:8274 train loss:3.530157 +step:8275 train loss:3.442219 +step:8276 train loss:3.447079 +step:8277 train loss:3.499698 +step:8278 train loss:3.505958 +step:8279 train loss:3.477222 +step:8280 train loss:3.504737 +step:8281 train loss:3.579540 +step:8282 train loss:3.481744 +step:8283 train loss:3.485929 +step:8284 train loss:3.458072 +step:8285 train loss:3.436045 +step:8286 train loss:3.496957 +step:8287 train loss:3.464905 +step:8288 train loss:3.494946 +step:8289 train loss:3.563654 +step:8290 train loss:3.458559 +step:8291 train loss:3.484345 +step:8292 train loss:3.495843 +step:8293 train loss:3.501363 +step:8294 train loss:3.508822 +step:8295 train loss:3.545385 +step:8296 train loss:3.471236 +step:8297 train loss:3.488004 +step:8298 train loss:3.480840 +step:8299 train loss:3.526712 +step:8300 train loss:3.481516 +step:8301 train loss:3.568944 +step:8302 train loss:3.458004 +step:8303 train loss:3.491663 +step:8304 train loss:3.415092 +step:8305 train loss:3.523740 +step:8306 train loss:3.456714 +step:8307 train loss:3.489862 +step:8308 train loss:3.479880 +step:8309 train loss:3.505857 +step:8310 train loss:3.535691 
+step:8311 train loss:3.478086 +step:8312 train loss:3.511075 +step:8313 train loss:3.515314 +step:8314 train loss:3.495502 +step:8315 train loss:3.440994 +step:8316 train loss:3.527011 +step:8317 train loss:3.544512 +step:8318 train loss:3.512988 +step:8319 train loss:3.521802 +step:8320 train loss:3.514464 +step:8321 train loss:3.475961 +step:8322 train loss:3.515172 +step:8323 train loss:3.487684 +step:8324 train loss:3.508978 +step:8325 train loss:3.495733 +step:8326 train loss:3.462884 +step:8327 train loss:3.522077 +step:8328 train loss:3.516675 +step:8329 train loss:3.492739 +step:8330 train loss:3.575582 +step:8331 train loss:3.511495 +step:8332 train loss:3.489437 +step:8333 train loss:3.422189 +step:8334 train loss:3.500578 +step:8335 train loss:3.462682 +step:8336 train loss:3.446310 +step:8337 train loss:3.486489 +step:8338 train loss:3.510631 +step:8339 train loss:3.513235 +step:8340 train loss:3.429840 +step:8341 train loss:3.480815 +step:8342 train loss:3.453623 +step:8343 train loss:3.522220 +step:8344 train loss:3.463959 +step:8345 train loss:3.570831 +step:8346 train loss:3.483310 +step:8347 train loss:3.506281 +step:8348 train loss:3.489683 +step:8349 train loss:3.527521 +step:8350 train loss:3.475803 +step:8351 train loss:3.529295 +step:8352 train loss:3.530994 +step:8353 train loss:3.490799 +step:8354 train loss:3.493205 +step:8355 train loss:3.497981 +step:8356 train loss:3.461587 +step:8357 train loss:3.522175 +step:8358 train loss:3.549767 +step:8359 train loss:3.539891 +step:8360 train loss:3.521506 +step:8361 train loss:3.531887 +step:8362 train loss:3.540998 +step:8363 train loss:3.494794 +step:8364 train loss:3.494771 +step:8365 train loss:3.489377 +step:8366 train loss:3.446764 +step:8367 train loss:3.498394 +step:8368 train loss:3.526091 +step:8369 train loss:3.475032 +step:8370 train loss:3.569035 +step:8371 train loss:3.463534 +step:8372 train loss:3.492667 +step:8373 train loss:3.490551 +step:8374 train loss:3.503079 +step:8375 train loss:3.472651 +step:8376 train loss:3.448981 +step:8377 train loss:3.495018 +step:8378 train loss:3.433245 +step:8379 train loss:3.494042 +step:8380 train loss:3.450859 +step:8381 train loss:3.485322 +step:8382 train loss:3.471043 +step:8383 train loss:3.455447 +step:8384 train loss:3.457109 +step:8385 train loss:3.497622 +step:8386 train loss:3.511537 +step:8387 train loss:3.470284 +step:8388 train loss:3.497881 +step:8389 train loss:3.598982 +step:8390 train loss:3.468794 +step:8391 train loss:3.478047 +step:8392 train loss:3.470598 +step:8393 train loss:3.494663 +step:8394 train loss:3.548136 +step:8395 train loss:3.514498 +step:8396 train loss:3.528341 +step:8397 train loss:3.469838 +step:8398 train loss:3.487901 +step:8399 train loss:3.470640 +step:8400 train loss:3.472665 +step:8401 train loss:3.501277 +step:8402 train loss:3.510750 +step:8403 train loss:3.498169 +step:8404 train loss:3.495663 +step:8405 train loss:3.549234 +step:8406 train loss:3.492115 +step:8407 train loss:3.457168 +step:8408 train loss:3.528106 +step:8409 train loss:3.485089 +step:8410 train loss:3.433936 +step:8411 train loss:3.459639 +step:8412 train loss:3.528741 +step:8413 train loss:3.437999 +step:8414 train loss:3.508127 +step:8415 train loss:3.478566 +step:8416 train loss:3.457901 +step:8417 train loss:3.460190 +step:8418 train loss:3.464987 +step:8419 train loss:3.490826 +step:8420 train loss:3.455979 +step:8421 train loss:3.471023 +step:8422 train loss:3.489394 +step:8423 train loss:3.489296 +step:8424 train loss:3.510465 +step:8425 train 
loss:3.449028 +step:8426 train loss:3.453415 +step:8427 train loss:3.488525 +step:8428 train loss:3.528330 +step:8429 train loss:3.496727 +step:8430 train loss:3.519496 +step:8431 train loss:3.500288 +step:8432 train loss:3.426632 +step:8433 train loss:3.459917 +step:8434 train loss:3.461642 +step:8435 train loss:3.480768 +step:8436 train loss:3.453454 +step:8437 train loss:3.446759 +step:8438 train loss:3.454863 +step:8439 train loss:3.505876 +step:8440 train loss:3.425405 +step:8441 train loss:3.472447 +step:8442 train loss:3.487811 +step:8443 train loss:3.512472 +step:8444 train loss:3.507344 +step:8445 train loss:3.489772 +step:8446 train loss:3.468827 +step:8447 train loss:3.509628 +step:8448 train loss:3.515634 +step:8449 train loss:3.499899 +step:8450 train loss:3.456853 +step:8451 train loss:3.483946 +step:8452 train loss:3.487886 +step:8453 train loss:3.391674 +step:8454 train loss:3.522496 +step:8455 train loss:3.484924 +step:8456 train loss:3.501412 +step:8457 train loss:3.416507 +step:8458 train loss:3.474829 +step:8459 train loss:3.471771 +step:8460 train loss:3.491512 +step:8461 train loss:3.471427 +step:8462 train loss:3.475690 +step:8463 train loss:3.492949 +step:8464 train loss:3.455503 +step:8465 train loss:3.536982 +step:8466 train loss:3.466606 +step:8467 train loss:3.441448 +step:8468 train loss:3.485186 +step:8469 train loss:3.479006 +step:8470 train loss:3.462482 +step:8471 train loss:3.471183 +step:8472 train loss:3.534309 +step:8473 train loss:3.454847 +step:8474 train loss:3.447871 +step:8475 train loss:3.480742 +step:8476 train loss:3.539333 +step:8477 train loss:3.457636 +step:8478 train loss:3.447798 +step:8479 train loss:3.423104 +step:8480 train loss:3.452765 +step:8481 train loss:3.467535 +step:8482 train loss:3.465393 +step:8483 train loss:3.500148 +step:8484 train loss:3.455188 +step:8485 train loss:3.512124 +step:8486 train loss:3.457926 +step:8487 train loss:3.432947 +step:8488 train loss:3.458277 +step:8489 train loss:3.529683 +step:8490 train loss:3.449433 +step:8491 train loss:3.429907 +step:8492 train loss:3.498050 +step:8493 train loss:3.480036 +step:8494 train loss:3.519217 +step:8495 train loss:3.554135 +step:8496 train loss:3.496580 +step:8497 train loss:3.447333 +step:8498 train loss:3.453484 +step:8499 train loss:3.481112 +step:8500 validation loss:3.419829 total_sharp:4.4516e-03 L1_sharp:2.9876e-03 L2_sharp:3.6518e-04 L3_sharp:9.4999e-04 L4_sharp:1.0331e-03 L5_sharp:6.8098e-04 L6_sharp:1.1719e-03 L7_sharp:1.1069e-03 L8_sharp:1.8263e-03 L9_sharp:1.5332e-03 L10_sharp:8.1760e-04 L11_sharp:7.0730e-04 L12_sharp:2.5325e-03 total_fnorm:1.7095e+00 total_l1_linf:1.5237e+04 total_spectral:1.7095e+00 L1_fnorm:3.9061e-01 L2_fnorm:3.9296e-01 L3_fnorm:3.8873e-01 L4_fnorm:3.9335e-01 L5_fnorm:3.8609e-01 L6_fnorm:3.9275e-01 L7_fnorm:3.9813e-01 L8_fnorm:3.9703e-01 L9_fnorm:4.0459e-01 L10_fnorm:4.1318e-01 L11_fnorm:4.1814e-01 L12_fnorm:4.1445e-01 L1_l1linf:4.9942e-01 L2_l1linf:4.5107e-01 L3_l1linf:5.5347e-01 L4_l1linf:6.2651e-01 L5_l1linf:4.8975e-01 L6_l1linf:5.0596e-01 L7_l1linf:4.7658e-01 L8_l1linf:4.2152e-01 L9_l1linf:4.5270e-01 L10_l1linf:4.2902e-01 L11_l1linf:4.4315e-01 L12_l1linf:4.6358e-01 L1_spectral:6.9321e-02 L2_spectral:6.2021e-02 L3_spectral:6.6161e-02 L4_spectral:6.8933e-02 L5_spectral:5.9774e-02 L6_spectral:6.3520e-02 L7_spectral:5.8916e-02 L8_spectral:5.4929e-02 L9_spectral:4.9111e-02 L10_spectral:3.7059e-02 L11_spectral:4.2125e-02 L12_spectral:5.8919e-02 v_norm:1.7095e+00 cos_v_-g_hvp:5.9616e-02 g_hvp_norm:3.6281e-01 cos_v_-g_t:7.5433e-02 
g_t_norm:2.8885e-01 hv_norm:3.2332e-01 cos_v_hv:2.3537e-02 hg_norm:8.0776e+00 cos_g_hg:5.5226e-01 v_par:5.9104e-03 v_perp:1.7095e+00 L1_cos_v_neg_g:1.1141e-01 L1_v_norm:3.9061e-01 L2_cos_v_neg_g:5.7214e-02 L2_v_norm:3.9296e-01 L3_cos_v_neg_g:4.4947e-02 L3_v_norm:3.8873e-01 L4_cos_v_neg_g:4.9828e-02 L4_v_norm:3.9335e-01 L5_cos_v_neg_g:3.8537e-02 L5_v_norm:3.8609e-01 L6_cos_v_neg_g:4.9023e-02 L6_v_norm:3.9275e-01 L7_cos_v_neg_g:5.5047e-02 L7_v_norm:3.9813e-01 L8_cos_v_neg_g:5.4636e-02 L8_v_norm:3.9703e-01 L9_cos_v_neg_g:5.4258e-02 L9_v_norm:4.0459e-01 L10_cos_v_neg_g:6.1736e-02 L10_v_norm:4.1318e-01 L11_cos_v_neg_g:8.2314e-02 L11_v_norm:4.1814e-01 L12_cos_v_neg_g:1.1314e-01 L12_v_norm:4.1445e-01 +step:8500 train loss:3.414102 +step:8501 train loss:3.468829 +step:8502 train loss:3.471078 +step:8503 train loss:3.439835 +step:8504 train loss:3.467116 +step:8505 train loss:3.494114 +step:8506 train loss:3.473912 +step:8507 train loss:3.445264 +step:8508 train loss:3.460521 +step:8509 train loss:3.524061 +step:8510 train loss:3.472116 +step:8511 train loss:3.453630 +step:8512 train loss:3.447137 +step:8513 train loss:3.492579 +step:8514 train loss:3.462564 +step:8515 train loss:3.427444 +step:8516 train loss:3.472427 +step:8517 train loss:3.456409 +step:8518 train loss:3.493936 +step:8519 train loss:3.504099 +step:8520 train loss:3.454539 +step:8521 train loss:3.434810 +step:8522 train loss:3.458523 +step:8523 train loss:3.489659 +step:8524 train loss:3.523728 +step:8525 train loss:3.479191 +step:8526 train loss:3.446346 +step:8527 train loss:3.492664 +step:8528 train loss:3.497998 +step:8529 train loss:3.465743 +step:8530 train loss:3.467307 +step:8531 train loss:3.477869 +step:8532 train loss:3.504944 +step:8533 train loss:3.562382 +step:8534 train loss:3.502867 +step:8535 train loss:3.537827 +step:8536 train loss:3.434004 +step:8537 train loss:3.480252 +step:8538 train loss:3.431327 +step:8539 train loss:3.533319 +step:8540 train loss:3.497581 +step:8541 train loss:3.447942 +step:8542 train loss:3.537881 +step:8543 train loss:3.446938 +step:8544 train loss:3.535428 +step:8545 train loss:3.433874 +step:8546 train loss:3.506871 +step:8547 train loss:3.445598 +step:8548 train loss:3.470120 +step:8549 train loss:3.448082 +step:8550 train loss:3.503724 +step:8551 train loss:3.505792 +step:8552 train loss:3.448243 +step:8553 train loss:3.478160 +step:8554 train loss:3.470031 +step:8555 train loss:3.557890 +step:8556 train loss:3.495483 +step:8557 train loss:3.470718 +step:8558 train loss:3.435089 +step:8559 train loss:3.450865 +step:8560 train loss:3.514817 +step:8561 train loss:3.438643 +step:8562 train loss:3.462566 +step:8563 train loss:3.455166 +step:8564 train loss:3.555074 +step:8565 train loss:3.446709 +step:8566 train loss:3.466649 +step:8567 train loss:3.465219 +step:8568 train loss:3.452374 +step:8569 train loss:3.477697 +step:8570 train loss:3.535176 +step:8571 train loss:3.478185 +step:8572 train loss:3.428853 +step:8573 train loss:3.449482 +step:8574 train loss:3.453998 +step:8575 train loss:3.487472 +step:8576 train loss:3.519382 +step:8577 train loss:3.494349 +step:8578 train loss:3.492794 +step:8579 train loss:3.544384 +step:8580 train loss:3.528776 +step:8581 train loss:3.471565 +step:8582 train loss:3.432903 +step:8583 train loss:3.470712 +step:8584 train loss:3.502936 +step:8585 train loss:3.491170 +step:8586 train loss:3.410328 +step:8587 train loss:3.486965 +step:8588 train loss:3.511910 +step:8589 train loss:3.431504 +step:8590 train loss:3.466337 +step:8591 train loss:3.496320 
+step:8592 train loss:3.501794 +step:8593 train loss:3.506359 +step:8594 train loss:3.445510 +step:8595 train loss:3.458264 +step:8596 train loss:3.466455 +step:8597 train loss:3.523417 +step:8598 train loss:3.492485 +step:8599 train loss:3.508300 +step:8600 train loss:3.481253 +step:8601 train loss:3.511743 +step:8602 train loss:3.454516 +step:8603 train loss:3.426851 +step:8604 train loss:3.453570 +step:8605 train loss:3.426641 +step:8606 train loss:3.548131 +step:8607 train loss:3.462798 +step:8608 train loss:3.434630 +step:8609 train loss:3.503386 +step:8610 train loss:3.444923 +step:8611 train loss:3.460677 +step:8612 train loss:3.468411 +step:8613 train loss:3.475065 +step:8614 train loss:3.526274 +step:8615 train loss:3.502003 +step:8616 train loss:3.492869 +step:8617 train loss:3.426186 +step:8618 train loss:3.473714 +step:8619 train loss:3.448007 +step:8620 train loss:3.453507 +step:8621 train loss:3.473233 +step:8622 train loss:3.468001 +step:8623 train loss:3.396314 +step:8624 train loss:3.408354 +step:8625 train loss:3.470570 +step:8626 train loss:3.494402 +step:8627 train loss:3.467299 +step:8628 train loss:3.418016 +step:8629 train loss:3.495418 +step:8630 train loss:3.495013 +step:8631 train loss:3.473110 +step:8632 train loss:3.539787 +step:8633 train loss:3.486337 +step:8634 train loss:3.448824 +step:8635 train loss:3.465985 +step:8636 train loss:3.464505 +step:8637 train loss:3.517968 +step:8638 train loss:3.505437 +step:8639 train loss:3.389690 +step:8640 train loss:3.461699 +step:8641 train loss:3.422590 +step:8642 train loss:3.467220 +step:8643 train loss:3.470685 +step:8644 train loss:3.459251 +step:8645 train loss:3.480408 +step:8646 train loss:3.431133 +step:8647 train loss:3.412851 +step:8648 train loss:3.453198 +step:8649 train loss:3.419865 +step:8650 train loss:3.462406 +step:8651 train loss:3.484190 +step:8652 train loss:3.491783 +step:8653 train loss:3.444611 +step:8654 train loss:3.451165 +step:8655 train loss:3.479701 +step:8656 train loss:3.497218 +step:8657 train loss:3.479304 +step:8658 train loss:3.539603 +step:8659 train loss:3.496933 +step:8660 train loss:3.457258 +step:8661 train loss:3.428006 +step:8662 train loss:3.413539 +step:8663 train loss:3.417397 +step:8664 train loss:3.480088 +step:8665 train loss:3.430653 +step:8666 train loss:3.470484 +step:8667 train loss:3.480130 +step:8668 train loss:3.454012 +step:8669 train loss:3.496981 +step:8670 train loss:3.496108 +step:8671 train loss:3.534405 +step:8672 train loss:3.528859 +step:8673 train loss:3.462745 +step:8674 train loss:3.436410 +step:8675 train loss:3.448259 +step:8676 train loss:3.451938 +step:8677 train loss:3.477305 +step:8678 train loss:3.497436 +step:8679 train loss:3.506773 +step:8680 train loss:3.461754 +step:8681 train loss:3.440094 +step:8682 train loss:3.464244 +step:8683 train loss:3.540812 +step:8684 train loss:3.495642 +step:8685 train loss:3.444175 +step:8686 train loss:3.434129 +step:8687 train loss:3.488819 +step:8688 train loss:3.447837 +step:8689 train loss:3.429559 +step:8690 train loss:3.458396 +step:8691 train loss:3.474695 +step:8692 train loss:3.400072 +step:8693 train loss:3.536793 +step:8694 train loss:3.524783 +step:8695 train loss:3.439886 +step:8696 train loss:3.503121 +step:8697 train loss:3.428210 +step:8698 train loss:3.480620 +step:8699 train loss:3.398465 +step:8700 train loss:3.438985 +step:8701 train loss:3.493599 +step:8702 train loss:3.411916 +step:8703 train loss:3.450777 +step:8704 train loss:3.454060 +step:8705 train loss:3.466853 +step:8706 train 
loss:3.431484 +step:8707 train loss:3.479686 +step:8708 train loss:3.464189 +step:8709 train loss:3.437550 +step:8710 train loss:3.437300 +step:8711 train loss:3.462239 +step:8712 train loss:3.411994 +step:8713 train loss:3.412615 +step:8714 train loss:3.419978 +step:8715 train loss:3.431080 +step:8716 train loss:3.387475 +step:8717 train loss:3.429050 +step:8718 train loss:3.455523 +step:8719 train loss:3.453025 +step:8720 train loss:3.443219 +step:8721 train loss:3.419217 +step:8722 train loss:3.432649 +step:8723 train loss:3.427533 +step:8724 train loss:3.443752 +step:8725 train loss:3.435135 +step:8726 train loss:3.491994 +step:8727 train loss:3.460661 +step:8728 train loss:3.476747 +step:8729 train loss:3.494268 +step:8730 train loss:3.468783 +step:8731 train loss:3.535015 +step:8732 train loss:3.415672 +step:8733 train loss:3.483332 +step:8734 train loss:3.525760 +step:8735 train loss:3.350451 +step:8736 train loss:3.480462 +step:8737 train loss:3.453616 +step:8738 train loss:3.432226 +step:8739 train loss:3.466399 +step:8740 train loss:3.459147 +step:8741 train loss:3.531047 +step:8742 train loss:3.488689 +step:8743 train loss:3.433921 +step:8744 train loss:3.548384 +step:8745 train loss:3.535922 +step:8746 train loss:3.424577 +step:8747 train loss:3.496071 +step:8748 train loss:3.497174 +step:8749 train loss:3.469868 +step:8750 validation loss:3.399027 +step:8750 train loss:3.479860 +step:8751 train loss:3.518524 +step:8752 train loss:3.424421 +step:8753 train loss:3.484552 +step:8754 train loss:3.404313 +step:8755 train loss:3.437929 +step:8756 train loss:3.447393 +step:8757 train loss:3.442527 +step:8758 train loss:3.466366 +step:8759 train loss:3.472873 +step:8760 train loss:3.427405 +step:8761 train loss:3.456595 +step:8762 train loss:3.449513 +step:8763 train loss:3.440755 +step:8764 train loss:3.481689 +step:8765 train loss:3.492547 +step:8766 train loss:3.545785 +step:8767 train loss:3.475736 +step:8768 train loss:3.539895 +step:8769 train loss:3.434481 +step:8770 train loss:3.425297 +step:8771 train loss:3.450058 +step:8772 train loss:3.496215 +step:8773 train loss:3.466576 +step:8774 train loss:3.515017 +step:8775 train loss:3.526548 +step:8776 train loss:3.484705 +step:8777 train loss:3.481044 +step:8778 train loss:3.465848 +step:8779 train loss:3.495894 +step:8780 train loss:3.431597 +step:8781 train loss:3.459824 +step:8782 train loss:3.487833 +step:8783 train loss:3.489576 +step:8784 train loss:3.460990 +step:8785 train loss:3.486926 +step:8786 train loss:3.452035 +step:8787 train loss:3.524316 +step:8788 train loss:3.499167 +step:8789 train loss:3.426884 +step:8790 train loss:3.448725 +step:8791 train loss:3.412628 +step:8792 train loss:3.457449 +step:8793 train loss:3.511128 +step:8794 train loss:3.439374 +step:8795 train loss:3.448784 +step:8796 train loss:3.457439 +step:8797 train loss:3.444681 +step:8798 train loss:3.424808 +step:8799 train loss:3.406568 +step:8800 train loss:3.528311 +step:8801 train loss:3.441027 +step:8802 train loss:3.420401 +step:8803 train loss:3.521920 +step:8804 train loss:3.482465 +step:8805 train loss:3.456629 +step:8806 train loss:3.463885 +step:8807 train loss:3.466156 +step:8808 train loss:3.526907 +step:8809 train loss:3.444480 +step:8810 train loss:3.443945 +step:8811 train loss:3.575288 +step:8812 train loss:3.547549 +step:8813 train loss:3.561347 +step:8814 train loss:3.440002 +step:8815 train loss:3.497187 +step:8816 train loss:3.433157 +step:8817 train loss:3.440249 +step:8818 train loss:3.436363 +step:8819 train loss:3.453680 
+step:8820 train loss:3.453792 +step:8821 train loss:3.527668 +step:8822 train loss:3.408699 +step:8823 train loss:3.404523 +step:8824 train loss:3.415817 +step:8825 train loss:3.391912 +step:8826 train loss:3.527533 +step:8827 train loss:3.567275 +step:8828 train loss:3.526632 +step:8829 train loss:3.440392 +step:8830 train loss:3.436895 +step:8831 train loss:3.480044 +step:8832 train loss:3.492315 +step:8833 train loss:3.407955 +step:8834 train loss:3.468769 +step:8835 train loss:3.429498 +step:8836 train loss:3.533926 +step:8837 train loss:3.459672 +step:8838 train loss:3.395235 +step:8839 train loss:3.508592 +step:8840 train loss:3.448086 +step:8841 train loss:3.461316 +step:8842 train loss:3.463450 +step:8843 train loss:3.441061 +step:8844 train loss:3.470757 +step:8845 train loss:3.432149 +step:8846 train loss:3.432492 +step:8847 train loss:3.450305 +step:8848 train loss:3.431258 +step:8849 train loss:3.469330 +step:8850 train loss:3.456980 +step:8851 train loss:3.420739 +step:8852 train loss:3.452150 +step:8853 train loss:3.426092 +step:8854 train loss:3.403929 +step:8855 train loss:3.446620 +step:8856 train loss:3.413899 +step:8857 train loss:3.444315 +step:8858 train loss:3.462977 +step:8859 train loss:3.481185 +step:8860 train loss:3.415769 +step:8861 train loss:3.385767 +step:8862 train loss:3.445954 +step:8863 train loss:3.363788 +step:8864 train loss:3.349010 +step:8865 train loss:3.438713 +step:8866 train loss:3.461689 +step:8867 train loss:3.405985 +step:8868 train loss:3.483027 +step:8869 train loss:3.450632 +step:8870 train loss:3.478579 +step:8871 train loss:3.445939 +step:8872 train loss:3.470951 +step:8873 train loss:3.429126 +step:8874 train loss:3.481619 +step:8875 train loss:3.445814 +step:8876 train loss:3.446324 +step:8877 train loss:3.441287 +step:8878 train loss:3.492656 +step:8879 train loss:3.416800 +step:8880 train loss:3.422337 +step:8881 train loss:3.397710 +step:8882 train loss:3.415459 +step:8883 train loss:3.478446 +step:8884 train loss:3.410639 +step:8885 train loss:3.454767 +step:8886 train loss:3.435834 +step:8887 train loss:3.448497 +step:8888 train loss:3.437796 +step:8889 train loss:3.496496 +step:8890 train loss:3.459402 +step:8891 train loss:3.384550 +step:8892 train loss:3.393989 +step:8893 train loss:3.411100 +step:8894 train loss:3.474407 +step:8895 train loss:3.490240 +step:8896 train loss:3.454647 +step:8897 train loss:3.535699 +step:8898 train loss:3.517584 +step:8899 train loss:3.448943 +step:8900 train loss:3.487970 +step:8901 train loss:3.423092 +step:8902 train loss:3.443116 +step:8903 train loss:3.450257 +step:8904 train loss:3.441826 +step:8905 train loss:3.412777 +step:8906 train loss:3.504726 +step:8907 train loss:3.421906 +step:8908 train loss:3.435282 +step:8909 train loss:3.443844 +step:8910 train loss:3.462734 +step:8911 train loss:3.412070 +step:8912 train loss:3.416831 +step:8913 train loss:3.433270 +step:8914 train loss:3.449081 +step:8915 train loss:3.448770 +step:8916 train loss:3.358966 +step:8917 train loss:3.434809 +step:8918 train loss:3.405523 +step:8919 train loss:3.355044 +step:8920 train loss:3.413345 +step:8921 train loss:3.465278 +step:8922 train loss:3.374012 +step:8923 train loss:3.429061 +step:8924 train loss:3.383166 +step:8925 train loss:3.452606 +step:8926 train loss:3.482099 +step:8927 train loss:3.443395 +step:8928 train loss:3.457187 +step:8929 train loss:3.410687 +step:8930 train loss:3.447872 +step:8931 train loss:3.427999 +step:8932 train loss:3.433930 +step:8933 train loss:3.415378 +step:8934 train 
loss:3.441002 +step:8935 train loss:3.486964 +step:8936 train loss:3.422977 +step:8937 train loss:3.455893 +step:8938 train loss:3.441252 +step:8939 train loss:3.433878 +step:8940 train loss:3.426863 +step:8941 train loss:3.458196 +step:8942 train loss:3.413070 +step:8943 train loss:3.442970 +step:8944 train loss:3.546198 +step:8945 train loss:3.412339 +step:8946 train loss:3.425297 +step:8947 train loss:3.393383 +step:8948 train loss:3.385522 +step:8949 train loss:3.381648 +step:8950 train loss:3.386934 +step:8951 train loss:3.371216 +step:8952 train loss:3.420751 +step:8953 train loss:3.412956 +step:8954 train loss:3.456081 +step:8955 train loss:3.408595 +step:8956 train loss:3.485970 +step:8957 train loss:3.348520 +step:8958 train loss:3.423559 +step:8959 train loss:3.399865 +step:8960 train loss:3.448202 +step:8961 train loss:3.404132 +step:8962 train loss:3.378099 +step:8963 train loss:3.386517 +step:8964 train loss:3.467139 +step:8965 train loss:3.445228 +step:8966 train loss:3.474200 +step:8967 train loss:3.436281 +step:8968 train loss:3.421902 +step:8969 train loss:3.373960 +step:8970 train loss:3.458955 +step:8971 train loss:3.421196 +step:8972 train loss:3.455293 +step:8973 train loss:3.471338 +step:8974 train loss:3.443325 +step:8975 train loss:3.443367 +step:8976 train loss:3.373930 +step:8977 train loss:3.433154 +step:8978 train loss:3.430552 +step:8979 train loss:3.394147 +step:8980 train loss:3.370730 +step:8981 train loss:3.391134 +step:8982 train loss:3.403864 +step:8983 train loss:3.374961 +step:8984 train loss:3.415088 +step:8985 train loss:3.437263 +step:8986 train loss:3.447297 +step:8987 train loss:3.477077 +step:8988 train loss:3.415602 +step:8989 train loss:3.428724 +step:8990 train loss:3.457310 +step:8991 train loss:3.447917 +step:8992 train loss:3.387000 +step:8993 train loss:3.436630 +step:8994 train loss:3.384820 +step:8995 train loss:3.442259 +step:8996 train loss:3.511910 +step:8997 train loss:3.427504 +step:8998 train loss:3.424740 +step:8999 train loss:3.331174 +step:9000 validation loss:3.382611 total_sharp:1.0530e-02 L1_sharp:1.0448e-02 L2_sharp:3.8236e-03 L3_sharp:4.2724e-03 L4_sharp:2.4262e-03 L5_sharp:1.0992e-03 L6_sharp:2.1723e-03 L7_sharp:1.5249e-03 L8_sharp:2.8094e-03 L9_sharp:2.3668e-03 L10_sharp:1.8245e-03 L11_sharp:1.6271e-03 L12_sharp:2.8494e-03 total_fnorm:1.1862e+00 total_l1_linf:1.0616e+04 total_spectral:1.1862e+00 L1_fnorm:2.7791e-01 L2_fnorm:2.8059e-01 L3_fnorm:2.7841e-01 L4_fnorm:2.7790e-01 L5_fnorm:2.7518e-01 L6_fnorm:2.7691e-01 L7_fnorm:2.7923e-01 L8_fnorm:2.7858e-01 L9_fnorm:2.8271e-01 L10_fnorm:2.9176e-01 L11_fnorm:2.9390e-01 L12_fnorm:2.9046e-01 L1_l1linf:3.8743e-01 L2_l1linf:3.3474e-01 L3_l1linf:3.7479e-01 L4_l1linf:3.5961e-01 L5_l1linf:3.1637e-01 L6_l1linf:3.5405e-01 L7_l1linf:3.3041e-01 L8_l1linf:3.0862e-01 L9_l1linf:2.9594e-01 L10_l1linf:3.0771e-01 L11_l1linf:3.1130e-01 L12_l1linf:3.3662e-01 L1_spectral:5.2791e-02 L2_spectral:4.4534e-02 L3_spectral:5.4612e-02 L4_spectral:5.2980e-02 L5_spectral:4.3508e-02 L6_spectral:4.6850e-02 L7_spectral:4.2042e-02 L8_spectral:4.3040e-02 L9_spectral:3.5860e-02 L10_spectral:2.9280e-02 L11_spectral:3.6304e-02 L12_spectral:4.0412e-02 v_norm:1.1862e+00 cos_v_-g_hvp:7.0842e-02 g_hvp_norm:3.9396e-01 cos_v_-g_t:8.9910e-02 g_t_norm:3.1091e-01 hv_norm:4.4801e-01 cos_v_hv:2.7880e-02 hg_norm:1.5245e+01 cos_g_hg:5.7965e-01 v_par:3.9358e-03 v_perp:1.1862e+00 L1_cos_v_neg_g:1.3311e-01 L1_v_norm:2.7791e-01 L2_cos_v_neg_g:5.2250e-02 L2_v_norm:2.8059e-01 L3_cos_v_neg_g:5.3499e-02 L3_v_norm:2.7841e-01 
L4_cos_v_neg_g:5.4430e-02 L4_v_norm:2.7790e-01 L5_cos_v_neg_g:4.7638e-02 L5_v_norm:2.7518e-01 L6_cos_v_neg_g:6.0075e-02 L6_v_norm:2.7691e-01 L7_cos_v_neg_g:6.6286e-02 L7_v_norm:2.7923e-01 L8_cos_v_neg_g:6.5807e-02 L8_v_norm:2.7858e-01 L9_cos_v_neg_g:6.7113e-02 L9_v_norm:2.8271e-01 L10_cos_v_neg_g:8.2516e-02 L10_v_norm:2.9176e-01 L11_cos_v_neg_g:1.0874e-01 L11_v_norm:2.9390e-01 L12_cos_v_neg_g:1.4091e-01 L12_v_norm:2.9046e-01 +step:9000 train loss:3.435966 +step:9001 train loss:3.433644 +step:9002 train loss:3.441354 +step:9003 train loss:3.396276 +step:9004 train loss:3.420732 +step:9005 train loss:3.426633 +step:9006 train loss:3.430290 +step:9007 train loss:3.423912 +step:9008 train loss:3.415732 +step:9009 train loss:3.483555 +step:9010 train loss:3.398117 +step:9011 train loss:3.466065 +step:9012 train loss:3.402824 +step:9013 train loss:3.440632 +step:9014 train loss:3.385049 +step:9015 train loss:3.473763 +step:9016 train loss:3.472506 +step:9017 train loss:3.471868 +step:9018 train loss:3.472991 +step:9019 train loss:3.424362 +step:9020 train loss:3.460786 +step:9021 train loss:3.406653 +step:9022 train loss:3.493761 +step:9023 train loss:3.444720 +step:9024 train loss:3.426167 +step:9025 train loss:3.430019 +step:9026 train loss:3.513021 +step:9027 train loss:3.442816 +step:9028 train loss:3.443028 +step:9029 train loss:3.438653 +step:9030 train loss:3.384530 +step:9031 train loss:3.379466 +step:9032 train loss:3.458758 +step:9033 train loss:3.412568 +step:9034 train loss:3.479117 +step:9035 train loss:3.460816 +step:9036 train loss:3.440299 +step:9037 train loss:3.448020 +step:9038 train loss:3.351965 +step:9039 train loss:3.411467 +step:9040 train loss:3.397910 +step:9041 train loss:3.421329 +step:9042 train loss:3.372433 +step:9043 train loss:3.508834 +step:9044 train loss:3.363371 +step:9045 train loss:3.392025 +step:9046 train loss:3.431466 +step:9047 train loss:3.367033 +step:9048 train loss:3.439780 +step:9049 train loss:3.405771 +step:9050 train loss:3.430426 +step:9051 train loss:3.403937 +step:9052 train loss:3.453292 +step:9053 train loss:3.400036 +step:9054 train loss:3.463130 +step:9055 train loss:3.418705 +step:9056 train loss:3.461132 +step:9057 train loss:3.469182 +step:9058 train loss:3.402172 +step:9059 train loss:3.431897 +step:9060 train loss:3.366472 +step:9061 train loss:3.419603 +step:9062 train loss:3.399915 +step:9063 train loss:3.401846 +step:9064 train loss:3.442070 +step:9065 train loss:3.454171 +step:9066 train loss:3.384999 +step:9067 train loss:3.395514 +step:9068 train loss:3.442748 +step:9069 train loss:3.438795 +step:9070 train loss:3.404872 +step:9071 train loss:3.412904 +step:9072 train loss:3.444666 +step:9073 train loss:3.404132 +step:9074 train loss:3.515270 +step:9075 train loss:3.434548 +step:9076 train loss:3.432353 +step:9077 train loss:3.463666 +step:9078 train loss:3.383735 +step:9079 train loss:3.469400 +step:9080 train loss:3.488431 +step:9081 train loss:3.437348 +step:9082 train loss:3.446776 +step:9083 train loss:3.384577 +step:9084 train loss:3.447934 +step:9085 train loss:3.426540 +step:9086 train loss:3.419382 +step:9087 train loss:3.468253 +step:9088 train loss:3.410521 +step:9089 train loss:3.448089 +step:9090 train loss:3.433562 +step:9091 train loss:3.503231 +step:9092 train loss:3.469228 +step:9093 train loss:3.554303 +step:9094 train loss:3.393978 +step:9095 train loss:3.470258 +step:9096 train loss:3.393293 +step:9097 train loss:3.411141 +step:9098 train loss:3.451208 +step:9099 train loss:3.422235 +step:9100 train 
loss:3.369883 +step:9101 train loss:3.445810 +step:9102 train loss:3.393673 +step:9103 train loss:3.407166 +step:9104 train loss:3.457363 +step:9105 train loss:3.464015 +step:9106 train loss:3.422910 +step:9107 train loss:3.475654 +step:9108 train loss:3.434396 +step:9109 train loss:3.443461 +step:9110 train loss:3.436315 +step:9111 train loss:3.453847 +step:9112 train loss:3.370088 +step:9113 train loss:3.403524 +step:9114 train loss:3.439346 +step:9115 train loss:3.481486 +step:9116 train loss:3.490624 +step:9117 train loss:3.421450 +step:9118 train loss:3.434355 +step:9119 train loss:3.464916 +step:9120 train loss:3.477701 +step:9121 train loss:3.458286 +step:9122 train loss:3.422235 +step:9123 train loss:3.412687 +step:9124 train loss:3.466673 +step:9125 train loss:3.468446 +step:9126 train loss:3.472023 +step:9127 train loss:3.420996 +step:9128 train loss:3.490747 +step:9129 train loss:3.435424 +step:9130 train loss:3.425684 +step:9131 train loss:3.406336 +step:9132 train loss:3.409042 +step:9133 train loss:3.455765 +step:9134 train loss:3.399184 +step:9135 train loss:3.419482 +step:9136 train loss:3.400194 +step:9137 train loss:3.376631 +step:9138 train loss:3.473752 +step:9139 train loss:3.406582 +step:9140 train loss:3.495257 +step:9141 train loss:3.418336 +step:9142 train loss:3.424417 +step:9143 train loss:3.452937 +step:9144 train loss:3.399819 +step:9145 train loss:3.434007 +step:9146 train loss:3.449348 +step:9147 train loss:3.424253 +step:9148 train loss:3.405362 +step:9149 train loss:3.408359 +step:9150 train loss:3.398584 +step:9151 train loss:3.454208 +step:9152 train loss:3.407730 +step:9153 train loss:3.416352 +step:9154 train loss:3.334823 +step:9155 train loss:3.399102 +step:9156 train loss:3.441646 +step:9157 train loss:3.484494 +step:9158 train loss:3.442789 +step:9159 train loss:3.383533 +step:9160 train loss:3.427208 +step:9161 train loss:3.372114 +step:9162 train loss:3.389843 +step:9163 train loss:3.382913 +step:9164 train loss:3.389705 +step:9165 train loss:3.451825 +step:9166 train loss:3.440380 +step:9167 train loss:3.468089 +step:9168 train loss:3.412980 +step:9169 train loss:3.409634 +step:9170 train loss:3.403416 +step:9171 train loss:3.461742 +step:9172 train loss:3.454382 +step:9173 train loss:3.434842 +step:9174 train loss:3.398903 +step:9175 train loss:3.414303 +step:9176 train loss:3.471707 +step:9177 train loss:3.413466 +step:9178 train loss:3.404301 +step:9179 train loss:3.413065 +step:9180 train loss:3.329790 +step:9181 train loss:3.439034 +step:9182 train loss:3.428547 +step:9183 train loss:3.480601 +step:9184 train loss:3.446797 +step:9185 train loss:3.447948 +step:9186 train loss:3.420258 +step:9187 train loss:3.427486 +step:9188 train loss:3.445555 +step:9189 train loss:3.404914 +step:9190 train loss:3.461946 +step:9191 train loss:3.477535 +step:9192 train loss:3.439463 +step:9193 train loss:3.374175 +step:9194 train loss:3.450698 +step:9195 train loss:3.359566 +step:9196 train loss:3.411617 +step:9197 train loss:3.452404 +step:9198 train loss:3.460392 +step:9199 train loss:3.470909 +step:9200 train loss:3.396326 +step:9201 train loss:3.454448 +step:9202 train loss:3.392272 +step:9203 train loss:3.505716 +step:9204 train loss:3.494020 +step:9205 train loss:3.441206 +step:9206 train loss:3.425171 +step:9207 train loss:3.403464 +step:9208 train loss:3.429488 +step:9209 train loss:3.448573 +step:9210 train loss:3.415472 +step:9211 train loss:3.383216 +step:9212 train loss:3.473619 +step:9213 train loss:3.368400 +step:9214 train loss:3.369756 
+step:9215 train loss:3.378694 +step:9216 train loss:3.401511 +step:9217 train loss:3.433796 +step:9218 train loss:3.336184 +step:9219 train loss:3.352843 +step:9220 train loss:3.335489 +step:9221 train loss:3.399646 +step:9222 train loss:3.371058 +step:9223 train loss:3.448954 +step:9224 train loss:3.402067 +step:9225 train loss:3.414941 +step:9226 train loss:3.376056 +step:9227 train loss:3.441376 +step:9228 train loss:3.469083 +step:9229 train loss:3.459100 +step:9230 train loss:3.461662 +step:9231 train loss:3.403146 +step:9232 train loss:3.429676 +step:9233 train loss:3.363438 +step:9234 train loss:3.471230 +step:9235 train loss:3.409742 +step:9236 train loss:3.422214 +step:9237 train loss:3.404541 +step:9238 train loss:3.361023 +step:9239 train loss:3.407664 +step:9240 train loss:3.439586 +step:9241 train loss:3.484140 +step:9242 train loss:3.399074 +step:9243 train loss:3.405480 +step:9244 train loss:3.378126 +step:9245 train loss:3.441503 +step:9246 train loss:3.369792 +step:9247 train loss:3.443281 +step:9248 train loss:3.332288 +step:9249 train loss:3.445431 +step:9250 validation loss:3.366824 +step:9250 train loss:3.458088 +step:9251 train loss:3.452944 +step:9252 train loss:3.398176 +step:9253 train loss:3.480710 +step:9254 train loss:3.406412 +step:9255 train loss:3.409732 +step:9256 train loss:3.383426 +step:9257 train loss:3.425914 +step:9258 train loss:3.425937 +step:9259 train loss:3.435662 +step:9260 train loss:3.429974 +step:9261 train loss:3.437263 +step:9262 train loss:3.393348 +step:9263 train loss:3.397708 +step:9264 train loss:3.414638 +step:9265 train loss:3.395655 +step:9266 train loss:3.419655 +step:9267 train loss:3.398326 +step:9268 train loss:3.473402 +step:9269 train loss:3.393541 +step:9270 train loss:3.433389 +step:9271 train loss:3.436802 +step:9272 train loss:3.401789 +step:9273 train loss:3.429745 +step:9274 train loss:3.405351 +step:9275 train loss:3.379146 +step:9276 train loss:3.395941 +step:9277 train loss:3.340104 +step:9278 train loss:3.443362 +step:9279 train loss:3.406921 +step:9280 train loss:3.358673 +step:9281 train loss:3.432316 +step:9282 train loss:3.396578 +step:9283 train loss:3.405855 +step:9284 train loss:3.443521 +step:9285 train loss:3.434895 +step:9286 train loss:3.433735 +step:9287 train loss:3.433764 +step:9288 train loss:3.428365 +step:9289 train loss:3.431466 +step:9290 train loss:3.424719 +step:9291 train loss:3.459555 +step:9292 train loss:3.447310 +step:9293 train loss:3.401338 +step:9294 train loss:3.385570 +step:9295 train loss:3.393958 +step:9296 train loss:3.386807 +step:9297 train loss:3.406915 +step:9298 train loss:3.345197 +step:9299 train loss:3.422984 +step:9300 train loss:3.430940 +step:9301 train loss:3.376859 +step:9302 train loss:3.405141 +step:9303 train loss:3.441675 +step:9304 train loss:3.423099 +step:9305 train loss:3.391860 +step:9306 train loss:3.514597 +step:9307 train loss:3.398652 +step:9308 train loss:3.419376 +step:9309 train loss:3.493790 +step:9310 train loss:3.387381 +step:9311 train loss:3.422889 +step:9312 train loss:3.390353 +step:9313 train loss:3.385536 +step:9314 train loss:3.423038 +step:9315 train loss:3.401883 +step:9316 train loss:3.427893 +step:9317 train loss:3.455379 +step:9318 train loss:3.456867 +step:9319 train loss:3.422368 +step:9320 train loss:3.418932 +step:9321 train loss:3.400159 +step:9322 train loss:3.452858 +step:9323 train loss:3.421938 +step:9324 train loss:3.378251 +step:9325 train loss:3.418300 +step:9326 train loss:3.473974 +step:9327 train loss:3.353660 +step:9328 
train loss:3.427014 +step:9329 train loss:3.368216 +step:9330 train loss:3.443121 +step:9331 train loss:3.449576 +step:9332 train loss:3.439936 +step:9333 train loss:3.446815 +step:9334 train loss:3.393004 +step:9335 train loss:3.430582 +step:9336 train loss:3.431964 +step:9337 train loss:3.358787 +step:9338 train loss:3.391706 +step:9339 train loss:3.376223 +step:9340 train loss:3.408950 +step:9341 train loss:3.375518 +step:9342 train loss:3.368630 +step:9343 train loss:3.421705 +step:9344 train loss:3.414361 +step:9345 train loss:3.448574 +step:9346 train loss:3.370764 +step:9347 train loss:3.367212 +step:9348 train loss:3.428811 +step:9349 train loss:3.364345 +step:9350 train loss:3.427620 +step:9351 train loss:3.341988 +step:9352 train loss:3.357990 +step:9353 train loss:3.371560 +step:9354 train loss:3.386744 +step:9355 train loss:3.397962 +step:9356 train loss:3.377516 +step:9357 train loss:3.416441 +step:9358 train loss:3.389596 +step:9359 train loss:3.461882 +step:9360 train loss:3.317620 +step:9361 train loss:3.413236 +step:9362 train loss:3.397419 +step:9363 train loss:3.400966 +step:9364 train loss:3.291170 +step:9365 train loss:3.427423 +step:9366 train loss:3.413105 +step:9367 train loss:3.417637 +step:9368 train loss:3.383489 +step:9369 train loss:3.343555 +step:9370 train loss:3.440458 +step:9371 train loss:3.389671 +step:9372 train loss:3.366179 +step:9373 train loss:3.357827 +step:9374 train loss:3.385495 +step:9375 train loss:3.343622 +step:9376 train loss:3.434226 +step:9377 train loss:3.369135 +step:9378 train loss:3.353350 +step:9379 train loss:3.298759 +step:9380 train loss:3.400567 +step:9381 train loss:3.380516 +step:9382 train loss:3.483489 +step:9383 train loss:3.400735 +step:9384 train loss:3.430940 +step:9385 train loss:3.457946 +step:9386 train loss:3.346739 +step:9387 train loss:3.441696 +step:9388 train loss:3.341128 +step:9389 train loss:3.408742 +step:9390 train loss:3.432582 +step:9391 train loss:3.403108 +step:9392 train loss:3.395789 +step:9393 train loss:3.428986 +step:9394 train loss:3.345767 +step:9395 train loss:3.393774 +step:9396 train loss:3.387813 +step:9397 train loss:3.359921 +step:9398 train loss:3.402829 +step:9399 train loss:3.354640 +step:9400 train loss:3.409696 +step:9401 train loss:3.402068 +step:9402 train loss:3.397533 +step:9403 train loss:3.357866 +step:9404 train loss:3.356463 +step:9405 train loss:3.436177 +step:9406 train loss:3.385487 +step:9407 train loss:3.418184 +step:9408 train loss:3.367295 +step:9409 train loss:3.449124 +step:9410 train loss:3.355124 +step:9411 train loss:3.402493 +step:9412 train loss:3.399174 +step:9413 train loss:3.365420 +step:9414 train loss:3.460495 +step:9415 train loss:3.434113 +step:9416 train loss:3.424060 +step:9417 train loss:3.333886 +step:9418 train loss:3.460180 +step:9419 train loss:3.378764 +step:9420 train loss:3.380250 +step:9421 train loss:3.358416 +step:9422 train loss:3.393329 +step:9423 train loss:3.317898 +step:9424 train loss:3.415653 +step:9425 train loss:3.359578 +step:9426 train loss:3.366304 +step:9427 train loss:3.394645 +step:9428 train loss:3.419759 +step:9429 train loss:3.407570 +step:9430 train loss:3.397527 +step:9431 train loss:3.400391 +step:9432 train loss:3.354924 +step:9433 train loss:3.435637 +step:9434 train loss:3.360025 +step:9435 train loss:3.510708 +step:9436 train loss:3.699986 +step:9437 train loss:3.364789 +step:9438 train loss:3.388951 +step:9439 train loss:3.386708 +step:9440 train loss:3.382667 +step:9441 train loss:3.354155 +step:9442 train loss:3.411847 
+step:9443 train loss:3.405382 +step:9444 train loss:3.403177 +step:9445 train loss:3.382377 +step:9446 train loss:3.340025 +step:9447 train loss:3.376230 +step:9448 train loss:3.345627 +step:9449 train loss:3.382657 +step:9450 train loss:3.370216 +step:9451 train loss:3.332384 +step:9452 train loss:3.352667 +step:9453 train loss:3.407118 +step:9454 train loss:3.349187 +step:9455 train loss:3.661633 +step:9456 train loss:3.352106 +step:9457 train loss:3.462035 +step:9458 train loss:3.468228 +step:9459 train loss:3.501761 +step:9460 train loss:3.350613 +step:9461 train loss:3.403654 +step:9462 train loss:3.455070 +step:9463 train loss:3.382778 +step:9464 train loss:3.407114 +step:9465 train loss:3.384864 +step:9466 train loss:3.405899 +step:9467 train loss:3.384246 +step:9468 train loss:3.413816 +step:9469 train loss:3.369434 +step:9470 train loss:3.403698 +step:9471 train loss:3.409235 +step:9472 train loss:3.372437 +step:9473 train loss:3.413568 +step:9474 train loss:3.365497 +step:9475 train loss:3.430817 +step:9476 train loss:3.384295 +step:9477 train loss:3.385877 +step:9478 train loss:3.451195 +step:9479 train loss:3.345529 +step:9480 train loss:3.444281 +step:9481 train loss:3.348027 +step:9482 train loss:3.350152 +step:9483 train loss:3.396503 +step:9484 train loss:3.392527 +step:9485 train loss:3.394190 +step:9486 train loss:3.375559 +step:9487 train loss:3.391178 +step:9488 train loss:3.353296 +step:9489 train loss:3.455317 +step:9490 train loss:3.383449 +step:9491 train loss:3.434422 +step:9492 train loss:3.428783 +step:9493 train loss:3.339057 +step:9494 train loss:3.413741 +step:9495 train loss:3.316574 +step:9496 train loss:3.453057 +step:9497 train loss:3.363541 +step:9498 train loss:3.346906 +step:9499 train loss:3.488095 +step:9500 validation loss:3.354106 total_sharp:8.0563e-03 L1_sharp:8.1505e-03 L2_sharp:1.2088e-03 L3_sharp:1.3440e-03 L4_sharp:1.7373e-03 L5_sharp:1.2067e-03 L6_sharp:2.5747e-03 L7_sharp:2.0789e-03 L8_sharp:4.1973e-03 L9_sharp:2.6218e-03 L10_sharp:1.2729e-03 L11_sharp:9.8321e-04 L12_sharp:2.3013e-03 total_fnorm:5.9577e-01 total_l1_linf:5.3367e+03 total_spectral:5.9577e-01 L1_fnorm:1.4251e-01 L2_fnorm:1.4323e-01 L3_fnorm:1.3952e-01 L4_fnorm:1.3974e-01 L5_fnorm:1.3832e-01 L6_fnorm:1.3977e-01 L7_fnorm:1.4121e-01 L8_fnorm:1.4096e-01 L9_fnorm:1.4256e-01 L10_fnorm:1.4461e-01 L11_fnorm:1.4619e-01 L12_fnorm:1.4461e-01 L1_l1linf:1.8231e-01 L2_l1linf:1.5860e-01 L3_l1linf:1.6148e-01 L4_l1linf:1.7696e-01 L5_l1linf:1.6862e-01 L6_l1linf:1.9760e-01 L7_l1linf:1.9706e-01 L8_l1linf:1.8466e-01 L9_l1linf:1.5708e-01 L10_l1linf:1.5066e-01 L11_l1linf:1.5408e-01 L12_l1linf:1.5998e-01 L1_spectral:2.5319e-02 L2_spectral:2.2203e-02 L3_spectral:2.4433e-02 L4_spectral:2.7072e-02 L5_spectral:2.4969e-02 L6_spectral:2.8440e-02 L7_spectral:2.6153e-02 L8_spectral:2.5718e-02 L9_spectral:1.8425e-02 L10_spectral:1.3527e-02 L11_spectral:1.5050e-02 L12_spectral:1.9738e-02 v_norm:5.9577e-01 cos_v_-g_hvp:6.6032e-02 g_hvp_norm:3.5966e-01 cos_v_-g_t:9.1686e-02 g_t_norm:2.6111e-01 hv_norm:1.9904e-01 cos_v_hv:2.4114e-02 hg_norm:1.1508e+01 cos_g_hg:5.3892e-01 v_par:2.0754e-03 v_perp:5.9577e-01 L1_cos_v_neg_g:1.2203e-01 L1_v_norm:1.4251e-01 L2_cos_v_neg_g:5.7957e-02 L2_v_norm:1.4323e-01 L3_cos_v_neg_g:5.1039e-02 L3_v_norm:1.3952e-01 L4_cos_v_neg_g:5.5235e-02 L4_v_norm:1.3974e-01 L5_cos_v_neg_g:4.3792e-02 L5_v_norm:1.3832e-01 L6_cos_v_neg_g:5.3721e-02 L6_v_norm:1.3977e-01 L7_cos_v_neg_g:6.1170e-02 L7_v_norm:1.4121e-01 L8_cos_v_neg_g:5.9313e-02 L8_v_norm:1.4096e-01 L9_cos_v_neg_g:5.7636e-02 
L9_v_norm:1.4256e-01 L10_cos_v_neg_g:7.0359e-02 L10_v_norm:1.4461e-01 L11_cos_v_neg_g:9.2951e-02 L11_v_norm:1.4619e-01 L12_cos_v_neg_g:1.2740e-01 L12_v_norm:1.4461e-01 +step:9500 train loss:3.403926 +step:9501 train loss:3.428626 +step:9502 train loss:3.314220 +step:9503 train loss:3.418956 +step:9504 train loss:3.394612 +step:9505 train loss:3.396655 +step:9506 train loss:3.393480 +step:9507 train loss:3.341058 +step:9508 train loss:3.428866 +step:9509 train loss:3.422693 +step:9510 train loss:3.467571 +step:9511 train loss:3.350236 +step:9512 train loss:3.431853 +step:9513 train loss:3.439940 +step:9514 train loss:3.431831 +step:9515 train loss:3.385344 +step:9516 train loss:3.379027 +step:9517 train loss:3.489920 +step:9518 train loss:3.338254 +step:9519 train loss:3.379870 +step:9520 train loss:3.379196 +step:9521 train loss:3.413158 +step:9522 train loss:3.418202 +step:9523 train loss:3.394875 +step:9524 train loss:3.417381 +step:9525 train loss:3.367236 +step:9526 train loss:3.347935 +step:9527 train loss:3.426828 +step:9528 train loss:3.362643 +step:9529 train loss:3.455427 +step:9530 train loss:3.342118 +step:9531 train loss:3.380263 +step:9532 train loss:3.337776 +step:9533 train loss:3.393421 +step:9534 train loss:3.395873 +step:9535 train loss:3.392879 +step:9536 train loss:3.372318 +step:9537 train loss:3.413458 +step:9538 train loss:3.380704 +step:9539 train loss:3.414526 +step:9540 train loss:3.400604 +step:9541 train loss:3.368106 +step:9542 train loss:3.381763 +step:9543 train loss:3.364025 +step:9544 train loss:3.380485 +step:9545 train loss:3.350696 +step:9546 train loss:3.419853 +step:9547 train loss:3.399772 +step:9548 train loss:3.368850 +step:9549 train loss:3.381013 +step:9550 train loss:3.349815 +step:9551 train loss:3.418323 +step:9552 train loss:3.384166 +step:9553 train loss:3.382936 +step:9554 train loss:3.429418 +step:9555 train loss:3.390993 +step:9556 train loss:3.427330 +step:9557 train loss:3.381610 +step:9558 train loss:3.396452 +step:9559 train loss:3.387620 +step:9560 train loss:3.387813 +step:9561 train loss:3.387283 +step:9562 train loss:3.371817 +step:9563 train loss:3.346723 +step:9564 train loss:3.410445 +step:9565 train loss:3.384417 +step:9566 train loss:3.376642 +step:9567 train loss:3.392331 +step:9568 train loss:3.332223 +step:9569 train loss:3.333037 +step:9570 train loss:3.378699 +step:9571 train loss:3.424091 +step:9572 train loss:3.447507 +step:9573 train loss:3.421306 +step:9574 train loss:3.386851 +step:9575 train loss:3.428522 +step:9576 train loss:3.415526 +step:9577 train loss:3.455069 +step:9578 train loss:3.406759 +step:9579 train loss:3.349854 +step:9580 train loss:3.387287 +step:9581 train loss:3.406550 +step:9582 train loss:3.356601 +step:9583 train loss:3.350417 +step:9584 train loss:3.375685 +step:9585 train loss:3.409272 +step:9586 train loss:3.430173 +step:9587 train loss:3.356849 +step:9588 train loss:3.408551 +step:9589 train loss:3.395354 +step:9590 train loss:3.414598 +step:9591 train loss:3.361436 +step:9592 train loss:3.352089 +step:9593 train loss:3.378140 +step:9594 train loss:3.415004 +step:9595 train loss:3.409406 +step:9596 train loss:3.398439 +step:9597 train loss:3.377138 +step:9598 train loss:3.394781 +step:9599 train loss:3.389531 +step:9600 train loss:3.452540 +step:9601 train loss:3.356113 +step:9602 train loss:3.390923 +step:9603 train loss:3.424017 +step:9604 train loss:3.385004 +step:9605 train loss:3.415484 +step:9606 train loss:3.359601 +step:9607 train loss:3.365682 +step:9608 train loss:3.401720 
+step:9609 train loss:3.360982 +step:9610 train loss:3.401402 +step:9611 train loss:3.387532 +step:9612 train loss:3.324861 +step:9613 train loss:3.381996 +step:9614 train loss:3.363514 +step:9615 train loss:3.448121 +step:9616 train loss:3.479531 +step:9617 train loss:3.455778 +step:9618 train loss:3.376264 +step:9619 train loss:3.406913 +step:9620 train loss:3.420632 +step:9621 train loss:3.424440 +step:9622 train loss:3.371381 +step:9623 train loss:3.369504 +step:9624 train loss:3.389784 +step:9625 train loss:3.336736 +step:9626 train loss:3.390427 +step:9627 train loss:3.468612 +step:9628 train loss:3.398403 +step:9629 train loss:3.374723 +step:9630 train loss:3.452132 +step:9631 train loss:3.353963 +step:9632 train loss:3.383228 +step:9633 train loss:3.353909 +step:9634 train loss:3.406782 +step:9635 train loss:3.337430 +step:9636 train loss:3.399622 +step:9637 train loss:3.353864 +step:9638 train loss:3.315156 +step:9639 train loss:3.414766 +step:9640 train loss:3.392076 +step:9641 train loss:3.464110 +step:9642 train loss:3.392499 +step:9643 train loss:3.565206 +step:9644 train loss:3.395217 +step:9645 train loss:3.368218 +step:9646 train loss:3.359296 +step:9647 train loss:3.372507 +step:9648 train loss:3.382087 +step:9649 train loss:3.418838 +step:9650 train loss:3.397712 +step:9651 train loss:3.412118 +step:9652 train loss:3.348714 +step:9653 train loss:3.389325 +step:9654 train loss:3.419716 +step:9655 train loss:3.350016 +step:9656 train loss:3.430922 +step:9657 train loss:3.391527 +step:9658 train loss:3.376754 +step:9659 train loss:3.409059 +step:9660 train loss:3.350358 +step:9661 train loss:3.360665 +step:9662 train loss:3.345488 +step:9663 train loss:3.418759 +step:9664 train loss:3.401669 +step:9665 train loss:3.351659 +step:9666 train loss:3.322020 +step:9667 train loss:3.387121 +step:9668 train loss:3.414999 +step:9669 train loss:3.374459 +step:9670 train loss:3.380016 +step:9671 train loss:3.378529 +step:9672 train loss:3.434085 +step:9673 train loss:3.389473 +step:9674 train loss:3.395970 +step:9675 train loss:3.345053 +step:9676 train loss:3.397154 +step:9677 train loss:3.393950 +step:9678 train loss:3.404234 +step:9679 train loss:3.364807 +step:9680 train loss:3.404027 +step:9681 train loss:3.427745 +step:9682 train loss:3.382740 +step:9683 train loss:3.359936 +step:9684 train loss:3.395206 +step:9685 train loss:3.387475 +step:9686 train loss:3.384040 +step:9687 train loss:3.364442 +step:9688 train loss:3.373974 +step:9689 train loss:3.382871 +step:9690 train loss:3.367416 +step:9691 train loss:3.373946 +step:9692 train loss:3.373062 +step:9693 train loss:3.407946 +step:9694 train loss:3.346009 +step:9695 train loss:3.397341 +step:9696 train loss:3.313745 +step:9697 train loss:3.418207 +step:9698 train loss:3.352369 +step:9699 train loss:3.361268 +step:9700 train loss:3.341159 +step:9701 train loss:3.401041 +step:9702 train loss:3.399642 +step:9703 train loss:3.379219 +step:9704 train loss:3.377495 +step:9705 train loss:3.374162 +step:9706 train loss:3.409543 +step:9707 train loss:3.378014 +step:9708 train loss:3.361424 +step:9709 train loss:3.417857 +step:9710 train loss:3.331033 +step:9711 train loss:3.427243 +step:9712 train loss:3.363382 +step:9713 train loss:3.370827 +step:9714 train loss:3.379415 +step:9715 train loss:3.344702 +step:9716 train loss:3.354524 +step:9717 train loss:3.360379 +step:9718 train loss:3.417778 +step:9719 train loss:3.379680 +step:9720 train loss:3.385255 +step:9721 train loss:3.394233 +step:9722 train loss:3.351933 +step:9723 train 
loss:3.351511 +step:9724 train loss:3.384403 +step:9725 train loss:3.380673 +step:9726 train loss:3.402869 +step:9727 train loss:3.430995 +step:9728 train loss:3.430896 +step:9729 train loss:3.403156 +step:9730 train loss:3.434596 +step:9731 train loss:3.507120 +step:9732 train loss:3.445480 +step:9733 train loss:3.405174 +step:9734 train loss:3.407633 +step:9735 train loss:3.472111 +step:9736 train loss:3.358989 +step:9737 train loss:3.390599 +step:9738 train loss:3.400306 +step:9739 train loss:3.459412 +step:9740 train loss:3.415705 +step:9741 train loss:3.419338 +step:9742 train loss:3.455122 +step:9743 train loss:3.386323 +step:9744 train loss:3.381787 +step:9745 train loss:3.369322 +step:9746 train loss:3.367192 +step:9747 train loss:3.345700 +step:9748 train loss:3.389710 +step:9749 train loss:3.364632 +step:9750 validation loss:3.340990 +step:9750 train loss:3.308589 +step:9751 train loss:3.428223 +step:9752 train loss:3.323825 +step:9753 train loss:3.472395 +step:9754 train loss:3.363819 +step:9755 train loss:3.396551 +step:9756 train loss:3.370345 +step:9757 train loss:3.339731 +step:9758 train loss:3.366242 +step:9759 train loss:3.315955 +step:9760 train loss:3.384327 +step:9761 train loss:3.315287 +step:9762 train loss:3.324016 +step:9763 train loss:3.380122 +step:9764 train loss:3.367349 +step:9765 train loss:3.380168 +step:9766 train loss:3.382068 +step:9767 train loss:3.354938 +step:9768 train loss:3.386499 +step:9769 train loss:3.386921 +step:9770 train loss:3.365401 +step:9771 train loss:3.340887 +step:9772 train loss:3.356525 +step:9773 train loss:3.415797 +step:9774 train loss:3.358847 +step:9775 train loss:3.383920 +step:9776 train loss:3.418167 +step:9777 train loss:3.352821 +step:9778 train loss:3.415023 +step:9779 train loss:3.359472 +step:9780 train loss:3.319920 +step:9781 train loss:3.335291 +step:9782 train loss:3.345959 +step:9783 train loss:3.346413 +step:9784 train loss:3.453069 +step:9785 train loss:3.333490 +step:9786 train loss:3.387270 +step:9787 train loss:3.361145 +step:9788 train loss:3.396995 +step:9789 train loss:3.388696 +step:9790 train loss:3.348916 +step:9791 train loss:3.361504 +step:9792 train loss:3.350258 +step:9793 train loss:3.366092 +step:9794 train loss:3.413685 +step:9795 train loss:3.393340 +step:9796 train loss:3.352272 +step:9797 train loss:3.369077 +step:9798 train loss:3.358619 +step:9799 train loss:3.415954 +step:9800 train loss:3.383377 +step:9801 train loss:3.435937 +step:9802 train loss:3.368425 +step:9803 train loss:3.404897 +step:9804 train loss:3.367106 +step:9805 train loss:3.345021 +step:9806 train loss:3.403115 +step:9807 train loss:3.386611 +step:9808 train loss:3.343560 +step:9809 train loss:3.329458 +step:9810 train loss:3.309664 +step:9811 train loss:3.423469 +step:9812 train loss:3.355103 +step:9813 train loss:3.374016 +step:9814 train loss:3.422000 +step:9815 train loss:3.364401 +step:9816 train loss:3.401868 +step:9817 train loss:3.333883 +step:9818 train loss:3.376020 +step:9819 train loss:3.319114 +step:9820 train loss:3.420085 +step:9821 train loss:3.365023 +step:9822 train loss:3.412318 +step:9823 train loss:3.417500 +step:9824 train loss:3.389774 +step:9825 train loss:3.395663 +step:9826 train loss:3.403228 +step:9827 train loss:3.398397 +step:9828 train loss:3.431482 +step:9829 train loss:3.388933 +step:9830 train loss:3.316330 +step:9831 train loss:3.379884 +step:9832 train loss:3.362426 +step:9833 train loss:3.387631 +step:9834 train loss:3.337812 +step:9835 train loss:3.418476 +step:9836 train loss:3.322617 
+step:9837 train loss:3.396868 +step:9838 train loss:3.393846 +step:9839 train loss:3.416338 +step:9840 train loss:3.424935 +step:9841 train loss:3.331495 +step:9842 train loss:3.384096 +step:9843 train loss:3.362723 +step:9844 train loss:3.453410 +step:9845 train loss:3.368196 +step:9846 train loss:3.376086 +step:9847 train loss:3.408780 +step:9848 train loss:3.410527 +step:9849 train loss:3.397620 +step:9850 train loss:3.382640 +step:9851 train loss:3.406391 +step:9852 train loss:3.306993 +step:9853 train loss:3.398064 +step:9854 train loss:3.359415 +step:9855 train loss:3.362273 +step:9856 train loss:3.358850 +step:9857 train loss:3.312968 +step:9858 train loss:3.349306 +step:9859 train loss:3.353359 +step:9860 train loss:3.400025 +step:9861 train loss:3.361245 +step:9862 train loss:3.454738 +step:9863 train loss:3.357003 +step:9864 train loss:3.350260 +step:9865 train loss:3.346699 +step:9866 train loss:3.425739 +step:9867 train loss:3.349537 +step:9868 train loss:3.347642 +step:9869 train loss:3.412367 +step:9870 train loss:3.386958 +step:9871 train loss:3.348452 +step:9872 train loss:3.400686 +step:9873 train loss:3.344077 +step:9874 train loss:3.373141 +step:9875 train loss:3.375700 +step:9876 train loss:3.343887 +step:9877 train loss:3.322378 +step:9878 train loss:3.358445 +step:9879 train loss:3.371090 +step:9880 train loss:3.464044 +step:9881 train loss:3.354568 +step:9882 train loss:3.342697 +step:9883 train loss:3.348126 +step:9884 train loss:3.371818 +step:9885 train loss:3.368379 +step:9886 train loss:3.370184 +step:9887 train loss:3.420428 +step:9888 train loss:3.381431 +step:9889 train loss:3.434221 +step:9890 train loss:3.430945 +step:9891 train loss:3.368769 +step:9892 train loss:3.302353 +step:9893 train loss:3.368632 +step:9894 train loss:3.411574 +step:9895 train loss:3.362302 +step:9896 train loss:3.338748 +step:9897 train loss:3.364575 +step:9898 train loss:3.413647 +step:9899 train loss:3.330005 +step:9900 train loss:3.442230 +step:9901 train loss:3.323898 +step:9902 train loss:3.367621 +step:9903 train loss:3.369048 +step:9904 train loss:3.406809 +step:9905 train loss:3.366146 +step:9906 train loss:3.385679 +step:9907 train loss:3.308861 +step:9908 train loss:3.375140 +step:9909 train loss:3.336366 +step:9910 train loss:3.383584 +step:9911 train loss:3.390534 +step:9912 train loss:3.424431 +step:9913 train loss:3.399325 +step:9914 train loss:3.382556 +step:9915 train loss:3.375147 +step:9916 train loss:3.357323 +step:9917 train loss:3.392122 +step:9918 train loss:3.358527 +step:9919 train loss:3.397827 +step:9920 train loss:3.352777 +step:9921 train loss:3.388847 +step:9922 train loss:3.335159 +step:9923 train loss:3.387552 +step:9924 train loss:3.382684 +step:9925 train loss:3.379178 +step:9926 train loss:3.419559 +step:9927 train loss:3.305928 +step:9928 train loss:3.452949 +step:9929 train loss:3.368869 +step:9930 train loss:3.388097 +step:9931 train loss:3.359421 +step:9932 train loss:3.385199 +step:9933 train loss:3.386386 +step:9934 train loss:3.365316 +step:9935 train loss:3.323858 +step:9936 train loss:3.375379 +step:9937 train loss:3.390161 +step:9938 train loss:3.353089 +step:9939 train loss:3.369526 +step:9940 train loss:3.366101 +step:9941 train loss:3.462126 +step:9942 train loss:3.377885 +step:9943 train loss:3.323205 +step:9944 train loss:3.426853 +step:9945 train loss:3.332401 +step:9946 train loss:3.390802 +step:9947 train loss:3.348733 +step:9948 train loss:3.346274 +step:9949 train loss:3.315580 +step:9950 train loss:3.464519 +step:9951 train 
loss:3.300871 +step:9952 train loss:3.377865 +step:9953 train loss:3.363834 +step:9954 train loss:3.329931 +step:9955 train loss:3.426751 +step:9956 train loss:3.388995 +step:9957 train loss:3.383054 +step:9958 train loss:3.377149 +step:9959 train loss:3.455033 +step:9960 train loss:3.392541 +step:9961 train loss:3.438749 +step:9962 train loss:3.366122 +step:9963 train loss:3.393968 +step:9964 train loss:3.428680 +step:9965 train loss:3.313477 +step:9966 train loss:3.440241 +step:9967 train loss:3.334659 +step:9968 train loss:3.426480 +step:9969 train loss:3.282826 +step:9970 train loss:3.434650 +step:9971 train loss:3.399187 +step:9972 train loss:3.363955 +step:9973 train loss:3.409291 +step:9974 train loss:3.388326 +step:9975 train loss:3.420521 +step:9976 train loss:3.407915 +step:9977 train loss:3.400595 +step:9978 train loss:3.371192 +step:9979 train loss:3.430294 +step:9980 train loss:3.356354 +step:9981 train loss:3.406013 +step:9982 train loss:3.379492 +step:9983 train loss:3.327527 +step:9984 train loss:3.379140 +step:9985 train loss:3.326721 +step:9986 train loss:3.402359 +step:9987 train loss:3.372414 +step:9988 train loss:3.340110 +step:9989 train loss:3.436314 +step:9990 train loss:3.349368 +step:9991 train loss:3.413789 +step:9992 train loss:3.309907 +step:9993 train loss:3.372463 +step:9994 train loss:3.317008 +step:9995 train loss:3.342158 +step:9996 train loss:3.395329 +step:9997 train loss:3.385578 +step:9998 train loss:3.390998 +step:9999 train loss:3.380155 +step:10000 validation loss:3.331933 total_sharp:1.7571e-02 L1_sharp:8.1847e-03 L2_sharp:1.5146e-03 L3_sharp:3.3373e-03 L4_sharp:4.2718e-03 L5_sharp:2.1473e-03 L6_sharp:4.7968e-03 L7_sharp:4.4732e-03 L8_sharp:7.5532e-03 L9_sharp:6.3380e-03 L10_sharp:3.1012e-03 L11_sharp:1.9539e-03 L12_sharp:8.7301e-03 total_fnorm:1.2716e-03 total_l1_linf:1.1408e+01 total_spectral:1.2716e-03 L1_fnorm:3.0867e-04 L2_fnorm:3.0443e-04 L3_fnorm:3.0892e-04 L4_fnorm:3.0976e-04 L5_fnorm:3.0400e-04 L6_fnorm:3.1179e-04 L7_fnorm:3.1137e-04 L8_fnorm:3.1090e-04 L9_fnorm:3.1075e-04 L10_fnorm:3.1065e-04 L11_fnorm:3.1086e-04 L12_fnorm:3.1323e-04 L1_l1linf:4.3238e-04 L2_l1linf:3.9669e-04 L3_l1linf:5.1173e-04 L4_l1linf:5.1686e-04 L5_l1linf:4.0346e-04 L6_l1linf:4.8735e-04 L7_l1linf:5.3948e-04 L8_l1linf:4.4632e-04 L9_l1linf:3.8955e-04 L10_l1linf:3.4255e-04 L11_l1linf:3.5285e-04 L12_l1linf:4.1735e-04 L1_spectral:6.4558e-05 L2_spectral:5.0188e-05 L3_spectral:7.2604e-05 L4_spectral:8.0274e-05 L5_spectral:6.7300e-05 L6_spectral:8.2578e-05 L7_spectral:7.6456e-05 L8_spectral:7.0426e-05 L9_spectral:5.4091e-05 L10_spectral:3.6581e-05 L11_spectral:3.5295e-05 L12_spectral:5.9900e-05 v_norm:1.2716e-03 cos_v_-g_hvp:7.1934e-02 g_hvp_norm:2.9504e-01 cos_v_-g_t:1.4112e-01 g_t_norm:1.5191e-01 hv_norm:6.6179e-04 cos_v_hv:3.3760e-02 hg_norm:8.6964e+00 cos_g_hg:3.9328e-01 v_par:5.3838e-06 v_perp:1.2716e-03 L1_cos_v_neg_g:1.0570e-01 L1_v_norm:3.0867e-04 L2_cos_v_neg_g:6.2711e-02 L2_v_norm:3.0443e-04 L3_cos_v_neg_g:5.5669e-02 L3_v_norm:3.0892e-04 L4_cos_v_neg_g:5.7022e-02 L4_v_norm:3.0976e-04 L5_cos_v_neg_g:4.3268e-02 L5_v_norm:3.0400e-04 L6_cos_v_neg_g:5.6180e-02 L6_v_norm:3.1179e-04 L7_cos_v_neg_g:6.3723e-02 L7_v_norm:3.1137e-04 L8_cos_v_neg_g:6.4163e-02 L8_v_norm:3.1090e-04 L9_cos_v_neg_g:6.6566e-02 L9_v_norm:3.1075e-04 L10_cos_v_neg_g:8.0861e-02 L10_v_norm:3.1065e-04 L11_cos_v_neg_g:1.0081e-01 L11_v_norm:3.1086e-04 L12_cos_v_neg_g:1.3363e-01 L12_v_norm:3.1323e-04 diff --git 
a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/config.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/config.json new file mode 100644 index 0000000000000000000000000000000000000000..3304151d959f8dbf6a432f38417217ab125e07c0 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure_qk_nonorm_no_clip/layer_wise_new_code_rand", + "model": "d12", + "batch_size": 4, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 10000.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "shuffle_files": true, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 44, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500 + }, + "run_uuid": "0b068980-f311-47ed-ae52-48ae6fff6ec3", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..5552b80109442da176c3cf8c8ab927ba994a0137 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.4049031734466553, + "total_l1_linf_norm": 20623.29296875, + "total_spectral_norm": 2.404902935028076, + "embed_lm_head_update_fnorm": 1.3399829864501953, + "embed_lm_head_max_l1_linf_norm": 0.37447667121887207, + "embed_lm_head_max_spectral_norm": 0.29078319668769836, + "layer_1_update_fnorm": 0.6014585494995117, + "layer_1_max_l1_linf_norm": 0.43707793951034546, + "layer_1_max_spectral_norm": 0.012046471238136292, + "layer_2_update_fnorm": 0.5381199717521667, + "layer_2_max_l1_linf_norm": 0.44027993083000183, + "layer_2_max_spectral_norm": 0.012047190219163895, + "layer_3_update_fnorm": 0.5230478048324585, + "layer_3_max_l1_linf_norm": 0.4438554644584656, + "layer_3_max_spectral_norm": 0.012054955586791039, + "layer_4_update_fnorm": 0.5401294827461243, + "layer_4_max_l1_linf_norm": 0.40989768505096436, + "layer_4_max_spectral_norm": 0.012040291912853718, + "layer_5_update_fnorm": 0.573545515537262, + "layer_5_max_l1_linf_norm": 0.4029410481452942, + "layer_5_max_spectral_norm": 0.012045557610690594, + "layer_6_update_fnorm": 0.5741089582443237, + "layer_6_max_l1_linf_norm": 0.39823561906814575, + "layer_6_max_spectral_norm": 0.012047002092003822, + "layer_7_update_fnorm": 0.5879704356193542, + "layer_7_max_l1_linf_norm": 0.3979354798793793, + "layer_7_max_spectral_norm": 
0.012059814296662807, + "layer_8_update_fnorm": 0.5888318419456482, + "layer_8_max_l1_linf_norm": 0.39938271045684814, + "layer_8_max_spectral_norm": 0.01204587984830141, + "layer_9_update_fnorm": 0.5948294997215271, + "layer_9_max_l1_linf_norm": 0.39984387159347534, + "layer_9_max_spectral_norm": 0.012042691931128502, + "layer_10_update_fnorm": 0.5949435234069824, + "layer_10_max_l1_linf_norm": 0.39965859055519104, + "layer_10_max_spectral_norm": 0.012050079181790352, + "layer_11_update_fnorm": 0.5948403477668762, + "layer_11_max_l1_linf_norm": 0.4029242992401123, + "layer_11_max_spectral_norm": 0.01204319205135107, + "layer_12_update_fnorm": 0.5989630222320557, + "layer_12_max_l1_linf_norm": 0.40072101354599, + "layer_12_max_spectral_norm": 0.012047083117067814, + "block0_q_update_fnorm": 0.24510006606578827, + "block0_q_max_l1_linf_norm": 0.20684461295604706, + "block0_q_max_spectral_norm": 0.012043172493577003, + "block0_k_update_fnorm": 0.24491089582443237, + "block0_k_max_l1_linf_norm": 0.20788292586803436, + "block0_k_max_spectral_norm": 0.012042107991874218, + "block0_v_update_fnorm": 0.22542254626750946, + "block0_v_max_l1_linf_norm": 0.2024669647216797, + "block0_v_max_spectral_norm": 0.012037072330713272, + "block0_o_update_fnorm": 0.2229810655117035, + "block0_o_max_l1_linf_norm": 0.18797923624515533, + "block0_o_max_spectral_norm": 0.012033253908157349, + "block0_mlp_win_update_fnorm": 0.2652798295021057, + "block0_mlp_win_max_l1_linf_norm": 0.1561065912246704, + "block0_mlp_win_max_spectral_norm": 0.012046471238136292, + "block0_mlp_wout_update_fnorm": 0.26576483249664307, + "block0_mlp_wout_max_l1_linf_norm": 0.43707793951034546, + "block0_mlp_wout_max_spectral_norm": 0.012041225098073483, + "block3_q_update_fnorm": 0.20755623281002045, + "block3_q_max_l1_linf_norm": 0.21122795343399048, + "block3_q_max_spectral_norm": 0.01203505415469408, + "block3_k_update_fnorm": 0.18030281364917755, + "block3_k_max_l1_linf_norm": 0.21065151691436768, + "block3_k_max_spectral_norm": 0.01203816756606102, + "block3_v_update_fnorm": 0.1920904666185379, + "block3_v_max_l1_linf_norm": 0.2022152543067932, + "block3_v_max_spectral_norm": 0.01203257404267788, + "block3_o_update_fnorm": 0.2204466313123703, + "block3_o_max_l1_linf_norm": 0.18706616759300232, + "block3_o_max_spectral_norm": 0.012036621570587158, + "block3_mlp_win_update_fnorm": 0.2630015015602112, + "block3_mlp_win_max_l1_linf_norm": 0.1824118196964264, + "block3_mlp_win_max_spectral_norm": 0.012040291912853718, + "block3_mlp_wout_update_fnorm": 0.24765685200691223, + "block3_mlp_wout_max_l1_linf_norm": 0.40989768505096436, + "block3_mlp_wout_max_spectral_norm": 0.012036393396556377, + "block7_q_update_fnorm": 0.2387707382440567, + "block7_q_max_l1_linf_norm": 0.21463853120803833, + "block7_q_max_spectral_norm": 0.012041257694363594, + "block7_k_update_fnorm": 0.23847845196723938, + "block7_k_max_l1_linf_norm": 0.21976469457149506, + "block7_k_max_spectral_norm": 0.0120387002825737, + "block7_v_update_fnorm": 0.23345160484313965, + "block7_v_max_l1_linf_norm": 0.2078612744808197, + "block7_v_max_spectral_norm": 0.01204587984830141, + "block7_o_update_fnorm": 0.24310290813446045, + "block7_o_max_l1_linf_norm": 0.2030124068260193, + "block7_o_max_spectral_norm": 0.012041701003909111, + "block7_mlp_win_update_fnorm": 0.2475929707288742, + "block7_mlp_win_max_l1_linf_norm": 0.14203086495399475, + "block7_mlp_win_max_spectral_norm": 0.012032500468194485, + "block7_mlp_wout_update_fnorm": 0.24046923220157623, + 
"block7_mlp_wout_max_l1_linf_norm": 0.39938271045684814, + "block7_mlp_wout_max_spectral_norm": 0.01141207292675972, + "block11_q_update_fnorm": 0.24769331514835358, + "block11_q_max_l1_linf_norm": 0.21172116696834564, + "block11_q_max_spectral_norm": 0.012044747360050678, + "block11_k_update_fnorm": 0.24814511835575104, + "block11_k_max_l1_linf_norm": 0.21185386180877686, + "block11_k_max_spectral_norm": 0.012042009271681309, + "block11_v_update_fnorm": 0.2433619499206543, + "block11_v_max_l1_linf_norm": 0.20878706872463226, + "block11_v_max_spectral_norm": 0.012042309157550335, + "block11_o_update_fnorm": 0.24578824639320374, + "block11_o_max_l1_linf_norm": 0.20508360862731934, + "block11_o_max_spectral_norm": 0.012047083117067814, + "block11_mlp_win_update_fnorm": 0.24037480354309082, + "block11_mlp_win_max_l1_linf_norm": 0.14377747476100922, + "block11_mlp_win_max_spectral_norm": 0.011407685466110706, + "block11_mlp_wout_update_fnorm": 0.24146145582199097, + "block11_mlp_wout_max_l1_linf_norm": 0.40072101354599, + "block11_mlp_wout_max_spectral_norm": 0.011587929911911488, + "total_sharpness": 0.00980954710394144, + "block_total_sharpness": 0.012673922814428806, + "v_norm_block": 1.9969984292984009, + "v_T_H_v_block": 0.050543636083602905, + "v_norm": 2.4049031734466553, + "ip_v_neg_g_hvp": 0.07364193350076675, + "cos_v_neg_g_hvp": 0.05663074180483818, + "g_hvp_norm": 0.5407236218452454, + "ip_v_neg_g_t": 0.0737275630235672, + "cos_v_neg_g_t": 0.0637601986527443, + "g_t_norm": 0.48082008957862854, + "g_norm": 0.5407236218452454, + "hv_norm": 0.8054064512252808, + "cos_v_hv": 0.02929081581532955, + "hg_norm": 25.01093101501465, + "cos_g_hg": 0.39915117621421814, + "v_parallel_norm": 0.008272696286439896, + "v_perp_norm": 2.404888868331909, + "embed_lm_head_v_norm": 1.3399829864501953, + "embed_lm_head_cos_v_neg_g": 0.10945083945989609, + "layer_1_v_norm": 0.6014585494995117, + "layer_1_cos_v_neg_g": 0.05313713848590851, + "layer_2_v_norm": 0.5381199717521667, + "layer_2_cos_v_neg_g": 0.040902648121118546, + "layer_3_v_norm": 0.5230478048324585, + "layer_3_cos_v_neg_g": 0.03919277340173721, + "layer_4_v_norm": 0.5401294827461243, + "layer_4_cos_v_neg_g": 0.04930414259433746, + "layer_5_v_norm": 0.573545515537262, + "layer_5_cos_v_neg_g": 0.05607765167951584, + "layer_6_v_norm": 0.5741089582443237, + "layer_6_cos_v_neg_g": 0.060303375124931335, + "layer_7_v_norm": 0.5879704356193542, + "layer_7_cos_v_neg_g": 0.060509033501148224, + "layer_8_v_norm": 0.5888318419456482, + "layer_8_cos_v_neg_g": 0.06679538637399673, + "layer_9_v_norm": 0.5948294997215271, + "layer_9_cos_v_neg_g": 0.06739525496959686, + "layer_10_v_norm": 0.5949435234069824, + "layer_10_cos_v_neg_g": 0.0699128583073616, + "layer_11_v_norm": 0.5948403477668762, + "layer_11_cos_v_neg_g": 0.07891663908958435, + "layer_12_v_norm": 0.5989630222320557, + "layer_12_cos_v_neg_g": 0.08514448255300522, + "block0_q_v_norm": 0.24510006606578827, + "block0_q_cos_v_neg_g": 0.08740019053220749, + "block0_k_v_norm": 0.24491089582443237, + "block0_k_cos_v_neg_g": 0.09100501239299774, + "block0_v_v_norm": 0.22542254626750946, + "block0_v_cos_v_neg_g": 0.04549550637602806, + "block0_o_v_norm": 0.2229810655117035, + "block0_o_cos_v_neg_g": 0.06838736683130264, + "block0_mlp_win_v_norm": 0.2652798295021057, + "block0_mlp_win_cos_v_neg_g": 0.07021117210388184, + "block0_mlp_wout_v_norm": 0.26576483249664307, + "block0_mlp_wout_cos_v_neg_g": 0.08582088351249695, + "block3_q_v_norm": 0.20755623281002045, + "block3_q_cos_v_neg_g": 
0.058499764651060104, + "block3_k_v_norm": 0.18030281364917755, + "block3_k_cos_v_neg_g": 0.06542392075061798, + "block3_v_v_norm": 0.1920904666185379, + "block3_v_cos_v_neg_g": 0.048702090978622437, + "block3_o_v_norm": 0.2204466313123703, + "block3_o_cos_v_neg_g": 0.05763508751988411, + "block3_mlp_win_v_norm": 0.2630015015602112, + "block3_mlp_win_cos_v_neg_g": 0.0579199343919754, + "block3_mlp_wout_v_norm": 0.24765685200691223, + "block3_mlp_wout_cos_v_neg_g": 0.0901060700416565, + "block7_q_v_norm": 0.2387707382440567, + "block7_q_cos_v_neg_g": 0.06950144469738007, + "block7_k_v_norm": 0.23847845196723938, + "block7_k_cos_v_neg_g": 0.08222294598817825, + "block7_v_v_norm": 0.23345160484313965, + "block7_v_cos_v_neg_g": 0.05159018561244011, + "block7_o_v_norm": 0.24310290813446045, + "block7_o_cos_v_neg_g": 0.08086547255516052, + "block7_mlp_win_v_norm": 0.2475929707288742, + "block7_mlp_win_cos_v_neg_g": 0.09240373969078064, + "block7_mlp_wout_v_norm": 0.24046923220157623, + "block7_mlp_wout_cos_v_neg_g": 0.13365711271762848, + "block11_q_v_norm": 0.24769331514835358, + "block11_q_cos_v_neg_g": 0.09685131162405014, + "block11_k_v_norm": 0.24814511835575104, + "block11_k_cos_v_neg_g": 0.10042208433151245, + "block11_v_v_norm": 0.2433619499206543, + "block11_v_cos_v_neg_g": 0.07707724720239639, + "block11_o_v_norm": 0.24578824639320374, + "block11_o_cos_v_neg_g": 0.09198888391256332, + "block11_mlp_win_v_norm": 0.24037480354309082, + "block11_mlp_win_cos_v_neg_g": 0.11321122199296951, + "block11_mlp_wout_v_norm": 0.24146145582199097, + "block11_mlp_wout_cos_v_neg_g": 0.09817446768283844, + "embed_lm_head_sharpness": 0.0006660206709057093, + "layer_1_sharpness": 0.009484066627919674, + "layer_2_sharpness": 0.002365914173424244, + "layer_3_sharpness": 0.0031180952209979296, + "layer_4_sharpness": 0.0030117977876216173, + "layer_5_sharpness": 0.0024321586824953556, + "layer_6_sharpness": 0.002215801738202572, + "layer_7_sharpness": 0.0017587431939318776, + "layer_8_sharpness": 0.0013749322388321161, + "layer_9_sharpness": 0.000898809521459043, + "layer_10_sharpness": 0.0006231989245861769, + "layer_11_sharpness": 0.0006394275114871562, + "layer_12_sharpness": 0.000642823928501457, + "block0_q_sharpness": 0.0006061494932509959, + "block0_k_sharpness": 0.0007608253508806229, + "block0_v_sharpness": 0.008907238952815533, + "block0_o_sharpness": 0.002436283975839615, + "block0_mlp_win_sharpness": 0.0025882499758154154, + "block0_mlp_wout_sharpness": 0.0025385182816535234, + "block3_q_sharpness": 0.0001439112238585949, + "block3_k_sharpness": 0.004633678123354912, + "block3_v_sharpness": 0.004475160501897335, + "block3_o_sharpness": 0.000582083419431001, + "block3_mlp_win_sharpness": 0.00021705286053474993, + "block3_mlp_wout_sharpness": 0.00027028046315535903, + "block7_q_sharpness": 0.00017105838924180716, + "block7_k_sharpness": 0.00032400203053839505, + "block7_v_sharpness": 0.002192428568378091, + "block7_o_sharpness": 0.0003219507052563131, + "block7_mlp_win_sharpness": 0.00024241639766842127, + "block7_mlp_wout_sharpness": 0.0002768418926279992, + "block11_q_sharpness": 3.307625956949778e-05, + "block11_k_sharpness": 5.275625517242588e-05, + "block11_v_sharpness": 0.0002608720969874412, + "block11_o_sharpness": 7.998516230145469e-05, + "block11_mlp_win_sharpness": 0.0004185268480796367, + "block11_mlp_wout_sharpness": 0.0006802555290050805, + "sum_layer_numerators": 0.00945830420240798, + "block_diag_sharpness": 0.0023716895024351262, + "cross_layer_sharpness": 0.01030223331199368 +} \ 
No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_10000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..0b60bdc6e17acfd7aa731582569f6d483e847bff --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_10000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.0010683872969821095, + "total_l1_linf_norm": 8.781900405883789, + "total_spectral_norm": 0.0010683872969821095, + "embed_lm_head_update_fnorm": 0.0006621339125558734, + "embed_lm_head_max_l1_linf_norm": 0.00017065659631043673, + "embed_lm_head_max_spectral_norm": 0.0001412596320733428, + "layer_1_update_fnorm": 0.00018714016187004745, + "layer_1_max_l1_linf_norm": 0.0002836926141753793, + "layer_1_max_spectral_norm": 6.228373422345612e-06, + "layer_2_update_fnorm": 0.00011353904847055674, + "layer_2_max_l1_linf_norm": 0.00032169645419344306, + "layer_2_max_spectral_norm": 7.196299520728644e-06, + "layer_3_update_fnorm": 0.00012411028728820384, + "layer_3_max_l1_linf_norm": 0.0003684040857478976, + "layer_3_max_spectral_norm": 8.238881491706707e-06, + "layer_4_update_fnorm": 0.00017756785382516682, + "layer_4_max_l1_linf_norm": 0.0003997136082034558, + "layer_4_max_spectral_norm": 1.0647908311511856e-05, + "layer_5_update_fnorm": 0.0002171425730921328, + "layer_5_max_l1_linf_norm": 0.0003733743214979768, + "layer_5_max_spectral_norm": 9.7318334155716e-06, + "layer_6_update_fnorm": 0.0002510473132133484, + "layer_6_max_l1_linf_norm": 0.00037779510603286326, + "layer_6_max_spectral_norm": 8.333004188898485e-06, + "layer_7_update_fnorm": 0.00028286228189244866, + "layer_7_max_l1_linf_norm": 0.00038196827517822385, + "layer_7_max_spectral_norm": 8.475041795463767e-06, + "layer_8_update_fnorm": 0.00028397340793162584, + "layer_8_max_l1_linf_norm": 0.0004059593193233013, + "layer_8_max_spectral_norm": 8.875299499777611e-06, + "layer_9_update_fnorm": 0.0002889119496103376, + "layer_9_max_l1_linf_norm": 0.0004080194339621812, + "layer_9_max_spectral_norm": 8.926182090363e-06, + "layer_10_update_fnorm": 0.0002926751913037151, + "layer_10_max_l1_linf_norm": 0.0003780064871534705, + "layer_10_max_spectral_norm": 8.293317478091922e-06, + "layer_11_update_fnorm": 0.0002802395902108401, + "layer_11_max_l1_linf_norm": 0.0003443386231083423, + "layer_11_max_spectral_norm": 7.663756150577683e-06, + "layer_12_update_fnorm": 0.0002994879614561796, + "layer_12_max_l1_linf_norm": 0.0003388107870705426, + "layer_12_max_spectral_norm": 7.542928869952448e-06, + "block0_q_update_fnorm": 0.0001082786257029511, + "block0_q_max_l1_linf_norm": 0.00010556662164162844, + "block0_q_max_spectral_norm": 6.0324159676383715e-06, + "block0_k_update_fnorm": 0.00010604604904074222, + "block0_k_max_l1_linf_norm": 0.00010490574641153216, + "block0_k_max_spectral_norm": 6.031150860508205e-06, + "block0_v_update_fnorm": 4.967112545273267e-05, + "block0_v_max_l1_linf_norm": 6.296216452028602e-05, + "block0_v_max_spectral_norm": 6.013971869833767e-06, + "block0_o_update_fnorm": 6.946002395125106e-05, + "block0_o_max_l1_linf_norm": 7.669934711884707e-05, + "block0_o_max_spectral_norm": 6.0168194977450185e-06, + "block0_mlp_win_update_fnorm": 5.320860873325728e-05, + "block0_mlp_win_max_l1_linf_norm": 9.650986612541601e-05, + 
"block0_mlp_win_max_spectral_norm": 6.013536221871618e-06, + "block0_mlp_wout_update_fnorm": 4.277117477613501e-05, + "block0_mlp_wout_max_l1_linf_norm": 7.009264663793147e-05, + "block0_mlp_wout_max_spectral_norm": 5.998047527100425e-06, + "block3_q_update_fnorm": 1.5121737305889837e-05, + "block3_q_max_l1_linf_norm": 3.325939906062558e-05, + "block3_q_max_spectral_norm": 4.668119800044224e-06, + "block3_k_update_fnorm": 2.062075145659037e-05, + "block3_k_max_l1_linf_norm": 4.267603071639314e-05, + "block3_k_max_spectral_norm": 5.995440005790442e-06, + "block3_v_update_fnorm": 4.490612627705559e-05, + "block3_v_max_l1_linf_norm": 5.979742854833603e-05, + "block3_v_max_spectral_norm": 6.014497557771392e-06, + "block3_o_update_fnorm": 6.376730016199872e-05, + "block3_o_max_l1_linf_norm": 5.4576412367168814e-05, + "block3_o_max_spectral_norm": 6.016125098540215e-06, + "block3_mlp_win_update_fnorm": 9.328132000518963e-05, + "block3_mlp_win_max_l1_linf_norm": 8.990048809209839e-05, + "block3_mlp_win_max_spectral_norm": 6.030219083186239e-06, + "block3_mlp_wout_update_fnorm": 0.00012547273945529014, + "block3_mlp_wout_max_l1_linf_norm": 0.0002025665162364021, + "block3_mlp_wout_max_spectral_norm": 6.035022579453653e-06, + "block7_q_update_fnorm": 0.00011565088061615825, + "block7_q_max_l1_linf_norm": 0.00010332887177355587, + "block7_q_max_spectral_norm": 6.045063855708577e-06, + "block7_k_update_fnorm": 0.00012258780770935118, + "block7_k_max_l1_linf_norm": 0.00010549771832302213, + "block7_k_max_spectral_norm": 6.037511411705054e-06, + "block7_v_update_fnorm": 7.256023673107848e-05, + "block7_v_max_l1_linf_norm": 9.043968748301268e-05, + "block7_v_max_spectral_norm": 6.025789389241254e-06, + "block7_o_update_fnorm": 0.000123124715173617, + "block7_o_max_l1_linf_norm": 0.00010323099559172988, + "block7_o_max_spectral_norm": 6.0448046497185715e-06, + "block7_mlp_win_update_fnorm": 0.0001328308426309377, + "block7_mlp_win_max_l1_linf_norm": 8.25900788186118e-05, + "block7_mlp_win_max_spectral_norm": 6.0423449212976266e-06, + "block7_mlp_wout_update_fnorm": 0.00011826973786810413, + "block7_mlp_wout_max_l1_linf_norm": 0.0001948333956534043, + "block7_mlp_wout_max_spectral_norm": 5.697139840776799e-06, + "block11_q_update_fnorm": 0.00012370779586490244, + "block11_q_max_l1_linf_norm": 0.00010802998440340161, + "block11_q_max_spectral_norm": 6.0256152210058644e-06, + "block11_k_update_fnorm": 0.00012451382644940168, + "block11_k_max_l1_linf_norm": 0.00010958060011034831, + "block11_k_max_spectral_norm": 6.022624802426435e-06, + "block11_v_update_fnorm": 0.00011909680324606597, + "block11_v_max_l1_linf_norm": 0.0001019324263324961, + "block11_v_max_spectral_norm": 6.036953436705517e-06, + "block11_o_update_fnorm": 0.00012381297710817307, + "block11_o_max_l1_linf_norm": 0.00010348988871555775, + "block11_o_max_spectral_norm": 6.028860752849141e-06, + "block11_mlp_win_update_fnorm": 0.00011803502275142819, + "block11_mlp_win_max_l1_linf_norm": 8.675590652273968e-05, + "block11_mlp_win_max_spectral_norm": 5.805592991237063e-06, + "block11_mlp_wout_update_fnorm": 0.00012372883793432266, + "block11_mlp_wout_max_l1_linf_norm": 0.0002269768010592088, + "block11_mlp_wout_max_spectral_norm": 6.031924385752063e-06, + "total_sharpness": 0.0709008052945137, + "block_total_sharpness": 0.11052391678094864, + "v_norm_block": 0.0008384687826037407, + "v_T_H_v_block": 7.770162113729384e-08, + "v_norm": 0.0010683877626433969, + "ip_v_neg_g_hvp": 1.225544747285312e-05, + "cos_v_neg_g_hvp": 0.02166965790092945, + 
"g_hvp_norm": 0.5293564796447754, + "ip_v_neg_g_t": 1.2237946066306904e-05, + "cos_v_neg_g_t": 0.007286413107067347, + "g_t_norm": 1.5720481872558594, + "g_norm": 0.5293564796447754, + "hv_norm": 0.021398257464170456, + "cos_v_hv": 0.0035399836488068104, + "hg_norm": 20848.1796875, + "cos_g_hg": 0.06822162121534348, + "v_parallel_norm": 2.943272647826234e-06, + "v_perp_norm": 0.001068384270183742, + "embed_lm_head_v_norm": 0.0006621346692554653, + "embed_lm_head_cos_v_neg_g": 0.03771234676241875, + "layer_1_v_norm": 0.00018714282487053424, + "layer_1_cos_v_neg_g": 0.013302105478942394, + "layer_2_v_norm": 0.00011354345042491332, + "layer_2_cos_v_neg_g": 0.003828673157840967, + "layer_3_v_norm": 0.0001241143181687221, + "layer_3_cos_v_neg_g": 0.01599840074777603, + "layer_4_v_norm": 0.0001775706623448059, + "layer_4_cos_v_neg_g": 0.013181786052882671, + "layer_5_v_norm": 0.0002171448722947389, + "layer_5_cos_v_neg_g": 0.018504569306969643, + "layer_6_v_norm": 0.0002510493213776499, + "layer_6_cos_v_neg_g": 0.02332083322107792, + "layer_7_v_norm": 0.0002828640572261065, + "layer_7_cos_v_neg_g": 0.02301476150751114, + "layer_8_v_norm": 0.00028397515416145325, + "layer_8_cos_v_neg_g": 0.023356324061751366, + "layer_9_v_norm": 0.000288913695840165, + "layer_9_cos_v_neg_g": 0.025933926925063133, + "layer_10_v_norm": 0.00029267690842971206, + "layer_10_cos_v_neg_g": 0.029618827626109123, + "layer_11_v_norm": 0.00028024136554449797, + "layer_11_cos_v_neg_g": 0.03842485696077347, + "layer_12_v_norm": 0.00029948962037451565, + "layer_12_cos_v_neg_g": 0.0644778311252594, + "block0_q_v_norm": 0.00010828324593603611, + "block0_q_cos_v_neg_g": 0.013164536096155643, + "block0_k_v_norm": 0.00010605077113723382, + "block0_k_cos_v_neg_g": 0.009529534727334976, + "block0_v_v_norm": 4.968119174009189e-05, + "block0_v_cos_v_neg_g": 0.026310136541724205, + "block0_o_v_norm": 6.94672271492891e-05, + "block0_o_cos_v_neg_g": 0.019792042672634125, + "block0_mlp_win_v_norm": 5.3218005632516e-05, + "block0_mlp_win_cos_v_neg_g": 0.01830437034368515, + "block0_mlp_wout_v_norm": 4.27828636020422e-05, + "block0_mlp_wout_cos_v_neg_g": 0.033850137144327164, + "block3_q_v_norm": 1.5154766515479423e-05, + "block3_q_cos_v_neg_g": 0.013629673048853874, + "block3_k_v_norm": 2.0644984033424407e-05, + "block3_k_cos_v_neg_g": 0.033622317016124725, + "block3_v_v_norm": 4.49172621301841e-05, + "block3_v_cos_v_neg_g": 0.0025764561723917723, + "block3_o_v_norm": 6.37751363683492e-05, + "block3_o_cos_v_neg_g": 0.013740417547523975, + "block3_mlp_win_v_norm": 9.328667510999367e-05, + "block3_mlp_win_cos_v_neg_g": 0.013416041620075703, + "block3_mlp_wout_v_norm": 0.0001254767266800627, + "block3_mlp_wout_cos_v_neg_g": 0.0351952388882637, + "block7_q_v_norm": 0.00011565520981093869, + "block7_q_cos_v_neg_g": 0.026856979355216026, + "block7_k_v_norm": 0.00012259188224561512, + "block7_k_cos_v_neg_g": 0.06519322842359543, + "block7_v_v_norm": 7.256712706293911e-05, + "block7_v_cos_v_neg_g": 0.019663730636239052, + "block7_o_v_norm": 0.00012312877515796572, + "block7_o_cos_v_neg_g": 0.0786457434296608, + "block7_mlp_win_v_norm": 0.00013283461157698184, + "block7_mlp_win_cos_v_neg_g": 0.02562694624066353, + "block7_mlp_wout_v_norm": 0.00011827396519947797, + "block7_mlp_wout_cos_v_neg_g": 0.10979553312063217, + "block11_q_v_norm": 0.00012371184129733592, + "block11_q_cos_v_neg_g": 0.06618954986333847, + "block11_k_v_norm": 0.0001245178427780047, + "block11_k_cos_v_neg_g": 0.09239233285188675, + "block11_v_v_norm": 0.00011910100147360936, + 
"block11_v_cos_v_neg_g": 0.037751998752355576, + "block11_o_v_norm": 0.00012381700798869133, + "block11_o_cos_v_neg_g": 0.08249440044164658, + "block11_mlp_win_v_norm": 0.00011803925735875964, + "block11_mlp_win_cos_v_neg_g": 0.08329349011182785, + "block11_mlp_wout_v_norm": 0.00012373288336675614, + "block11_mlp_wout_cos_v_neg_g": 0.07388201355934143, + "embed_lm_head_sharpness": 0.0054643950425088406, + "layer_1_sharpness": 0.6332511305809021, + "layer_2_sharpness": 0.2661033272743225, + "layer_3_sharpness": 0.03785308450460434, + "layer_4_sharpness": 0.03950493410229683, + "layer_5_sharpness": 0.008561300113797188, + "layer_6_sharpness": 0.004967080429196358, + "layer_7_sharpness": 0.003952634055167437, + "layer_8_sharpness": 0.0036328977439552546, + "layer_9_sharpness": 0.0019071652786806226, + "layer_10_sharpness": 0.0009569422109052539, + "layer_11_sharpness": 0.0008323352667503059, + "layer_12_sharpness": 0.0009125001379288733, + "block0_q_sharpness": 0.0002776144247036427, + "block0_k_sharpness": 0.00014550909691024572, + "block0_v_sharpness": 1.1824417114257812, + "block0_o_sharpness": 0.1341741532087326, + "block0_mlp_win_sharpness": 0.4790211617946625, + "block0_mlp_wout_sharpness": 1.2591326236724854, + "block3_q_sharpness": 0.0005671285325661302, + "block3_k_sharpness": -0.07132484763860703, + "block3_v_sharpness": 0.06313315778970718, + "block3_o_sharpness": 0.011495418846607208, + "block3_mlp_win_sharpness": 0.010453109629452229, + "block3_mlp_wout_sharpness": 0.008039042353630066, + "block7_q_sharpness": 0.0002173285756725818, + "block7_k_sharpness": 0.0001062912488123402, + "block7_v_sharpness": 0.014018232934176922, + "block7_o_sharpness": 0.00011099394760094583, + "block7_mlp_win_sharpness": 0.00163937802426517, + "block7_mlp_wout_sharpness": 0.00012880965368822217, + "block11_q_sharpness": 0.0001177577069029212, + "block11_k_sharpness": 6.490342639153823e-05, + "block11_v_sharpness": 0.0001793390401871875, + "block11_o_sharpness": 4.676827302318998e-05, + "block11_mlp_win_sharpness": 0.0007501038489863276, + "block11_mlp_wout_sharpness": 0.0014002479147166014, + "sum_layer_numerators": 2.9151714577215228e-08, + "block_diag_sharpness": 0.041465824713932224, + "cross_layer_sharpness": 0.06905809206701641 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..79fe0b232c1c05670d54e33c86fe070103d827a4 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_1500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.407736301422119, + "total_l1_linf_norm": 20593.767578125, + "total_spectral_norm": 2.407736301422119, + "embed_lm_head_update_fnorm": 1.3265166282653809, + "embed_lm_head_max_l1_linf_norm": 0.33925020694732666, + "embed_lm_head_max_spectral_norm": 0.27568715810775757, + "layer_1_update_fnorm": 0.5962146520614624, + "layer_1_max_l1_linf_norm": 0.43087249994277954, + "layer_1_max_spectral_norm": 0.01204593200236559, + "layer_2_update_fnorm": 0.5373536348342896, + "layer_2_max_l1_linf_norm": 0.4098469018936157, + "layer_2_max_spectral_norm": 0.012044835835695267, + "layer_3_update_fnorm": 0.519766628742218, + "layer_3_max_l1_linf_norm": 0.40659964084625244, + "layer_3_max_spectral_norm": 
0.012050115503370762, + "layer_4_update_fnorm": 0.5447944402694702, + "layer_4_max_l1_linf_norm": 0.3918645679950714, + "layer_4_max_spectral_norm": 0.012048809789121151, + "layer_5_update_fnorm": 0.5794599056243896, + "layer_5_max_l1_linf_norm": 0.4048880338668823, + "layer_5_max_spectral_norm": 0.01204847451299429, + "layer_6_update_fnorm": 0.5847160816192627, + "layer_6_max_l1_linf_norm": 0.4097803831100464, + "layer_6_max_spectral_norm": 0.012061497196555138, + "layer_7_update_fnorm": 0.592595100402832, + "layer_7_max_l1_linf_norm": 0.41069647669792175, + "layer_7_max_spectral_norm": 0.012048025615513325, + "layer_8_update_fnorm": 0.595936119556427, + "layer_8_max_l1_linf_norm": 0.41171330213546753, + "layer_8_max_spectral_norm": 0.012045512907207012, + "layer_9_update_fnorm": 0.5986543893814087, + "layer_9_max_l1_linf_norm": 0.41213297843933105, + "layer_9_max_spectral_norm": 0.012045701965689659, + "layer_10_update_fnorm": 0.6002227663993835, + "layer_10_max_l1_linf_norm": 0.4089599847793579, + "layer_10_max_spectral_norm": 0.0120411217212677, + "layer_11_update_fnorm": 0.6004051566123962, + "layer_11_max_l1_linf_norm": 0.4041251540184021, + "layer_11_max_spectral_norm": 0.012044967152178288, + "layer_12_update_fnorm": 0.6027033925056458, + "layer_12_max_l1_linf_norm": 0.397671639919281, + "layer_12_max_spectral_norm": 0.012042907066643238, + "block0_q_update_fnorm": 0.24612075090408325, + "block0_q_max_l1_linf_norm": 0.20826908946037292, + "block0_q_max_spectral_norm": 0.0120426369830966, + "block0_k_update_fnorm": 0.2465812861919403, + "block0_k_max_l1_linf_norm": 0.2098037749528885, + "block0_k_max_spectral_norm": 0.01203991286456585, + "block0_v_update_fnorm": 0.2020758092403412, + "block0_v_max_l1_linf_norm": 0.20309188961982727, + "block0_v_max_spectral_norm": 0.012034248560667038, + "block0_o_update_fnorm": 0.234167218208313, + "block0_o_max_l1_linf_norm": 0.1976887583732605, + "block0_o_max_spectral_norm": 0.012042193673551083, + "block0_mlp_win_update_fnorm": 0.2653525769710541, + "block0_mlp_win_max_l1_linf_norm": 0.1579090803861618, + "block0_mlp_win_max_spectral_norm": 0.012042800895869732, + "block0_mlp_wout_update_fnorm": 0.2606048583984375, + "block0_mlp_wout_max_l1_linf_norm": 0.43087249994277954, + "block0_mlp_wout_max_spectral_norm": 0.01204593200236559, + "block3_q_update_fnorm": 0.20516513288021088, + "block3_q_max_l1_linf_norm": 0.2128249853849411, + "block3_q_max_spectral_norm": 0.012037728913128376, + "block3_k_update_fnorm": 0.18346858024597168, + "block3_k_max_l1_linf_norm": 0.20844349265098572, + "block3_k_max_spectral_norm": 0.012041984125971794, + "block3_v_update_fnorm": 0.19180689752101898, + "block3_v_max_l1_linf_norm": 0.20685911178588867, + "block3_v_max_spectral_norm": 0.012033670209348202, + "block3_o_update_fnorm": 0.2345457822084427, + "block3_o_max_l1_linf_norm": 0.1965840458869934, + "block3_o_max_spectral_norm": 0.01203908957540989, + "block3_mlp_win_update_fnorm": 0.26696786284446716, + "block3_mlp_win_max_l1_linf_norm": 0.17830833792686462, + "block3_mlp_win_max_spectral_norm": 0.012048809789121151, + "block3_mlp_wout_update_fnorm": 0.2404828518629074, + "block3_mlp_wout_max_l1_linf_norm": 0.3918645679950714, + "block3_mlp_wout_max_spectral_norm": 0.011410162784159184, + "block7_q_update_fnorm": 0.24269963800907135, + "block7_q_max_l1_linf_norm": 0.20949095487594604, + "block7_q_max_spectral_norm": 0.012040797621011734, + "block7_k_update_fnorm": 0.24340380728244781, + "block7_k_max_l1_linf_norm": 0.21374846994876862, + 
"block7_k_max_spectral_norm": 0.012039413675665855, + "block7_v_update_fnorm": 0.23427416384220123, + "block7_v_max_l1_linf_norm": 0.2105935961008072, + "block7_v_max_spectral_norm": 0.012039462104439735, + "block7_o_update_fnorm": 0.24762941896915436, + "block7_o_max_l1_linf_norm": 0.2079167366027832, + "block7_o_max_spectral_norm": 0.012045512907207012, + "block7_mlp_win_update_fnorm": 0.24303290247917175, + "block7_mlp_win_max_l1_linf_norm": 0.1398799866437912, + "block7_mlp_win_max_spectral_norm": 0.011862007901072502, + "block7_mlp_wout_update_fnorm": 0.24821920692920685, + "block7_mlp_wout_max_l1_linf_norm": 0.41171330213546753, + "block7_mlp_wout_max_spectral_norm": 0.011386648751795292, + "block11_q_update_fnorm": 0.24890843033790588, + "block11_q_max_l1_linf_norm": 0.2100881040096283, + "block11_q_max_spectral_norm": 0.012037105858325958, + "block11_k_update_fnorm": 0.24913765490055084, + "block11_k_max_l1_linf_norm": 0.21370989084243774, + "block11_k_max_spectral_norm": 0.012041972950100899, + "block11_v_update_fnorm": 0.24616098403930664, + "block11_v_max_l1_linf_norm": 0.20731519162654877, + "block11_v_max_spectral_norm": 0.012041876092553139, + "block11_o_update_fnorm": 0.24880723655223846, + "block11_o_max_l1_linf_norm": 0.2078762799501419, + "block11_o_max_spectral_norm": 0.012042907066643238, + "block11_mlp_win_update_fnorm": 0.24345962703227997, + "block11_mlp_win_max_l1_linf_norm": 0.1632135510444641, + "block11_mlp_win_max_spectral_norm": 0.011377681978046894, + "block11_mlp_wout_update_fnorm": 0.23945772647857666, + "block11_mlp_wout_max_l1_linf_norm": 0.397671639919281, + "block11_mlp_wout_max_spectral_norm": 0.01137975137680769, + "total_sharpness": 0.0077046602964401245, + "block_total_sharpness": 0.009882933460175991, + "v_norm_block": 2.0093648433685303, + "v_T_H_v_block": 0.03990280628204346, + "v_norm": 2.407736301422119, + "ip_v_neg_g_hvp": 0.0698263868689537, + "cos_v_neg_g_hvp": 0.05253567546606064, + "g_hvp_norm": 0.5520219206809998, + "ip_v_neg_g_t": 0.0701073557138443, + "cos_v_neg_g_t": 0.05954330787062645, + "g_t_norm": 0.4890144467353821, + "g_norm": 0.5520219206809998, + "hv_norm": 0.7601580023765564, + "cos_v_hv": 0.02440386265516281, + "hg_norm": 17.08331871032715, + "cos_g_hg": 0.6257944703102112, + "v_parallel_norm": 0.007702235598117113, + "v_perp_norm": 2.407724142074585, + "embed_lm_head_v_norm": 1.3265166282653809, + "embed_lm_head_cos_v_neg_g": 0.10202466696500778, + "layer_1_v_norm": 0.5962146520614624, + "layer_1_cos_v_neg_g": 0.03681096062064171, + "layer_2_v_norm": 0.5373536348342896, + "layer_2_cos_v_neg_g": 0.04218572378158569, + "layer_3_v_norm": 0.519766628742218, + "layer_3_cos_v_neg_g": 0.0434335358440876, + "layer_4_v_norm": 0.5447944402694702, + "layer_4_cos_v_neg_g": 0.04812544956803322, + "layer_5_v_norm": 0.5794599056243896, + "layer_5_cos_v_neg_g": 0.055600687861442566, + "layer_6_v_norm": 0.5847160816192627, + "layer_6_cos_v_neg_g": 0.05764828994870186, + "layer_7_v_norm": 0.592595100402832, + "layer_7_cos_v_neg_g": 0.05819930136203766, + "layer_8_v_norm": 0.595936119556427, + "layer_8_cos_v_neg_g": 0.05953795090317726, + "layer_9_v_norm": 0.5986543893814087, + "layer_9_cos_v_neg_g": 0.06151174008846283, + "layer_10_v_norm": 0.6002227663993835, + "layer_10_cos_v_neg_g": 0.06743349134922028, + "layer_11_v_norm": 0.6004051566123962, + "layer_11_cos_v_neg_g": 0.07594150304794312, + "layer_12_v_norm": 0.6027033925056458, + "layer_12_cos_v_neg_g": 0.09194545447826385, + "block0_q_v_norm": 0.24612075090408325, + 
"block0_q_cos_v_neg_g": 0.07632362097501755, + "block0_k_v_norm": 0.2465812861919403, + "block0_k_cos_v_neg_g": 0.07789275050163269, + "block0_v_v_norm": 0.2020758092403412, + "block0_v_cos_v_neg_g": 0.03134872391819954, + "block0_o_v_norm": 0.234167218208313, + "block0_o_cos_v_neg_g": 0.0648389607667923, + "block0_mlp_win_v_norm": 0.2653525769710541, + "block0_mlp_win_cos_v_neg_g": 0.07618004083633423, + "block0_mlp_wout_v_norm": 0.2606048583984375, + "block0_mlp_wout_cos_v_neg_g": 0.09721753746271133, + "block3_q_v_norm": 0.20516513288021088, + "block3_q_cos_v_neg_g": 0.06054970622062683, + "block3_k_v_norm": 0.18346858024597168, + "block3_k_cos_v_neg_g": 0.07214616239070892, + "block3_v_v_norm": 0.19180689752101898, + "block3_v_cos_v_neg_g": 0.04096119850873947, + "block3_o_v_norm": 0.2345457822084427, + "block3_o_cos_v_neg_g": 0.07136455178260803, + "block3_mlp_win_v_norm": 0.26696786284446716, + "block3_mlp_win_cos_v_neg_g": 0.06405819952487946, + "block3_mlp_wout_v_norm": 0.2404828518629074, + "block3_mlp_wout_cos_v_neg_g": 0.1148061454296112, + "block7_q_v_norm": 0.24269963800907135, + "block7_q_cos_v_neg_g": 0.07293789833784103, + "block7_k_v_norm": 0.24340380728244781, + "block7_k_cos_v_neg_g": 0.0978701189160347, + "block7_v_v_norm": 0.23427416384220123, + "block7_v_cos_v_neg_g": 0.043567970395088196, + "block7_o_v_norm": 0.24762941896915436, + "block7_o_cos_v_neg_g": 0.08392538875341415, + "block7_mlp_win_v_norm": 0.24303290247917175, + "block7_mlp_win_cos_v_neg_g": 0.0927039384841919, + "block7_mlp_wout_v_norm": 0.24821920692920685, + "block7_mlp_wout_cos_v_neg_g": 0.14945769309997559, + "block11_q_v_norm": 0.24890843033790588, + "block11_q_cos_v_neg_g": 0.10322920233011246, + "block11_k_v_norm": 0.24913765490055084, + "block11_k_cos_v_neg_g": 0.10656007379293442, + "block11_v_v_norm": 0.24616098403930664, + "block11_v_cos_v_neg_g": 0.07916777580976486, + "block11_o_v_norm": 0.24880723655223846, + "block11_o_cos_v_neg_g": 0.10628684610128403, + "block11_mlp_win_v_norm": 0.24345962703227997, + "block11_mlp_win_cos_v_neg_g": 0.1280120611190796, + "block11_mlp_wout_v_norm": 0.23945772647857666, + "block11_mlp_wout_cos_v_neg_g": 0.10082807391881943, + "embed_lm_head_sharpness": 0.0005587597261182964, + "layer_1_sharpness": 0.00766791170462966, + "layer_2_sharpness": 0.0011072061024606228, + "layer_3_sharpness": 0.002402602694928646, + "layer_4_sharpness": 0.002252846024930477, + "layer_5_sharpness": 0.0017059801612049341, + "layer_6_sharpness": 0.001758632599376142, + "layer_7_sharpness": 0.0018745603738352656, + "layer_8_sharpness": 0.0013935762690380216, + "layer_9_sharpness": 0.0009538530721329153, + "layer_10_sharpness": 0.0005868198350071907, + "layer_11_sharpness": 0.0006357523379847407, + "layer_12_sharpness": 0.0005867055151611567, + "block0_q_sharpness": 0.00032296235440298915, + "block0_k_sharpness": 0.00023267397773452103, + "block0_v_sharpness": 0.013767498545348644, + "block0_o_sharpness": 0.0015213626902550459, + "block0_mlp_win_sharpness": 0.002162407850846648, + "block0_mlp_wout_sharpness": 0.0014140643179416656, + "block3_q_sharpness": 0.00016017549205571413, + "block3_k_sharpness": 0.003529920242726803, + "block3_v_sharpness": 0.0032441813964396715, + "block3_o_sharpness": 0.00030681767384521663, + "block3_mlp_win_sharpness": 0.00025538713089190423, + "block3_mlp_wout_sharpness": 0.00013492380094248801, + "block7_q_sharpness": 0.00027230969863012433, + "block7_k_sharpness": 0.00032350103720091283, + "block7_v_sharpness": 0.001838608761318028, + 
"block7_o_sharpness": 0.00017323228530585766, + "block7_mlp_win_sharpness": 0.000533851096406579, + "block7_mlp_wout_sharpness": 0.0001808092201827094, + "block11_q_sharpness": 5.18247252330184e-05, + "block11_k_sharpness": 5.489116665557958e-05, + "block11_v_sharpness": 0.00017487099103163928, + "block11_o_sharpness": 7.105303666321561e-05, + "block11_mlp_win_sharpness": 0.0005821040831506252, + "block11_mlp_wout_sharpness": 0.0006355875520966947, + "sum_layer_numerators": 0.0076860110607349244, + "block_diag_sharpness": 0.0019036337955477777, + "cross_layer_sharpness": 0.007979299664628214 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..cff54631ff2a6648e00fd4a0274f9a4f543ab05e --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.400442123413086, + "total_l1_linf_norm": 20522.265625, + "total_spectral_norm": 2.400442361831665, + "embed_lm_head_update_fnorm": 1.319787621498108, + "embed_lm_head_max_l1_linf_norm": 0.33836787939071655, + "embed_lm_head_max_spectral_norm": 0.24672561883926392, + "layer_1_update_fnorm": 0.5882329344749451, + "layer_1_max_l1_linf_norm": 0.43702632188796997, + "layer_1_max_spectral_norm": 0.012051810510456562, + "layer_2_update_fnorm": 0.5206120610237122, + "layer_2_max_l1_linf_norm": 0.4044942259788513, + "layer_2_max_spectral_norm": 0.012042402289807796, + "layer_3_update_fnorm": 0.5095199942588806, + "layer_3_max_l1_linf_norm": 0.39358580112457275, + "layer_3_max_spectral_norm": 0.014898818917572498, + "layer_4_update_fnorm": 0.5505496859550476, + "layer_4_max_l1_linf_norm": 0.40023279190063477, + "layer_4_max_spectral_norm": 0.012045340612530708, + "layer_5_update_fnorm": 0.5834766626358032, + "layer_5_max_l1_linf_norm": 0.4108102321624756, + "layer_5_max_spectral_norm": 0.012050433084368706, + "layer_6_update_fnorm": 0.5882514715194702, + "layer_6_max_l1_linf_norm": 0.41492173075675964, + "layer_6_max_spectral_norm": 0.012065698392689228, + "layer_7_update_fnorm": 0.5966218709945679, + "layer_7_max_l1_linf_norm": 0.4104252755641937, + "layer_7_max_spectral_norm": 0.012048219330608845, + "layer_8_update_fnorm": 0.5978879928588867, + "layer_8_max_l1_linf_norm": 0.4174478054046631, + "layer_8_max_spectral_norm": 0.012047231197357178, + "layer_9_update_fnorm": 0.5981070399284363, + "layer_9_max_l1_linf_norm": 0.4163776636123657, + "layer_9_max_spectral_norm": 0.012043616734445095, + "layer_10_update_fnorm": 0.5988594889640808, + "layer_10_max_l1_linf_norm": 0.41084685921669006, + "layer_10_max_spectral_norm": 0.012043609283864498, + "layer_11_update_fnorm": 0.5999561548233032, + "layer_11_max_l1_linf_norm": 0.40676701068878174, + "layer_11_max_spectral_norm": 0.012043187394738197, + "layer_12_update_fnorm": 0.6035633087158203, + "layer_12_max_l1_linf_norm": 0.3985135555267334, + "layer_12_max_spectral_norm": 0.012044084258377552, + "block0_q_update_fnorm": 0.2447081059217453, + "block0_q_max_l1_linf_norm": 0.2078048586845398, + "block0_q_max_spectral_norm": 0.012041611596941948, + "block0_k_update_fnorm": 0.2455703318119049, + "block0_k_max_l1_linf_norm": 0.2102327048778534, + "block0_k_max_spectral_norm": 
0.01204449962824583, + "block0_v_update_fnorm": 0.17254963517189026, + "block0_v_max_l1_linf_norm": 0.20158499479293823, + "block0_v_max_spectral_norm": 0.012031834572553635, + "block0_o_update_fnorm": 0.22967517375946045, + "block0_o_max_l1_linf_norm": 0.1943136751651764, + "block0_o_max_spectral_norm": 0.012040011584758759, + "block0_mlp_win_update_fnorm": 0.2712767422199249, + "block0_mlp_win_max_l1_linf_norm": 0.17143067717552185, + "block0_mlp_win_max_spectral_norm": 0.012051810510456562, + "block0_mlp_wout_update_fnorm": 0.2638680040836334, + "block0_mlp_wout_max_l1_linf_norm": 0.43702632188796997, + "block0_mlp_wout_max_spectral_norm": 0.012044099159538746, + "block3_q_update_fnorm": 0.2072942554950714, + "block3_q_max_l1_linf_norm": 0.21282023191452026, + "block3_q_max_spectral_norm": 0.012040980160236359, + "block3_k_update_fnorm": 0.19310161471366882, + "block3_k_max_l1_linf_norm": 0.21237298846244812, + "block3_k_max_spectral_norm": 0.012033060193061829, + "block3_v_update_fnorm": 0.1922234445810318, + "block3_v_max_l1_linf_norm": 0.20245039463043213, + "block3_v_max_spectral_norm": 0.012034598737955093, + "block3_o_update_fnorm": 0.2366802841424942, + "block3_o_max_l1_linf_norm": 0.19825147092342377, + "block3_o_max_spectral_norm": 0.012035314925014973, + "block3_mlp_win_update_fnorm": 0.2664608061313629, + "block3_mlp_win_max_l1_linf_norm": 0.18432208895683289, + "block3_mlp_win_max_spectral_norm": 0.012045340612530708, + "block3_mlp_wout_update_fnorm": 0.2423625886440277, + "block3_mlp_wout_max_l1_linf_norm": 0.40023279190063477, + "block3_mlp_wout_max_spectral_norm": 0.011388714425265789, + "block7_q_update_fnorm": 0.2436731606721878, + "block7_q_max_l1_linf_norm": 0.21025219559669495, + "block7_q_max_spectral_norm": 0.012046225368976593, + "block7_k_update_fnorm": 0.24569952487945557, + "block7_k_max_l1_linf_norm": 0.21912406384944916, + "block7_k_max_spectral_norm": 0.012044234201312065, + "block7_v_update_fnorm": 0.2328912764787674, + "block7_v_max_l1_linf_norm": 0.20755010843276978, + "block7_v_max_spectral_norm": 0.012041997164487839, + "block7_o_update_fnorm": 0.2479444444179535, + "block7_o_max_l1_linf_norm": 0.20945186913013458, + "block7_o_max_spectral_norm": 0.012047231197357178, + "block7_mlp_win_update_fnorm": 0.24456800520420074, + "block7_mlp_win_max_l1_linf_norm": 0.14227646589279175, + "block7_mlp_win_max_spectral_norm": 0.012015457265079021, + "block7_mlp_wout_update_fnorm": 0.24917691946029663, + "block7_mlp_wout_max_l1_linf_norm": 0.4174478054046631, + "block7_mlp_wout_max_spectral_norm": 0.011398269794881344, + "block11_q_update_fnorm": 0.24905511736869812, + "block11_q_max_l1_linf_norm": 0.20905713737010956, + "block11_q_max_spectral_norm": 0.012035793624818325, + "block11_k_update_fnorm": 0.24946172535419464, + "block11_k_max_l1_linf_norm": 0.2130998969078064, + "block11_k_max_spectral_norm": 0.012036919593811035, + "block11_v_update_fnorm": 0.24675793945789337, + "block11_v_max_l1_linf_norm": 0.20747962594032288, + "block11_v_max_spectral_norm": 0.012039604596793652, + "block11_o_update_fnorm": 0.24892234802246094, + "block11_o_max_l1_linf_norm": 0.20775112509727478, + "block11_o_max_spectral_norm": 0.012044084258377552, + "block11_mlp_win_update_fnorm": 0.24502860009670258, + "block11_mlp_win_max_l1_linf_norm": 0.16135165095329285, + "block11_mlp_win_max_spectral_norm": 0.011370733380317688, + "block11_mlp_wout_update_fnorm": 0.2387697398662567, + "block11_mlp_wout_max_l1_linf_norm": 0.3985135555267334, + "block11_mlp_wout_max_spectral_norm": 
0.011390703730285168, + "total_sharpness": 0.005318650044500828, + "block_total_sharpness": 0.006664367392659187, + "v_norm_block": 2.0050644874572754, + "v_T_H_v_block": 0.02679264359176159, + "v_norm": 2.400442123413086, + "ip_v_neg_g_hvp": 0.05199957266449928, + "cos_v_neg_g_hvp": 0.03986215963959694, + "g_hvp_norm": 0.5434351563453674, + "ip_v_neg_g_t": 0.05242086946964264, + "cos_v_neg_g_t": 0.046213217079639435, + "g_t_norm": 0.4725489020347595, + "g_norm": 0.5434351563453674, + "hv_norm": 0.6650830507278442, + "cos_v_hv": 0.019196266308426857, + "hg_norm": 22.98400115966797, + "cos_g_hg": 0.5856155157089233, + "v_parallel_norm": 0.0069107781164348125, + "v_perp_norm": 2.4004321098327637, + "embed_lm_head_v_norm": 1.319787621498108, + "embed_lm_head_cos_v_neg_g": 0.08522817492485046, + "layer_1_v_norm": 0.5882329344749451, + "layer_1_cos_v_neg_g": 0.024094702675938606, + "layer_2_v_norm": 0.5206120610237122, + "layer_2_cos_v_neg_g": 0.0290500707924366, + "layer_3_v_norm": 0.5095199942588806, + "layer_3_cos_v_neg_g": 0.02756453864276409, + "layer_4_v_norm": 0.5505496859550476, + "layer_4_cos_v_neg_g": 0.03479477018117905, + "layer_5_v_norm": 0.5834766626358032, + "layer_5_cos_v_neg_g": 0.040777500718832016, + "layer_6_v_norm": 0.5882514715194702, + "layer_6_cos_v_neg_g": 0.042094636708498, + "layer_7_v_norm": 0.5966218709945679, + "layer_7_cos_v_neg_g": 0.04449956864118576, + "layer_8_v_norm": 0.5978879928588867, + "layer_8_cos_v_neg_g": 0.0449710488319397, + "layer_9_v_norm": 0.5981070399284363, + "layer_9_cos_v_neg_g": 0.0459822341799736, + "layer_10_v_norm": 0.5988594889640808, + "layer_10_cos_v_neg_g": 0.050069671124219894, + "layer_11_v_norm": 0.5999560952186584, + "layer_11_cos_v_neg_g": 0.059519361704587936, + "layer_12_v_norm": 0.6035633087158203, + "layer_12_cos_v_neg_g": 0.08564137667417526, + "block0_q_v_norm": 0.2447081059217453, + "block0_q_cos_v_neg_g": 0.04847925156354904, + "block0_k_v_norm": 0.2455703318119049, + "block0_k_cos_v_neg_g": 0.06704528629779816, + "block0_v_v_norm": 0.17254963517189026, + "block0_v_cos_v_neg_g": 0.025548961013555527, + "block0_o_v_norm": 0.22967517375946045, + "block0_o_cos_v_neg_g": 0.04467242211103439, + "block0_mlp_win_v_norm": 0.2712767422199249, + "block0_mlp_win_cos_v_neg_g": 0.053742870688438416, + "block0_mlp_wout_v_norm": 0.2638680040836334, + "block0_mlp_wout_cos_v_neg_g": 0.06936582177877426, + "block3_q_v_norm": 0.2072942554950714, + "block3_q_cos_v_neg_g": 0.043503209948539734, + "block3_k_v_norm": 0.19310161471366882, + "block3_k_cos_v_neg_g": 0.05698143318295479, + "block3_v_v_norm": 0.1922234445810318, + "block3_v_cos_v_neg_g": 0.02855825610458851, + "block3_o_v_norm": 0.2366802841424942, + "block3_o_cos_v_neg_g": 0.057685624808073044, + "block3_mlp_win_v_norm": 0.2664608061313629, + "block3_mlp_win_cos_v_neg_g": 0.04662250727415085, + "block3_mlp_wout_v_norm": 0.2423625886440277, + "block3_mlp_wout_cos_v_neg_g": 0.09522201120853424, + "block7_q_v_norm": 0.2436731606721878, + "block7_q_cos_v_neg_g": 0.055838651955127716, + "block7_k_v_norm": 0.24569952487945557, + "block7_k_cos_v_neg_g": 0.08035740256309509, + "block7_v_v_norm": 0.2328912764787674, + "block7_v_cos_v_neg_g": 0.035471051931381226, + "block7_o_v_norm": 0.2479444444179535, + "block7_o_cos_v_neg_g": 0.07529550045728683, + "block7_mlp_win_v_norm": 0.24456800520420074, + "block7_mlp_win_cos_v_neg_g": 0.06950526684522629, + "block7_mlp_wout_v_norm": 0.24917691946029663, + "block7_mlp_wout_cos_v_neg_g": 0.1282133311033249, + "block11_q_v_norm": 0.24905511736869812, 
+ "block11_q_cos_v_neg_g": 0.09548396617174149, + "block11_k_v_norm": 0.24946172535419464, + "block11_k_cos_v_neg_g": 0.1053730770945549, + "block11_v_v_norm": 0.24675793945789337, + "block11_v_cos_v_neg_g": 0.0730893462896347, + "block11_o_v_norm": 0.24892234802246094, + "block11_o_cos_v_neg_g": 0.09981393069028854, + "block11_mlp_win_v_norm": 0.24502860009670258, + "block11_mlp_win_cos_v_neg_g": 0.11133400350809097, + "block11_mlp_wout_v_norm": 0.2387697398662567, + "block11_mlp_wout_cos_v_neg_g": 0.09470213949680328, + "embed_lm_head_sharpness": 0.000494341307785362, + "layer_1_sharpness": 0.007284047547727823, + "layer_2_sharpness": 0.0011199766304343939, + "layer_3_sharpness": 0.001117250300012529, + "layer_4_sharpness": 0.0010819790186360478, + "layer_5_sharpness": 0.0010414543794468045, + "layer_6_sharpness": 0.00111132743768394, + "layer_7_sharpness": 0.0012376562226563692, + "layer_8_sharpness": 0.0011253014672547579, + "layer_9_sharpness": 0.0009519418235868216, + "layer_10_sharpness": 0.0005481117987073958, + "layer_11_sharpness": 0.0005434847553260624, + "layer_12_sharpness": 0.00048768820124678314, + "block0_q_sharpness": 0.00027879528352059424, + "block0_k_sharpness": 0.00033551815431565046, + "block0_v_sharpness": 0.03014868125319481, + "block0_o_sharpness": 0.0009909947402775288, + "block0_mlp_win_sharpness": 0.0018087602220475674, + "block0_mlp_wout_sharpness": 0.0009262300445698202, + "block3_q_sharpness": 8.108606562018394e-05, + "block3_k_sharpness": 0.0010546750854700804, + "block3_v_sharpness": 0.002755712252110243, + "block3_o_sharpness": 0.00018027238547801971, + "block3_mlp_win_sharpness": 0.00014617307169828564, + "block3_mlp_wout_sharpness": 7.703710434725508e-05, + "block7_q_sharpness": 0.0001229480403708294, + "block7_k_sharpness": 0.00013732927618548274, + "block7_v_sharpness": 0.0021415632218122482, + "block7_o_sharpness": 0.00014161551371216774, + "block7_mlp_win_sharpness": 0.0004414122086018324, + "block7_mlp_wout_sharpness": 0.00012450924259610474, + "block11_q_sharpness": 4.071180956088938e-05, + "block11_k_sharpness": 5.276551382848993e-05, + "block11_v_sharpness": 0.00017586165631655604, + "block11_o_sharpness": 5.7093999203061685e-05, + "block11_mlp_win_sharpness": 0.0003017432172782719, + "block11_mlp_wout_sharpness": 0.0006222581723704934, + "sum_layer_numerators": 0.005934302029448837, + "block_diag_sharpness": 0.0014760904009677763, + "cross_layer_sharpness": 0.005188276991691411 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..c9e81c161b915a5128dbaed40626db3af4e8837f --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_2500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.399641275405884, + "total_l1_linf_norm": 20521.28125, + "total_spectral_norm": 2.3996410369873047, + "embed_lm_head_update_fnorm": 1.3390792608261108, + "embed_lm_head_max_l1_linf_norm": 0.35020092129707336, + "embed_lm_head_max_spectral_norm": 0.22297078371047974, + "layer_1_update_fnorm": 0.5835604667663574, + "layer_1_max_l1_linf_norm": 0.4393727481365204, + "layer_1_max_spectral_norm": 0.01205426175147295, + "layer_2_update_fnorm": 0.4882414937019348, + "layer_2_max_l1_linf_norm": 
0.4158093333244324, + "layer_2_max_spectral_norm": 0.012062904424965382, + "layer_3_update_fnorm": 0.4850960671901703, + "layer_3_max_l1_linf_norm": 0.3950442373752594, + "layer_3_max_spectral_norm": 0.014399661682546139, + "layer_4_update_fnorm": 0.5514404773712158, + "layer_4_max_l1_linf_norm": 0.4012983441352844, + "layer_4_max_spectral_norm": 0.012041477486491203, + "layer_5_update_fnorm": 0.5854904651641846, + "layer_5_max_l1_linf_norm": 0.41188210248947144, + "layer_5_max_spectral_norm": 0.012047064490616322, + "layer_6_update_fnorm": 0.5895702242851257, + "layer_6_max_l1_linf_norm": 0.4167426824569702, + "layer_6_max_spectral_norm": 0.012062487192451954, + "layer_7_update_fnorm": 0.5989713072776794, + "layer_7_max_l1_linf_norm": 0.41264548897743225, + "layer_7_max_spectral_norm": 0.012060035951435566, + "layer_8_update_fnorm": 0.5990012884140015, + "layer_8_max_l1_linf_norm": 0.4143780469894409, + "layer_8_max_spectral_norm": 0.01204591616988182, + "layer_9_update_fnorm": 0.5996091961860657, + "layer_9_max_l1_linf_norm": 0.41350802779197693, + "layer_9_max_spectral_norm": 0.012042163871228695, + "layer_10_update_fnorm": 0.5989722013473511, + "layer_10_max_l1_linf_norm": 0.41691315174102783, + "layer_10_max_spectral_norm": 0.01204269751906395, + "layer_11_update_fnorm": 0.5977121591567993, + "layer_11_max_l1_linf_norm": 0.41082656383514404, + "layer_11_max_spectral_norm": 0.012046700343489647, + "layer_12_update_fnorm": 0.6027407646179199, + "layer_12_max_l1_linf_norm": 0.3963104784488678, + "layer_12_max_spectral_norm": 0.01204967312514782, + "block0_q_update_fnorm": 0.24413800239562988, + "block0_q_max_l1_linf_norm": 0.21528714895248413, + "block0_q_max_spectral_norm": 0.012041687965393066, + "block0_k_update_fnorm": 0.24314303696155548, + "block0_k_max_l1_linf_norm": 0.20856548845767975, + "block0_k_max_spectral_norm": 0.012042831629514694, + "block0_v_update_fnorm": 0.15790599584579468, + "block0_v_max_l1_linf_norm": 0.19636905193328857, + "block0_v_max_spectral_norm": 0.012027726508677006, + "block0_o_update_fnorm": 0.22562134265899658, + "block0_o_max_l1_linf_norm": 0.19401146471500397, + "block0_o_max_spectral_norm": 0.012041734531521797, + "block0_mlp_win_update_fnorm": 0.2734300494194031, + "block0_mlp_win_max_l1_linf_norm": 0.18335264921188354, + "block0_mlp_win_max_spectral_norm": 0.01205426175147295, + "block0_mlp_wout_update_fnorm": 0.2666458785533905, + "block0_mlp_wout_max_l1_linf_norm": 0.4393727481365204, + "block0_mlp_wout_max_spectral_norm": 0.012040103785693645, + "block3_q_update_fnorm": 0.20479007065296173, + "block3_q_max_l1_linf_norm": 0.2114228904247284, + "block3_q_max_spectral_norm": 0.012040375731885433, + "block3_k_update_fnorm": 0.1983814835548401, + "block3_k_max_l1_linf_norm": 0.21274469792842865, + "block3_k_max_spectral_norm": 0.012040194123983383, + "block3_v_update_fnorm": 0.18987484276294708, + "block3_v_max_l1_linf_norm": 0.20482558012008667, + "block3_v_max_spectral_norm": 0.012032224796712399, + "block3_o_update_fnorm": 0.23805907368659973, + "block3_o_max_l1_linf_norm": 0.20184484124183655, + "block3_o_max_spectral_norm": 0.012035680003464222, + "block3_mlp_win_update_fnorm": 0.2664210796356201, + "block3_mlp_win_max_l1_linf_norm": 0.1849493682384491, + "block3_mlp_win_max_spectral_norm": 0.012041477486491203, + "block3_mlp_wout_update_fnorm": 0.2427896410226822, + "block3_mlp_wout_max_l1_linf_norm": 0.4012983441352844, + "block3_mlp_wout_max_spectral_norm": 0.011371593922376633, + "block7_q_update_fnorm": 0.24384890496730804, + 
"block7_q_max_l1_linf_norm": 0.20920789241790771, + "block7_q_max_spectral_norm": 0.012043144553899765, + "block7_k_update_fnorm": 0.24642117321491241, + "block7_k_max_l1_linf_norm": 0.21191421151161194, + "block7_k_max_spectral_norm": 0.012037131935358047, + "block7_v_update_fnorm": 0.23085729777812958, + "block7_v_max_l1_linf_norm": 0.20838841795921326, + "block7_v_max_spectral_norm": 0.012039419263601303, + "block7_o_update_fnorm": 0.24809852242469788, + "block7_o_max_l1_linf_norm": 0.20697584748268127, + "block7_o_max_spectral_norm": 0.012044793926179409, + "block7_mlp_win_update_fnorm": 0.24776577949523926, + "block7_mlp_win_max_l1_linf_norm": 0.14412133395671844, + "block7_mlp_win_max_spectral_norm": 0.01204591616988182, + "block7_mlp_wout_update_fnorm": 0.24955087900161743, + "block7_mlp_wout_max_l1_linf_norm": 0.4143780469894409, + "block7_mlp_wout_max_spectral_norm": 0.01139769982546568, + "block11_q_update_fnorm": 0.24911966919898987, + "block11_q_max_l1_linf_norm": 0.212137371301651, + "block11_q_max_spectral_norm": 0.012040912173688412, + "block11_k_update_fnorm": 0.24986518919467926, + "block11_k_max_l1_linf_norm": 0.21022993326187134, + "block11_k_max_spectral_norm": 0.012038592249155045, + "block11_v_update_fnorm": 0.24674464762210846, + "block11_v_max_l1_linf_norm": 0.20759466290473938, + "block11_v_max_spectral_norm": 0.01204967312514782, + "block11_o_update_fnorm": 0.24923664331436157, + "block11_o_max_l1_linf_norm": 0.20925965905189514, + "block11_o_max_spectral_norm": 0.012039901688694954, + "block11_mlp_win_update_fnorm": 0.24335454404354095, + "block11_mlp_win_max_l1_linf_norm": 0.1606067717075348, + "block11_mlp_win_max_spectral_norm": 0.011369815096259117, + "block11_mlp_wout_update_fnorm": 0.23757408559322357, + "block11_mlp_wout_max_l1_linf_norm": 0.3963104784488678, + "block11_mlp_wout_max_spectral_norm": 0.011371434666216373, + "total_sharpness": 0.006486429832875729, + "block_total_sharpness": 0.008376816287636757, + "v_norm_block": 1.9912670850753784, + "v_T_H_v_block": 0.03321528434753418, + "v_norm": 2.399641275405884, + "ip_v_neg_g_hvp": 0.055916182696819305, + "cos_v_neg_g_hvp": 0.0371413379907608, + "g_hvp_norm": 0.6273843050003052, + "ip_v_neg_g_t": 0.05649811774492264, + "cos_v_neg_g_t": 0.04140738025307655, + "g_t_norm": 0.5686039924621582, + "g_norm": 0.6273843050003052, + "hv_norm": 0.7686992883682251, + "cos_v_hv": 0.02024862729012966, + "hg_norm": 28.2022762298584, + "cos_g_hg": 0.5857387781143188, + "v_parallel_norm": 0.006324058398604393, + "v_perp_norm": 2.3996329307556152, + "embed_lm_head_v_norm": 1.3390792608261108, + "embed_lm_head_cos_v_neg_g": 0.07059556990861893, + "layer_1_v_norm": 0.5835604667663574, + "layer_1_cos_v_neg_g": 0.02922378107905388, + "layer_2_v_norm": 0.4882414937019348, + "layer_2_cos_v_neg_g": 0.04247057065367699, + "layer_3_v_norm": 0.4850960671901703, + "layer_3_cos_v_neg_g": 0.03193965554237366, + "layer_4_v_norm": 0.5514404773712158, + "layer_4_cos_v_neg_g": 0.03390926122665405, + "layer_5_v_norm": 0.5854904651641846, + "layer_5_cos_v_neg_g": 0.03687337040901184, + "layer_6_v_norm": 0.5895702242851257, + "layer_6_cos_v_neg_g": 0.038670044392347336, + "layer_7_v_norm": 0.5989713072776794, + "layer_7_cos_v_neg_g": 0.03679871931672096, + "layer_8_v_norm": 0.5990012884140015, + "layer_8_cos_v_neg_g": 0.039301320910453796, + "layer_9_v_norm": 0.5996091961860657, + "layer_9_cos_v_neg_g": 0.04006475582718849, + "layer_10_v_norm": 0.5989722013473511, + "layer_10_cos_v_neg_g": 0.04597878456115723, + "layer_11_v_norm": 
0.5977122187614441, + "layer_11_cos_v_neg_g": 0.054270919412374496, + "layer_12_v_norm": 0.6027407646179199, + "layer_12_cos_v_neg_g": 0.07458104193210602, + "block0_q_v_norm": 0.24413800239562988, + "block0_q_cos_v_neg_g": 0.11468157917261124, + "block0_k_v_norm": 0.24314303696155548, + "block0_k_cos_v_neg_g": 0.11585141718387604, + "block0_v_v_norm": 0.15790599584579468, + "block0_v_cos_v_neg_g": 0.03187265619635582, + "block0_o_v_norm": 0.22562134265899658, + "block0_o_cos_v_neg_g": 0.05233069881796837, + "block0_mlp_win_v_norm": 0.2734300494194031, + "block0_mlp_win_cos_v_neg_g": 0.05034332722425461, + "block0_mlp_wout_v_norm": 0.2666458785533905, + "block0_mlp_wout_cos_v_neg_g": 0.07750431448221207, + "block3_q_v_norm": 0.20479007065296173, + "block3_q_cos_v_neg_g": 0.04535618796944618, + "block3_k_v_norm": 0.1983814835548401, + "block3_k_cos_v_neg_g": 0.05953851342201233, + "block3_v_v_norm": 0.18987484276294708, + "block3_v_cos_v_neg_g": 0.03237537294626236, + "block3_o_v_norm": 0.23805907368659973, + "block3_o_cos_v_neg_g": 0.06123587116599083, + "block3_mlp_win_v_norm": 0.2664210796356201, + "block3_mlp_win_cos_v_neg_g": 0.04193531721830368, + "block3_mlp_wout_v_norm": 0.2427896410226822, + "block3_mlp_wout_cos_v_neg_g": 0.10456420481204987, + "block7_q_v_norm": 0.24384890496730804, + "block7_q_cos_v_neg_g": 0.05310361459851265, + "block7_k_v_norm": 0.24642117321491241, + "block7_k_cos_v_neg_g": 0.08333148062229156, + "block7_v_v_norm": 0.23085729777812958, + "block7_v_cos_v_neg_g": 0.029422765597701073, + "block7_o_v_norm": 0.24809852242469788, + "block7_o_cos_v_neg_g": 0.0748174861073494, + "block7_mlp_win_v_norm": 0.24776577949523926, + "block7_mlp_win_cos_v_neg_g": 0.05749182775616646, + "block7_mlp_wout_v_norm": 0.24955087900161743, + "block7_mlp_wout_cos_v_neg_g": 0.12916386127471924, + "block11_q_v_norm": 0.24911966919898987, + "block11_q_cos_v_neg_g": 0.09397727996110916, + "block11_k_v_norm": 0.24986518919467926, + "block11_k_cos_v_neg_g": 0.10483328253030777, + "block11_v_v_norm": 0.24674464762210846, + "block11_v_cos_v_neg_g": 0.06696205586194992, + "block11_o_v_norm": 0.24923664331436157, + "block11_o_cos_v_neg_g": 0.09621555358171463, + "block11_mlp_win_v_norm": 0.24335454404354095, + "block11_mlp_win_cos_v_neg_g": 0.10372461378574371, + "block11_mlp_wout_v_norm": 0.23757408559322357, + "block11_mlp_wout_cos_v_neg_g": 0.07800018787384033, + "embed_lm_head_sharpness": 0.0004200558760203421, + "layer_1_sharpness": 0.011635766364634037, + "layer_2_sharpness": 0.005905527621507645, + "layer_3_sharpness": 0.0024646588135510683, + "layer_4_sharpness": 0.0010333078680559993, + "layer_5_sharpness": 0.000988717656582594, + "layer_6_sharpness": 0.0010634965728968382, + "layer_7_sharpness": 0.0011439525987952948, + "layer_8_sharpness": 0.0010111079318448901, + "layer_9_sharpness": 0.0007541164522990584, + "layer_10_sharpness": 0.00046027550706639886, + "layer_11_sharpness": 0.0004843104106839746, + "layer_12_sharpness": 0.0005744796944782138, + "block0_q_sharpness": 0.0022067760583013296, + "block0_k_sharpness": 0.002160040196031332, + "block0_v_sharpness": 0.029206614941358566, + "block0_o_sharpness": 0.001131004304625094, + "block0_mlp_win_sharpness": 0.001060586073435843, + "block0_mlp_wout_sharpness": 0.0015979217132553458, + "block3_q_sharpness": 6.354301876854151e-05, + "block3_k_sharpness": 0.0008016353822313249, + "block3_v_sharpness": 0.0029616982210427523, + "block3_o_sharpness": 0.00016963454254437238, + "block3_mlp_win_sharpness": 0.0001163444685516879, + 
"block3_mlp_wout_sharpness": 7.455796003341675e-05, + "block7_q_sharpness": 0.00013567422865889966, + "block7_k_sharpness": 0.00010971896699629724, + "block7_v_sharpness": 0.0022026405204087496, + "block7_o_sharpness": 0.00010146306885872036, + "block7_mlp_win_sharpness": 0.00036097440170124173, + "block7_mlp_wout_sharpness": 9.678929927758873e-05, + "block11_q_sharpness": 4.579603046295233e-05, + "block11_k_sharpness": 5.1321130740689114e-05, + "block11_v_sharpness": 0.00015563755005132407, + "block11_o_sharpness": 4.9693357141222805e-05, + "block11_mlp_win_sharpness": 0.0004372742841951549, + "block11_mlp_wout_sharpness": 0.0007426177617162466, + "sum_layer_numerators": 0.008564216159126933, + "block_diag_sharpness": 0.002159874863141567, + "cross_layer_sharpness": 0.00621694142449519 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..c0d60a4e4791f640f4cd2994270274369f2dd750 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.392993211746216, + "total_l1_linf_norm": 20474.994140625, + "total_spectral_norm": 2.392993211746216, + "embed_lm_head_update_fnorm": 1.341498851776123, + "embed_lm_head_max_l1_linf_norm": 0.342293918132782, + "embed_lm_head_max_spectral_norm": 0.21928240358829498, + "layer_1_update_fnorm": 0.5848245620727539, + "layer_1_max_l1_linf_norm": 0.43931159377098083, + "layer_1_max_spectral_norm": 0.012045595794916153, + "layer_2_update_fnorm": 0.5014548301696777, + "layer_2_max_l1_linf_norm": 0.42120957374572754, + "layer_2_max_spectral_norm": 0.012061803601682186, + "layer_3_update_fnorm": 0.4542337656021118, + "layer_3_max_l1_linf_norm": 0.4532829523086548, + "layer_3_max_spectral_norm": 0.017663855105638504, + "layer_4_update_fnorm": 0.5497094392776489, + "layer_4_max_l1_linf_norm": 0.401565283536911, + "layer_4_max_spectral_norm": 0.012042125687003136, + "layer_5_update_fnorm": 0.5851842761039734, + "layer_5_max_l1_linf_norm": 0.4072456657886505, + "layer_5_max_spectral_norm": 0.01205055508762598, + "layer_6_update_fnorm": 0.5881226658821106, + "layer_6_max_l1_linf_norm": 0.41197818517684937, + "layer_6_max_spectral_norm": 0.012048101983964443, + "layer_7_update_fnorm": 0.5969613790512085, + "layer_7_max_l1_linf_norm": 0.4081909656524658, + "layer_7_max_spectral_norm": 0.012050925754010677, + "layer_8_update_fnorm": 0.5970616936683655, + "layer_8_max_l1_linf_norm": 0.4105786383152008, + "layer_8_max_spectral_norm": 0.0120499636977911, + "layer_9_update_fnorm": 0.5957658886909485, + "layer_9_max_l1_linf_norm": 0.4106006324291229, + "layer_9_max_spectral_norm": 0.012054791674017906, + "layer_10_update_fnorm": 0.5965310335159302, + "layer_10_max_l1_linf_norm": 0.41133373975753784, + "layer_10_max_spectral_norm": 0.012046489864587784, + "layer_11_update_fnorm": 0.5925506949424744, + "layer_11_max_l1_linf_norm": 0.4071918725967407, + "layer_11_max_spectral_norm": 0.012044616974890232, + "layer_12_update_fnorm": 0.601443350315094, + "layer_12_max_l1_linf_norm": 0.4027511179447174, + "layer_12_max_spectral_norm": 0.012043466791510582, + "block0_q_update_fnorm": 0.2433982640504837, + "block0_q_max_l1_linf_norm": 0.21189826726913452, + 
"block0_q_max_spectral_norm": 0.012038975022733212, + "block0_k_update_fnorm": 0.2414703667163849, + "block0_k_max_l1_linf_norm": 0.21142441034317017, + "block0_k_max_spectral_norm": 0.012041778303682804, + "block0_v_update_fnorm": 0.16385811567306519, + "block0_v_max_l1_linf_norm": 0.2121983915567398, + "block0_v_max_spectral_norm": 0.012029503472149372, + "block0_o_update_fnorm": 0.2265842854976654, + "block0_o_max_l1_linf_norm": 0.19374695420265198, + "block0_o_max_spectral_norm": 0.012043515220284462, + "block0_mlp_win_update_fnorm": 0.2737393081188202, + "block0_mlp_win_max_l1_linf_norm": 0.16810062527656555, + "block0_mlp_win_max_spectral_norm": 0.01204530056566, + "block0_mlp_wout_update_fnorm": 0.26689136028289795, + "block0_mlp_wout_max_l1_linf_norm": 0.43931159377098083, + "block0_mlp_wout_max_spectral_norm": 0.012045595794916153, + "block3_q_update_fnorm": 0.20512445271015167, + "block3_q_max_l1_linf_norm": 0.2130165994167328, + "block3_q_max_spectral_norm": 0.012042125687003136, + "block3_k_update_fnorm": 0.1984093189239502, + "block3_k_max_l1_linf_norm": 0.21292459964752197, + "block3_k_max_spectral_norm": 0.012037266977131367, + "block3_v_update_fnorm": 0.18624961376190186, + "block3_v_max_l1_linf_norm": 0.2062874734401703, + "block3_v_max_spectral_norm": 0.012031642720103264, + "block3_o_update_fnorm": 0.23745326697826385, + "block3_o_max_l1_linf_norm": 0.19798007607460022, + "block3_o_max_spectral_norm": 0.012038853019475937, + "block3_mlp_win_update_fnorm": 0.2659003436565399, + "block3_mlp_win_max_l1_linf_norm": 0.1789853572845459, + "block3_mlp_win_max_spectral_norm": 0.012042034417390823, + "block3_mlp_wout_update_fnorm": 0.24249476194381714, + "block3_mlp_wout_max_l1_linf_norm": 0.401565283536911, + "block3_mlp_wout_max_spectral_norm": 0.011403667740523815, + "block7_q_update_fnorm": 0.2422194629907608, + "block7_q_max_l1_linf_norm": 0.2089492380619049, + "block7_q_max_spectral_norm": 0.01204372476786375, + "block7_k_update_fnorm": 0.24571624398231506, + "block7_k_max_l1_linf_norm": 0.21133863925933838, + "block7_k_max_spectral_norm": 0.012040887027978897, + "block7_v_update_fnorm": 0.21871954202651978, + "block7_v_max_l1_linf_norm": 0.21267616748809814, + "block7_v_max_spectral_norm": 0.012036015279591084, + "block7_o_update_fnorm": 0.24782152473926544, + "block7_o_max_l1_linf_norm": 0.21124333143234253, + "block7_o_max_spectral_norm": 0.012040756642818451, + "block7_mlp_win_update_fnorm": 0.25900542736053467, + "block7_mlp_win_max_l1_linf_norm": 0.14472880959510803, + "block7_mlp_win_max_spectral_norm": 0.0120499636977911, + "block7_mlp_wout_update_fnorm": 0.24691392481327057, + "block7_mlp_wout_max_l1_linf_norm": 0.4105786383152008, + "block7_mlp_wout_max_spectral_norm": 0.011369328014552593, + "block11_q_update_fnorm": 0.2487843632698059, + "block11_q_max_l1_linf_norm": 0.21271637082099915, + "block11_q_max_spectral_norm": 0.012036449275910854, + "block11_k_update_fnorm": 0.24927592277526855, + "block11_k_max_l1_linf_norm": 0.21268197894096375, + "block11_k_max_spectral_norm": 0.012041185051202774, + "block11_v_update_fnorm": 0.24644559621810913, + "block11_v_max_l1_linf_norm": 0.2057889997959137, + "block11_v_max_spectral_norm": 0.012043466791510582, + "block11_o_update_fnorm": 0.2489577829837799, + "block11_o_max_l1_linf_norm": 0.2069326937198639, + "block11_o_max_spectral_norm": 0.012042879126966, + "block11_mlp_win_update_fnorm": 0.24088619649410248, + "block11_mlp_win_max_l1_linf_norm": 0.16693779826164246, + "block11_mlp_win_max_spectral_norm": 
0.011406024917960167, + "block11_mlp_wout_update_fnorm": 0.23832398653030396, + "block11_mlp_wout_max_l1_linf_norm": 0.39493727684020996, + "block11_mlp_wout_max_spectral_norm": 0.011612240225076675, + "total_sharpness": 0.005902765318751335, + "block_total_sharpness": 0.007470519281923771, + "v_norm_block": 1.9816147089004517, + "v_T_H_v_block": 0.02933521196246147, + "v_norm": 2.392993211746216, + "ip_v_neg_g_hvp": 0.05217517167329788, + "cos_v_neg_g_hvp": 0.035457078367471695, + "g_hvp_norm": 0.6149212121963501, + "ip_v_neg_g_t": 0.0526515357196331, + "cos_v_neg_g_t": 0.03986699506640434, + "g_t_norm": 0.55189448595047, + "g_norm": 0.6149212121963501, + "hv_norm": 0.8299695253372192, + "cos_v_hv": 0.017019029706716537, + "hg_norm": 85.61225891113281, + "cos_g_hg": 0.2017117589712143, + "v_parallel_norm": 0.006301082670688629, + "v_perp_norm": 2.3929848670959473, + "embed_lm_head_v_norm": 1.341498851776123, + "embed_lm_head_cos_v_neg_g": 0.06443565338850021, + "layer_1_v_norm": 0.5848245620727539, + "layer_1_cos_v_neg_g": 0.025541823357343674, + "layer_2_v_norm": 0.5014548301696777, + "layer_2_cos_v_neg_g": 0.026440029963850975, + "layer_3_v_norm": 0.4542337656021118, + "layer_3_cos_v_neg_g": 0.029125995934009552, + "layer_4_v_norm": 0.5497094392776489, + "layer_4_cos_v_neg_g": 0.03262509033083916, + "layer_5_v_norm": 0.5851842761039734, + "layer_5_cos_v_neg_g": 0.034198589622974396, + "layer_6_v_norm": 0.5881226658821106, + "layer_6_cos_v_neg_g": 0.0357276126742363, + "layer_7_v_norm": 0.5969613790512085, + "layer_7_cos_v_neg_g": 0.03558157756924629, + "layer_8_v_norm": 0.5970616936683655, + "layer_8_cos_v_neg_g": 0.03704826906323433, + "layer_9_v_norm": 0.5957658886909485, + "layer_9_cos_v_neg_g": 0.036682192236185074, + "layer_10_v_norm": 0.5965310335159302, + "layer_10_cos_v_neg_g": 0.03950022533535957, + "layer_11_v_norm": 0.5925506949424744, + "layer_11_cos_v_neg_g": 0.04925798997282982, + "layer_12_v_norm": 0.601443350315094, + "layer_12_cos_v_neg_g": 0.07268719375133514, + "block0_q_v_norm": 0.2433982640504837, + "block0_q_cos_v_neg_g": 0.07804520428180695, + "block0_k_v_norm": 0.2414703667163849, + "block0_k_cos_v_neg_g": 0.07349061965942383, + "block0_v_v_norm": 0.16385811567306519, + "block0_v_cos_v_neg_g": 0.026565302163362503, + "block0_o_v_norm": 0.2265842854976654, + "block0_o_cos_v_neg_g": 0.046868909150362015, + "block0_mlp_win_v_norm": 0.2737393081188202, + "block0_mlp_win_cos_v_neg_g": 0.041283611208200455, + "block0_mlp_wout_v_norm": 0.26689136028289795, + "block0_mlp_wout_cos_v_neg_g": 0.06571284681558609, + "block3_q_v_norm": 0.20512445271015167, + "block3_q_cos_v_neg_g": 0.04093387350440025, + "block3_k_v_norm": 0.1984093189239502, + "block3_k_cos_v_neg_g": 0.05337489768862724, + "block3_v_v_norm": 0.18624961376190186, + "block3_v_cos_v_neg_g": 0.03165754675865173, + "block3_o_v_norm": 0.23745326697826385, + "block3_o_cos_v_neg_g": 0.063103087246418, + "block3_mlp_win_v_norm": 0.2659003436565399, + "block3_mlp_win_cos_v_neg_g": 0.03718968480825424, + "block3_mlp_wout_v_norm": 0.24249476194381714, + "block3_mlp_wout_cos_v_neg_g": 0.10003151744604111, + "block7_q_v_norm": 0.2422194629907608, + "block7_q_cos_v_neg_g": 0.0457954928278923, + "block7_k_v_norm": 0.24571624398231506, + "block7_k_cos_v_neg_g": 0.08303230255842209, + "block7_v_v_norm": 0.21871954202651978, + "block7_v_cos_v_neg_g": 0.03715861216187477, + "block7_o_v_norm": 0.24782152473926544, + "block7_o_cos_v_neg_g": 0.07422878593206406, + "block7_mlp_win_v_norm": 0.25900542736053467, + 
"block7_mlp_win_cos_v_neg_g": 0.04855634272098541, + "block7_mlp_wout_v_norm": 0.24691392481327057, + "block7_mlp_wout_cos_v_neg_g": 0.1265132874250412, + "block11_q_v_norm": 0.2487843632698059, + "block11_q_cos_v_neg_g": 0.08873474597930908, + "block11_k_v_norm": 0.24927592277526855, + "block11_k_cos_v_neg_g": 0.10057699680328369, + "block11_v_v_norm": 0.24644559621810913, + "block11_v_cos_v_neg_g": 0.056252751499414444, + "block11_o_v_norm": 0.2489577829837799, + "block11_o_cos_v_neg_g": 0.09060255438089371, + "block11_mlp_win_v_norm": 0.24088619649410248, + "block11_mlp_win_cos_v_neg_g": 0.09850027412176132, + "block11_mlp_wout_v_norm": 0.23832398653030396, + "block11_mlp_wout_cos_v_neg_g": 0.08638447523117065, + "embed_lm_head_sharpness": 0.0004951607552357018, + "layer_1_sharpness": 0.008516328409314156, + "layer_2_sharpness": 0.0021648567635565996, + "layer_3_sharpness": 0.0019195297500118613, + "layer_4_sharpness": 0.001481161336414516, + "layer_5_sharpness": 0.001050092512741685, + "layer_6_sharpness": 0.0013360325247049332, + "layer_7_sharpness": 0.0014990540221333504, + "layer_8_sharpness": 0.0014873555628582835, + "layer_9_sharpness": 0.0009006574982777238, + "layer_10_sharpness": 0.0005173129611648619, + "layer_11_sharpness": 0.0005934723885729909, + "layer_12_sharpness": 0.0009728756849654019, + "block0_q_sharpness": 0.0013205517316237092, + "block0_k_sharpness": 0.0010114628821611404, + "block0_v_sharpness": 0.038052819669246674, + "block0_o_sharpness": 0.0008095439989119768, + "block0_mlp_win_sharpness": 0.0011101535055786371, + "block0_mlp_wout_sharpness": 0.0012729504378512502, + "block3_q_sharpness": 0.0001368581724818796, + "block3_k_sharpness": 0.0014690511161461473, + "block3_v_sharpness": 0.0036745246034115553, + "block3_o_sharpness": 0.00021002755966037512, + "block3_mlp_win_sharpness": 0.00012307887664064765, + "block3_mlp_wout_sharpness": 7.392944098683074e-05, + "block7_q_sharpness": 6.747834413545206e-05, + "block7_k_sharpness": 0.00010388500231783837, + "block7_v_sharpness": 0.0037865247577428818, + "block7_o_sharpness": 0.0001255140668945387, + "block7_mlp_win_sharpness": 0.00037604468525387347, + "block7_mlp_wout_sharpness": 0.00011033465125365183, + "block11_q_sharpness": 3.709891825565137e-05, + "block11_k_sharpness": 4.723844904219732e-05, + "block11_v_sharpness": 0.0001526138948975131, + "block11_o_sharpness": 5.1074177463306114e-05, + "block11_mlp_win_sharpness": 0.00041907475679181516, + "block11_mlp_wout_sharpness": 0.0021580569446086884, + "sum_layer_numerators": 0.007250950594177841, + "block_diag_sharpness": 0.001846530610772979, + "cross_layer_sharpness": 0.005623988671150792 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..6e84bde3fed16020ec420112009339daf2b4e738 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_3500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3757870197296143, + "total_l1_linf_norm": 20293.3203125, + "total_spectral_norm": 2.375786781311035, + "embed_lm_head_update_fnorm": 1.3333020210266113, + "embed_lm_head_max_l1_linf_norm": 0.3441784381866455, + "embed_lm_head_max_spectral_norm": 0.2072986364364624, + "layer_1_update_fnorm": 
0.5751833319664001, + "layer_1_max_l1_linf_norm": 0.44025111198425293, + "layer_1_max_spectral_norm": 0.012050571851432323, + "layer_2_update_fnorm": 0.476030558347702, + "layer_2_max_l1_linf_norm": 0.4369146227836609, + "layer_2_max_spectral_norm": 0.012236948125064373, + "layer_3_update_fnorm": 0.4163254201412201, + "layer_3_max_l1_linf_norm": 0.5470151901245117, + "layer_3_max_spectral_norm": 0.02118607796728611, + "layer_4_update_fnorm": 0.5500814318656921, + "layer_4_max_l1_linf_norm": 0.402384877204895, + "layer_4_max_spectral_norm": 0.01204721536487341, + "layer_5_update_fnorm": 0.5851160287857056, + "layer_5_max_l1_linf_norm": 0.4075947403907776, + "layer_5_max_spectral_norm": 0.012055597268044949, + "layer_6_update_fnorm": 0.5914856791496277, + "layer_6_max_l1_linf_norm": 0.4122101664543152, + "layer_6_max_spectral_norm": 0.012051563709974289, + "layer_7_update_fnorm": 0.5966957211494446, + "layer_7_max_l1_linf_norm": 0.407633513212204, + "layer_7_max_spectral_norm": 0.012053691782057285, + "layer_8_update_fnorm": 0.5991431474685669, + "layer_8_max_l1_linf_norm": 0.4111359119415283, + "layer_8_max_spectral_norm": 0.012056306004524231, + "layer_9_update_fnorm": 0.5989130735397339, + "layer_9_max_l1_linf_norm": 0.41158944368362427, + "layer_9_max_spectral_norm": 0.012059511616826057, + "layer_10_update_fnorm": 0.5968837141990662, + "layer_10_max_l1_linf_norm": 0.4155460596084595, + "layer_10_max_spectral_norm": 0.012045308947563171, + "layer_11_update_fnorm": 0.5909299254417419, + "layer_11_max_l1_linf_norm": 0.4070199429988861, + "layer_11_max_spectral_norm": 0.012045568786561489, + "layer_12_update_fnorm": 0.6015909314155579, + "layer_12_max_l1_linf_norm": 0.39541515707969666, + "layer_12_max_spectral_norm": 0.012047132477164268, + "block0_q_update_fnorm": 0.2359895259141922, + "block0_q_max_l1_linf_norm": 0.21491089463233948, + "block0_q_max_spectral_norm": 0.012042553164064884, + "block0_k_update_fnorm": 0.232884019613266, + "block0_k_max_l1_linf_norm": 0.21649464964866638, + "block0_k_max_spectral_norm": 0.012041008099913597, + "block0_v_update_fnorm": 0.15819460153579712, + "block0_v_max_l1_linf_norm": 0.19888556003570557, + "block0_v_max_spectral_norm": 0.012029735371470451, + "block0_o_update_fnorm": 0.22386159002780914, + "block0_o_max_l1_linf_norm": 0.1907278299331665, + "block0_o_max_spectral_norm": 0.012039621360599995, + "block0_mlp_win_update_fnorm": 0.27157947421073914, + "block0_mlp_win_max_l1_linf_norm": 0.18214932084083557, + "block0_mlp_win_max_spectral_norm": 0.012047179974615574, + "block0_mlp_wout_update_fnorm": 0.26809367537498474, + "block0_mlp_wout_max_l1_linf_norm": 0.44025111198425293, + "block0_mlp_wout_max_spectral_norm": 0.012050571851432323, + "block3_q_update_fnorm": 0.20933233201503754, + "block3_q_max_l1_linf_norm": 0.21097365021705627, + "block3_q_max_spectral_norm": 0.012040222063660622, + "block3_k_update_fnorm": 0.20307068526744843, + "block3_k_max_l1_linf_norm": 0.21417120099067688, + "block3_k_max_spectral_norm": 0.012036681175231934, + "block3_v_update_fnorm": 0.18453216552734375, + "block3_v_max_l1_linf_norm": 0.1997835785150528, + "block3_v_max_spectral_norm": 0.012032799422740936, + "block3_o_update_fnorm": 0.23175807297229767, + "block3_o_max_l1_linf_norm": 0.1952252984046936, + "block3_o_max_spectral_norm": 0.01204721536487341, + "block3_mlp_win_update_fnorm": 0.2660979628562927, + "block3_mlp_win_max_l1_linf_norm": 0.18171484768390656, + "block3_mlp_win_max_spectral_norm": 0.012041033245623112, + "block3_mlp_wout_update_fnorm": 
0.24237589538097382, + "block3_mlp_wout_max_l1_linf_norm": 0.40067189931869507, + "block3_mlp_wout_max_spectral_norm": 0.011382339522242546, + "block7_q_update_fnorm": 0.24177701771259308, + "block7_q_max_l1_linf_norm": 0.20659953355789185, + "block7_q_max_spectral_norm": 0.012041807174682617, + "block7_k_update_fnorm": 0.24628117680549622, + "block7_k_max_l1_linf_norm": 0.2096821516752243, + "block7_k_max_spectral_norm": 0.012038016691803932, + "block7_v_update_fnorm": 0.21972140669822693, + "block7_v_max_l1_linf_norm": 0.20986348390579224, + "block7_v_max_spectral_norm": 0.012037497013807297, + "block7_o_update_fnorm": 0.24803847074508667, + "block7_o_max_l1_linf_norm": 0.20845752954483032, + "block7_o_max_spectral_norm": 0.012047306634485722, + "block7_mlp_win_update_fnorm": 0.2620457410812378, + "block7_mlp_win_max_l1_linf_norm": 0.14994290471076965, + "block7_mlp_win_max_spectral_norm": 0.012056306004524231, + "block7_mlp_wout_update_fnorm": 0.24751964211463928, + "block7_mlp_wout_max_l1_linf_norm": 0.4111359119415283, + "block7_mlp_wout_max_spectral_norm": 0.011386813595890999, + "block11_q_update_fnorm": 0.2484968900680542, + "block11_q_max_l1_linf_norm": 0.21338710188865662, + "block11_q_max_spectral_norm": 0.012043259106576443, + "block11_k_update_fnorm": 0.24959032237529755, + "block11_k_max_l1_linf_norm": 0.21616879105567932, + "block11_k_max_spectral_norm": 0.012038600631058216, + "block11_v_update_fnorm": 0.2461409568786621, + "block11_v_max_l1_linf_norm": 0.20560353994369507, + "block11_v_max_spectral_norm": 0.012047132477164268, + "block11_o_update_fnorm": 0.24918825924396515, + "block11_o_max_l1_linf_norm": 0.20907056331634521, + "block11_o_max_spectral_norm": 0.012038253247737885, + "block11_mlp_win_update_fnorm": 0.24271687865257263, + "block11_mlp_win_max_l1_linf_norm": 0.16491714119911194, + "block11_mlp_win_max_spectral_norm": 0.011379743926227093, + "block11_mlp_wout_update_fnorm": 0.23690496385097504, + "block11_mlp_wout_max_l1_linf_norm": 0.39541515707969666, + "block11_mlp_wout_max_spectral_norm": 0.011367164552211761, + "total_sharpness": 0.005458020605146885, + "block_total_sharpness": 0.007192349061369896, + "v_norm_block": 1.9663845300674438, + "v_T_H_v_block": 0.02781042642891407, + "v_norm": 2.3757870197296143, + "ip_v_neg_g_hvp": 0.04920301213860512, + "cos_v_neg_g_hvp": 0.03124401904642582, + "g_hvp_norm": 0.6628530621528625, + "ip_v_neg_g_t": 0.05182259902358055, + "cos_v_neg_g_t": 0.03903089463710785, + "g_t_norm": 0.558860182762146, + "g_norm": 0.6628530621528625, + "hv_norm": 0.7081872224807739, + "cos_v_hv": 0.018310263752937317, + "hg_norm": 47.969512939453125, + "cos_g_hg": 0.430677205324173, + "v_parallel_norm": 0.005287289619445801, + "v_perp_norm": 2.375781297683716, + "embed_lm_head_v_norm": 1.3333020210266113, + "embed_lm_head_cos_v_neg_g": 0.058669887483119965, + "layer_1_v_norm": 0.5751833319664001, + "layer_1_cos_v_neg_g": 0.021938594058156013, + "layer_2_v_norm": 0.476030558347702, + "layer_2_cos_v_neg_g": 0.02887960895895958, + "layer_3_v_norm": 0.4163254201412201, + "layer_3_cos_v_neg_g": 0.030717693269252777, + "layer_4_v_norm": 0.5500814318656921, + "layer_4_cos_v_neg_g": 0.030901804566383362, + "layer_5_v_norm": 0.5851160287857056, + "layer_5_cos_v_neg_g": 0.033336617052555084, + "layer_6_v_norm": 0.5914856791496277, + "layer_6_cos_v_neg_g": 0.03384988009929657, + "layer_7_v_norm": 0.5966957211494446, + "layer_7_cos_v_neg_g": 0.03314706310629845, + "layer_8_v_norm": 0.5991430878639221, + "layer_8_cos_v_neg_g": 0.034327153116464615, + 
"layer_9_v_norm": 0.5989130735397339, + "layer_9_cos_v_neg_g": 0.0343027263879776, + "layer_10_v_norm": 0.5968837141990662, + "layer_10_cos_v_neg_g": 0.03704210743308067, + "layer_11_v_norm": 0.5909298658370972, + "layer_11_cos_v_neg_g": 0.04664405435323715, + "layer_12_v_norm": 0.6015909314155579, + "layer_12_cos_v_neg_g": 0.07450126856565475, + "block0_q_v_norm": 0.2359895259141922, + "block0_q_cos_v_neg_g": 0.05042719468474388, + "block0_k_v_norm": 0.232884019613266, + "block0_k_cos_v_neg_g": 0.030314594507217407, + "block0_v_v_norm": 0.15819460153579712, + "block0_v_cos_v_neg_g": 0.023315822705626488, + "block0_o_v_norm": 0.22386159002780914, + "block0_o_cos_v_neg_g": 0.044998954981565475, + "block0_mlp_win_v_norm": 0.27157947421073914, + "block0_mlp_win_cos_v_neg_g": 0.03059655986726284, + "block0_mlp_wout_v_norm": 0.26809367537498474, + "block0_mlp_wout_cos_v_neg_g": 0.05676652863621712, + "block3_q_v_norm": 0.20933233201503754, + "block3_q_cos_v_neg_g": 0.03463427722454071, + "block3_k_v_norm": 0.20307068526744843, + "block3_k_cos_v_neg_g": 0.05849948897957802, + "block3_v_v_norm": 0.18453216552734375, + "block3_v_cos_v_neg_g": 0.02986137941479683, + "block3_o_v_norm": 0.23175807297229767, + "block3_o_cos_v_neg_g": 0.05190133675932884, + "block3_mlp_win_v_norm": 0.2660979628562927, + "block3_mlp_win_cos_v_neg_g": 0.03544880449771881, + "block3_mlp_wout_v_norm": 0.24237589538097382, + "block3_mlp_wout_cos_v_neg_g": 0.09907221794128418, + "block7_q_v_norm": 0.24177701771259308, + "block7_q_cos_v_neg_g": 0.044526971876621246, + "block7_k_v_norm": 0.24628117680549622, + "block7_k_cos_v_neg_g": 0.0869116485118866, + "block7_v_v_norm": 0.21972140669822693, + "block7_v_cos_v_neg_g": 0.03211429342627525, + "block7_o_v_norm": 0.24803847074508667, + "block7_o_cos_v_neg_g": 0.07430437952280045, + "block7_mlp_win_v_norm": 0.2620457410812378, + "block7_mlp_win_cos_v_neg_g": 0.04485013335943222, + "block7_mlp_wout_v_norm": 0.24751964211463928, + "block7_mlp_wout_cos_v_neg_g": 0.1210847944021225, + "block11_q_v_norm": 0.2484968900680542, + "block11_q_cos_v_neg_g": 0.08216286450624466, + "block11_k_v_norm": 0.24959032237529755, + "block11_k_cos_v_neg_g": 0.09693410247564316, + "block11_v_v_norm": 0.2461409568786621, + "block11_v_cos_v_neg_g": 0.05555446073412895, + "block11_o_v_norm": 0.24918825924396515, + "block11_o_cos_v_neg_g": 0.08598435670137405, + "block11_mlp_win_v_norm": 0.24271687865257263, + "block11_mlp_win_cos_v_neg_g": 0.10530716180801392, + "block11_mlp_wout_v_norm": 0.23690496385097504, + "block11_mlp_wout_cos_v_neg_g": 0.08114811778068542, + "embed_lm_head_sharpness": 0.0003748461022041738, + "layer_1_sharpness": 0.005083085969090462, + "layer_2_sharpness": 0.004896683618426323, + "layer_3_sharpness": 0.003089082892984152, + "layer_4_sharpness": 0.0012783037964254618, + "layer_5_sharpness": 0.0011737651657313108, + "layer_6_sharpness": 0.0014158233534544706, + "layer_7_sharpness": 0.001708411262370646, + "layer_8_sharpness": 0.0012445306638255715, + "layer_9_sharpness": 0.0008193179965019226, + "layer_10_sharpness": 0.0004485888057388365, + "layer_11_sharpness": 0.000485732191009447, + "layer_12_sharpness": 0.00037856909330002964, + "block0_q_sharpness": 0.0011027198052033782, + "block0_k_sharpness": -0.00010728417692007497, + "block0_v_sharpness": 0.021529817953705788, + "block0_o_sharpness": 0.0010255764937028289, + "block0_mlp_win_sharpness": 0.0005518571124412119, + "block0_mlp_wout_sharpness": 0.0007511975709348917, + "block3_q_sharpness": 6.962980114622042e-05, + 
"block3_k_sharpness": 0.0006334596546366811, + "block3_v_sharpness": 0.0037149498239159584, + "block3_o_sharpness": 0.00038183401920832694, + "block3_mlp_win_sharpness": 0.00012385292211547494, + "block3_mlp_wout_sharpness": 5.685094220098108e-05, + "block7_q_sharpness": 7.631714106537402e-05, + "block7_k_sharpness": 9.89119871519506e-05, + "block7_v_sharpness": 0.003442078363150358, + "block7_o_sharpness": 9.551377297611907e-05, + "block7_mlp_win_sharpness": 0.0003487777430564165, + "block7_mlp_wout_sharpness": 9.284134284825996e-05, + "block11_q_sharpness": 3.30349248542916e-05, + "block11_k_sharpness": 4.593430276145227e-05, + "block11_v_sharpness": 0.00018783727136906236, + "block11_o_sharpness": 4.5106327888788655e-05, + "block11_mlp_win_sharpness": 0.00026280584279447794, + "block11_mlp_wout_sharpness": 0.00043239459046162665, + "sum_layer_numerators": 0.006426043254866256, + "block_diag_sharpness": 0.0016619071136415695, + "cross_layer_sharpness": 0.005530441947728327 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..531065dc351e8df458620d3b3284f2746414253f --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.370368003845215, + "total_l1_linf_norm": 20238.18359375, + "total_spectral_norm": 2.370368242263794, + "embed_lm_head_update_fnorm": 1.3428678512573242, + "embed_lm_head_max_l1_linf_norm": 0.37358811497688293, + "embed_lm_head_max_spectral_norm": 0.20969337224960327, + "layer_1_update_fnorm": 0.5681720972061157, + "layer_1_max_l1_linf_norm": 0.4361671805381775, + "layer_1_max_spectral_norm": 0.012045104056596756, + "layer_2_update_fnorm": 0.4424891173839569, + "layer_2_max_l1_linf_norm": 0.4559541940689087, + "layer_2_max_spectral_norm": 0.01450869720429182, + "layer_3_update_fnorm": 0.41227853298187256, + "layer_3_max_l1_linf_norm": 0.560361921787262, + "layer_3_max_spectral_norm": 0.021644923835992813, + "layer_4_update_fnorm": 0.5401502847671509, + "layer_4_max_l1_linf_norm": 0.43640339374542236, + "layer_4_max_spectral_norm": 0.01203998364508152, + "layer_5_update_fnorm": 0.5851784944534302, + "layer_5_max_l1_linf_norm": 0.4073023200035095, + "layer_5_max_spectral_norm": 0.012049292214214802, + "layer_6_update_fnorm": 0.5910917520523071, + "layer_6_max_l1_linf_norm": 0.41051626205444336, + "layer_6_max_spectral_norm": 0.012050088495016098, + "layer_7_update_fnorm": 0.6005680561065674, + "layer_7_max_l1_linf_norm": 0.405080646276474, + "layer_7_max_spectral_norm": 0.012046069838106632, + "layer_8_update_fnorm": 0.5999310612678528, + "layer_8_max_l1_linf_norm": 0.41259312629699707, + "layer_8_max_spectral_norm": 0.012045716866850853, + "layer_9_update_fnorm": 0.598946750164032, + "layer_9_max_l1_linf_norm": 0.41280144453048706, + "layer_9_max_spectral_norm": 0.012052732519805431, + "layer_10_update_fnorm": 0.5981257557868958, + "layer_10_max_l1_linf_norm": 0.4134449064731598, + "layer_10_max_spectral_norm": 0.012046366930007935, + "layer_11_update_fnorm": 0.5878198146820068, + "layer_11_max_l1_linf_norm": 0.4159872829914093, + "layer_11_max_spectral_norm": 0.012043047696352005, + "layer_12_update_fnorm": 0.6004613041877747, + 
"layer_12_max_l1_linf_norm": 0.4148041009902954, + "layer_12_max_spectral_norm": 0.01204889640212059, + "block0_q_update_fnorm": 0.23928450047969818, + "block0_q_max_l1_linf_norm": 0.20929133892059326, + "block0_q_max_spectral_norm": 0.012041131034493446, + "block0_k_update_fnorm": 0.23371770977973938, + "block0_k_max_l1_linf_norm": 0.2113436460494995, + "block0_k_max_spectral_norm": 0.0120411841198802, + "block0_v_update_fnorm": 0.15055346488952637, + "block0_v_max_l1_linf_norm": 0.17579545080661774, + "block0_v_max_spectral_norm": 0.012027466669678688, + "block0_o_update_fnorm": 0.2202337086200714, + "block0_o_max_l1_linf_norm": 0.18839150667190552, + "block0_o_max_spectral_norm": 0.012035480700433254, + "block0_mlp_win_update_fnorm": 0.26287779211997986, + "block0_mlp_win_max_l1_linf_norm": 0.1730569303035736, + "block0_mlp_win_max_spectral_norm": 0.012045104056596756, + "block0_mlp_wout_update_fnorm": 0.2655108571052551, + "block0_mlp_wout_max_l1_linf_norm": 0.4361671805381775, + "block0_mlp_wout_max_spectral_norm": 0.012041952461004257, + "block3_q_update_fnorm": 0.202580064535141, + "block3_q_max_l1_linf_norm": 0.21197769045829773, + "block3_q_max_spectral_norm": 0.012039314955472946, + "block3_k_update_fnorm": 0.19941042363643646, + "block3_k_max_l1_linf_norm": 0.21310409903526306, + "block3_k_max_spectral_norm": 0.01203947700560093, + "block3_v_update_fnorm": 0.16963230073451996, + "block3_v_max_l1_linf_norm": 0.1857873499393463, + "block3_v_max_spectral_norm": 0.012029467150568962, + "block3_o_update_fnorm": 0.23100389540195465, + "block3_o_max_l1_linf_norm": 0.1958683729171753, + "block3_o_max_spectral_norm": 0.01203998364508152, + "block3_mlp_win_update_fnorm": 0.2646346092224121, + "block3_mlp_win_max_l1_linf_norm": 0.17293575406074524, + "block3_mlp_win_max_spectral_norm": 0.012038195505738258, + "block3_mlp_wout_update_fnorm": 0.2419542670249939, + "block3_mlp_wout_max_l1_linf_norm": 0.39813780784606934, + "block3_mlp_wout_max_spectral_norm": 0.01139582134783268, + "block7_q_update_fnorm": 0.24211382865905762, + "block7_q_max_l1_linf_norm": 0.21070438623428345, + "block7_q_max_spectral_norm": 0.01204294990748167, + "block7_k_update_fnorm": 0.24649693071842194, + "block7_k_max_l1_linf_norm": 0.21011345088481903, + "block7_k_max_spectral_norm": 0.012045716866850853, + "block7_v_update_fnorm": 0.21735377609729767, + "block7_v_max_l1_linf_norm": 0.21020230650901794, + "block7_v_max_spectral_norm": 0.012036161497235298, + "block7_o_update_fnorm": 0.24792471528053284, + "block7_o_max_l1_linf_norm": 0.20763643085956573, + "block7_o_max_spectral_norm": 0.012041795067489147, + "block7_mlp_win_update_fnorm": 0.26582884788513184, + "block7_mlp_win_max_l1_linf_norm": 0.15136045217514038, + "block7_mlp_win_max_spectral_norm": 0.012044733390212059, + "block7_mlp_wout_update_fnorm": 0.24704469740390778, + "block7_mlp_wout_max_l1_linf_norm": 0.41259312629699707, + "block7_mlp_wout_max_spectral_norm": 0.011372213251888752, + "block11_q_update_fnorm": 0.24829374253749847, + "block11_q_max_l1_linf_norm": 0.2131786048412323, + "block11_q_max_spectral_norm": 0.012038776651024818, + "block11_k_update_fnorm": 0.24920138716697693, + "block11_k_max_l1_linf_norm": 0.21491661667823792, + "block11_k_max_spectral_norm": 0.012039585039019585, + "block11_v_update_fnorm": 0.24592693150043488, + "block11_v_max_l1_linf_norm": 0.20782878994941711, + "block11_v_max_spectral_norm": 0.01204889640212059, + "block11_o_update_fnorm": 0.24856936931610107, + "block11_o_max_l1_linf_norm": 0.20712360739707947, + 
"block11_o_max_spectral_norm": 0.012038073502480984, + "block11_mlp_win_update_fnorm": 0.24062176048755646, + "block11_mlp_win_max_l1_linf_norm": 0.168904647231102, + "block11_mlp_win_max_spectral_norm": 0.01139648724347353, + "block11_mlp_wout_update_fnorm": 0.23763003945350647, + "block11_mlp_wout_max_l1_linf_norm": 0.39578619599342346, + "block11_mlp_wout_max_spectral_norm": 0.011361776851117611, + "total_sharpness": 0.006260917987674475, + "block_total_sharpness": 0.007959369570016861, + "v_norm_block": 1.953292727470398, + "v_T_H_v_block": 0.03036780282855034, + "v_norm": 2.370368003845215, + "ip_v_neg_g_hvp": 0.04851507395505905, + "cos_v_neg_g_hvp": 0.026915885508060455, + "g_hvp_norm": 0.7604177594184875, + "ip_v_neg_g_t": 0.053004879504442215, + "cos_v_neg_g_t": 0.028442272916436195, + "g_t_norm": 0.7862049341201782, + "g_norm": 0.7604177594184875, + "hv_norm": 1.0035287141799927, + "cos_v_hv": 0.014788494445383549, + "hg_norm": 74.53883361816406, + "cos_g_hg": 0.6335495114326477, + "v_parallel_norm": 0.0063470760360360146, + "v_perp_norm": 2.370359420776367, + "embed_lm_head_v_norm": 1.3428678512573242, + "embed_lm_head_cos_v_neg_g": 0.03935560584068298, + "layer_1_v_norm": 0.5681720972061157, + "layer_1_cos_v_neg_g": 0.019511576741933823, + "layer_2_v_norm": 0.4424891173839569, + "layer_2_cos_v_neg_g": 0.027520611882209778, + "layer_3_v_norm": 0.41227856278419495, + "layer_3_cos_v_neg_g": 0.029604576528072357, + "layer_4_v_norm": 0.5401502847671509, + "layer_4_cos_v_neg_g": 0.028333796188235283, + "layer_5_v_norm": 0.5851784944534302, + "layer_5_cos_v_neg_g": 0.030711645260453224, + "layer_6_v_norm": 0.5910917520523071, + "layer_6_cos_v_neg_g": 0.03168383613228798, + "layer_7_v_norm": 0.6005680561065674, + "layer_7_cos_v_neg_g": 0.03267880156636238, + "layer_8_v_norm": 0.5999310612678528, + "layer_8_cos_v_neg_g": 0.033335015177726746, + "layer_9_v_norm": 0.598946750164032, + "layer_9_cos_v_neg_g": 0.03331233188509941, + "layer_10_v_norm": 0.5981257557868958, + "layer_10_cos_v_neg_g": 0.03577578067779541, + "layer_11_v_norm": 0.5878198146820068, + "layer_11_cos_v_neg_g": 0.045054685324430466, + "layer_12_v_norm": 0.6004613041877747, + "layer_12_cos_v_neg_g": 0.06355912238359451, + "block0_q_v_norm": 0.23928450047969818, + "block0_q_cos_v_neg_g": 0.034435614943504333, + "block0_k_v_norm": 0.23371770977973938, + "block0_k_cos_v_neg_g": 0.020840588957071304, + "block0_v_v_norm": 0.15055346488952637, + "block0_v_cos_v_neg_g": 0.03516886383295059, + "block0_o_v_norm": 0.2202337086200714, + "block0_o_cos_v_neg_g": 0.042487192898988724, + "block0_mlp_win_v_norm": 0.26287779211997986, + "block0_mlp_win_cos_v_neg_g": 0.03679753094911575, + "block0_mlp_wout_v_norm": 0.2655108571052551, + "block0_mlp_wout_cos_v_neg_g": 0.05603201687335968, + "block3_q_v_norm": 0.202580064535141, + "block3_q_cos_v_neg_g": 0.032040759921073914, + "block3_k_v_norm": 0.19941042363643646, + "block3_k_cos_v_neg_g": 0.04708319157361984, + "block3_v_v_norm": 0.16963230073451996, + "block3_v_cos_v_neg_g": 0.03077893890440464, + "block3_o_v_norm": 0.23100389540195465, + "block3_o_cos_v_neg_g": 0.050649624317884445, + "block3_mlp_win_v_norm": 0.2646346092224121, + "block3_mlp_win_cos_v_neg_g": 0.03329186514019966, + "block3_mlp_wout_v_norm": 0.2419542670249939, + "block3_mlp_wout_cos_v_neg_g": 0.10163851082324982, + "block7_q_v_norm": 0.24211382865905762, + "block7_q_cos_v_neg_g": 0.03922577202320099, + "block7_k_v_norm": 0.24649693071842194, + "block7_k_cos_v_neg_g": 0.0784725546836853, + "block7_v_v_norm": 
0.21735377609729767, + "block7_v_cos_v_neg_g": 0.029576841741800308, + "block7_o_v_norm": 0.24792471528053284, + "block7_o_cos_v_neg_g": 0.07399259507656097, + "block7_mlp_win_v_norm": 0.26582884788513184, + "block7_mlp_win_cos_v_neg_g": 0.04310735687613487, + "block7_mlp_wout_v_norm": 0.24704469740390778, + "block7_mlp_wout_cos_v_neg_g": 0.12387946248054504, + "block11_q_v_norm": 0.24829374253749847, + "block11_q_cos_v_neg_g": 0.07757275551557541, + "block11_k_v_norm": 0.24920138716697693, + "block11_k_cos_v_neg_g": 0.095035620033741, + "block11_v_v_norm": 0.24592693150043488, + "block11_v_cos_v_neg_g": 0.05120787397027016, + "block11_o_v_norm": 0.24856936931610107, + "block11_o_cos_v_neg_g": 0.0842224583029747, + "block11_mlp_win_v_norm": 0.24062176048755646, + "block11_mlp_win_cos_v_neg_g": 0.08354360610246658, + "block11_mlp_wout_v_norm": 0.23763003945350647, + "block11_mlp_wout_cos_v_neg_g": 0.07404667139053345, + "embed_lm_head_sharpness": 0.0004725710896309465, + "layer_1_sharpness": 0.013168880715966225, + "layer_2_sharpness": 0.0062010823749005795, + "layer_3_sharpness": 0.00480040954425931, + "layer_4_sharpness": 0.001701686647720635, + "layer_5_sharpness": 0.0010140707017853856, + "layer_6_sharpness": 0.0011516616214066744, + "layer_7_sharpness": 0.001456310274079442, + "layer_8_sharpness": 0.0011036810465157032, + "layer_9_sharpness": 0.0007141941459849477, + "layer_10_sharpness": 0.0004496585170272738, + "layer_11_sharpness": 0.0005925542209297419, + "layer_12_sharpness": 0.0006792531930841506, + "block0_q_sharpness": 0.00019537242769729346, + "block0_k_sharpness": 0.00018487947818357497, + "block0_v_sharpness": 0.07193222641944885, + "block0_o_sharpness": 0.0013043887447565794, + "block0_mlp_win_sharpness": 0.0022137875203043222, + "block0_mlp_wout_sharpness": 0.0012941103195771575, + "block3_q_sharpness": 7.743792230030522e-05, + "block3_k_sharpness": 0.0007834777352400124, + "block3_v_sharpness": 0.006033098790794611, + "block3_o_sharpness": 0.0003455898549873382, + "block3_mlp_win_sharpness": 0.00012922198220621794, + "block3_mlp_wout_sharpness": 5.676803630194627e-05, + "block7_q_sharpness": 7.028937397990376e-05, + "block7_k_sharpness": 8.178902498912066e-05, + "block7_v_sharpness": 0.003138989210128784, + "block7_o_sharpness": 8.884667477104813e-05, + "block7_mlp_win_sharpness": 0.00036860370892100036, + "block7_mlp_wout_sharpness": 8.453623013338074e-05, + "block11_q_sharpness": 3.630349601735361e-05, + "block11_k_sharpness": 3.998853571829386e-05, + "block11_v_sharpness": 0.0001945576659636572, + "block11_o_sharpness": 4.348091533756815e-05, + "block11_mlp_win_sharpness": 0.0003511926915962249, + "block11_mlp_wout_sharpness": 0.00121699133887887, + "sum_layer_numerators": 0.009316612409694728, + "block_diag_sharpness": 0.0024418746263977446, + "cross_layer_sharpness": 0.005517494943619116 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..19f8e34cd790e253f4bb1bc4748d05d2e4c34108 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_4500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.359746217727661, + "total_l1_linf_norm": 20135.35546875, + "total_spectral_norm": 2.359746217727661, 
+ "embed_lm_head_update_fnorm": 1.3378708362579346, + "embed_lm_head_max_l1_linf_norm": 0.33890050649642944, + "embed_lm_head_max_spectral_norm": 0.2058279663324356, + "layer_1_update_fnorm": 0.553353488445282, + "layer_1_max_l1_linf_norm": 0.43355751037597656, + "layer_1_max_spectral_norm": 0.012046224437654018, + "layer_2_update_fnorm": 0.43526721000671387, + "layer_2_max_l1_linf_norm": 0.4068351984024048, + "layer_2_max_spectral_norm": 0.01203853264451027, + "layer_3_update_fnorm": 0.41723382472991943, + "layer_3_max_l1_linf_norm": 0.5434971451759338, + "layer_3_max_spectral_norm": 0.021304946392774582, + "layer_4_update_fnorm": 0.5242317318916321, + "layer_4_max_l1_linf_norm": 0.41787654161453247, + "layer_4_max_spectral_norm": 0.012537403032183647, + "layer_5_update_fnorm": 0.5845304727554321, + "layer_5_max_l1_linf_norm": 0.40720227360725403, + "layer_5_max_spectral_norm": 0.012049068696796894, + "layer_6_update_fnorm": 0.5919308066368103, + "layer_6_max_l1_linf_norm": 0.41086751222610474, + "layer_6_max_spectral_norm": 0.012054762803018093, + "layer_7_update_fnorm": 0.599522054195404, + "layer_7_max_l1_linf_norm": 0.40832090377807617, + "layer_7_max_spectral_norm": 0.012053015641868114, + "layer_8_update_fnorm": 0.6012479066848755, + "layer_8_max_l1_linf_norm": 0.4106050133705139, + "layer_8_max_spectral_norm": 0.012052985839545727, + "layer_9_update_fnorm": 0.5990048050880432, + "layer_9_max_l1_linf_norm": 0.4100656509399414, + "layer_9_max_spectral_norm": 0.01205031480640173, + "layer_10_update_fnorm": 0.5982604622840881, + "layer_10_max_l1_linf_norm": 0.4137236475944519, + "layer_10_max_spectral_norm": 0.012056702747941017, + "layer_11_update_fnorm": 0.5867692232131958, + "layer_11_max_l1_linf_norm": 0.4136159420013428, + "layer_11_max_spectral_norm": 0.012049124576151371, + "layer_12_update_fnorm": 0.599949300289154, + "layer_12_max_l1_linf_norm": 0.40786299109458923, + "layer_12_max_spectral_norm": 0.012043533846735954, + "block0_q_update_fnorm": 0.2295777052640915, + "block0_q_max_l1_linf_norm": 0.21426722407341003, + "block0_q_max_spectral_norm": 0.012039359658956528, + "block0_k_update_fnorm": 0.21345749497413635, + "block0_k_max_l1_linf_norm": 0.2178836315870285, + "block0_k_max_spectral_norm": 0.012035414576530457, + "block0_v_update_fnorm": 0.13948842883110046, + "block0_v_max_l1_linf_norm": 0.16909416019916534, + "block0_v_max_spectral_norm": 0.012024576775729656, + "block0_o_update_fnorm": 0.21691261231899261, + "block0_o_max_l1_linf_norm": 0.18593868613243103, + "block0_o_max_spectral_norm": 0.012037930078804493, + "block0_mlp_win_update_fnorm": 0.2662656903266907, + "block0_mlp_win_max_l1_linf_norm": 0.17808881402015686, + "block0_mlp_win_max_spectral_norm": 0.012046224437654018, + "block0_mlp_wout_update_fnorm": 0.26530081033706665, + "block0_mlp_wout_max_l1_linf_norm": 0.43355751037597656, + "block0_mlp_wout_max_spectral_norm": 0.012044436298310757, + "block3_q_update_fnorm": 0.1740516722202301, + "block3_q_max_l1_linf_norm": 0.20521725714206696, + "block3_q_max_spectral_norm": 0.012033725157380104, + "block3_k_update_fnorm": 0.18850353360176086, + "block3_k_max_l1_linf_norm": 0.21176451444625854, + "block3_k_max_spectral_norm": 0.012036202475428581, + "block3_v_update_fnorm": 0.17235784232616425, + "block3_v_max_l1_linf_norm": 0.18583238124847412, + "block3_v_max_spectral_norm": 0.01203193049877882, + "block3_o_update_fnorm": 0.22427552938461304, + "block3_o_max_l1_linf_norm": 0.18753421306610107, + "block3_o_max_spectral_norm": 0.012038473971188068, + 
"block3_mlp_win_update_fnorm": 0.2644651234149933, + "block3_mlp_win_max_l1_linf_norm": 0.16979214549064636, + "block3_mlp_win_max_spectral_norm": 0.01204122044146061, + "block3_mlp_wout_update_fnorm": 0.24226680397987366, + "block3_mlp_wout_max_l1_linf_norm": 0.39996540546417236, + "block3_mlp_wout_max_spectral_norm": 0.011400147341191769, + "block7_q_update_fnorm": 0.24159908294677734, + "block7_q_max_l1_linf_norm": 0.2095848023891449, + "block7_q_max_spectral_norm": 0.012044880539178848, + "block7_k_update_fnorm": 0.24692583084106445, + "block7_k_max_l1_linf_norm": 0.20914322137832642, + "block7_k_max_spectral_norm": 0.012044393457472324, + "block7_v_update_fnorm": 0.21567566692829132, + "block7_v_max_l1_linf_norm": 0.2099921554327011, + "block7_v_max_spectral_norm": 0.012034306302666664, + "block7_o_update_fnorm": 0.2481052130460739, + "block7_o_max_l1_linf_norm": 0.20743270218372345, + "block7_o_max_spectral_norm": 0.012041949667036533, + "block7_mlp_win_update_fnorm": 0.27002042531967163, + "block7_mlp_win_max_l1_linf_norm": 0.14975564181804657, + "block7_mlp_win_max_spectral_norm": 0.012052985839545727, + "block7_mlp_wout_update_fnorm": 0.24708305299282074, + "block7_mlp_wout_max_l1_linf_norm": 0.4106050133705139, + "block7_mlp_wout_max_spectral_norm": 0.011378621682524681, + "block11_q_update_fnorm": 0.24793726205825806, + "block11_q_max_l1_linf_norm": 0.21473804116249084, + "block11_q_max_spectral_norm": 0.012043466791510582, + "block11_k_update_fnorm": 0.24929824471473694, + "block11_k_max_l1_linf_norm": 0.22110790014266968, + "block11_k_max_spectral_norm": 0.012040394358336926, + "block11_v_update_fnorm": 0.24571792781352997, + "block11_v_max_l1_linf_norm": 0.20807036757469177, + "block11_v_max_spectral_norm": 0.012043533846735954, + "block11_o_update_fnorm": 0.24867920577526093, + "block11_o_max_l1_linf_norm": 0.2074451595544815, + "block11_o_max_spectral_norm": 0.012039474211633205, + "block11_mlp_win_update_fnorm": 0.24016986787319183, + "block11_mlp_win_max_l1_linf_norm": 0.16764682531356812, + "block11_mlp_win_max_spectral_norm": 0.011402500793337822, + "block11_mlp_wout_update_fnorm": 0.23716816306114197, + "block11_mlp_wout_max_l1_linf_norm": 0.39534133672714233, + "block11_mlp_wout_max_spectral_norm": 0.0113695552572608, + "total_sharpness": 0.007227829657495022, + "block_total_sharpness": 0.009657847695052624, + "v_norm_block": 1.9438374042510986, + "v_T_H_v_block": 0.03649221360683441, + "v_norm": 2.359746217727661, + "ip_v_neg_g_hvp": 0.051759518682956696, + "cos_v_neg_g_hvp": 0.028142189607024193, + "g_hvp_norm": 0.7794119119644165, + "ip_v_neg_g_t": 0.055376797914505005, + "cos_v_neg_g_t": 0.032942067831754684, + "g_t_norm": 0.7123799324035645, + "g_norm": 0.7794119119644165, + "hv_norm": 1.88020658493042, + "cos_v_hv": 0.009071260690689087, + "hg_norm": 428.4442138671875, + "cos_g_hg": 0.14762431383132935, + "v_parallel_norm": 0.005964368581771851, + "v_perp_norm": 2.35973858833313, + "embed_lm_head_v_norm": 1.3378708362579346, + "embed_lm_head_cos_v_neg_g": 0.03870938718318939, + "layer_1_v_norm": 0.553353488445282, + "layer_1_cos_v_neg_g": 0.01853746734559536, + "layer_2_v_norm": 0.43526721000671387, + "layer_2_cos_v_neg_g": 0.03470201790332794, + "layer_3_v_norm": 0.4172338545322418, + "layer_3_cos_v_neg_g": 0.03480183705687523, + "layer_4_v_norm": 0.5242317318916321, + "layer_4_cos_v_neg_g": 0.03153686225414276, + "layer_5_v_norm": 0.5845304727554321, + "layer_5_cos_v_neg_g": 0.03280586004257202, + "layer_6_v_norm": 0.5919308066368103, + "layer_6_cos_v_neg_g": 
0.03253515809774399, + "layer_7_v_norm": 0.599522054195404, + "layer_7_cos_v_neg_g": 0.03229846432805061, + "layer_8_v_norm": 0.6012479662895203, + "layer_8_cos_v_neg_g": 0.03152545541524887, + "layer_9_v_norm": 0.5990048050880432, + "layer_9_cos_v_neg_g": 0.032128509134054184, + "layer_10_v_norm": 0.5982604622840881, + "layer_10_cos_v_neg_g": 0.03677242994308472, + "layer_11_v_norm": 0.5867692828178406, + "layer_11_cos_v_neg_g": 0.04683040827512741, + "layer_12_v_norm": 0.599949300289154, + "layer_12_cos_v_neg_g": 0.08094213902950287, + "block0_q_v_norm": 0.2295777052640915, + "block0_q_cos_v_neg_g": 0.030606403946876526, + "block0_k_v_norm": 0.21345749497413635, + "block0_k_cos_v_neg_g": 0.030819188803434372, + "block0_v_v_norm": 0.13948842883110046, + "block0_v_cos_v_neg_g": 0.03152943029999733, + "block0_o_v_norm": 0.21691261231899261, + "block0_o_cos_v_neg_g": 0.03805005922913551, + "block0_mlp_win_v_norm": 0.2662656903266907, + "block0_mlp_win_cos_v_neg_g": 0.029984068125486374, + "block0_mlp_wout_v_norm": 0.26530081033706665, + "block0_mlp_wout_cos_v_neg_g": 0.05717924237251282, + "block3_q_v_norm": 0.1740516722202301, + "block3_q_cos_v_neg_g": 0.03461536765098572, + "block3_k_v_norm": 0.18850353360176086, + "block3_k_cos_v_neg_g": 0.06174132972955704, + "block3_v_v_norm": 0.17235784232616425, + "block3_v_cos_v_neg_g": 0.03565603122115135, + "block3_o_v_norm": 0.22427552938461304, + "block3_o_cos_v_neg_g": 0.04053092002868652, + "block3_mlp_win_v_norm": 0.2644651234149933, + "block3_mlp_win_cos_v_neg_g": 0.03499381244182587, + "block3_mlp_wout_v_norm": 0.24226680397987366, + "block3_mlp_wout_cos_v_neg_g": 0.11193210631608963, + "block7_q_v_norm": 0.24159908294677734, + "block7_q_cos_v_neg_g": 0.03876825049519539, + "block7_k_v_norm": 0.24692583084106445, + "block7_k_cos_v_neg_g": 0.08025747537612915, + "block7_v_v_norm": 0.21567566692829132, + "block7_v_cos_v_neg_g": 0.030744519084692, + "block7_o_v_norm": 0.2481052130460739, + "block7_o_cos_v_neg_g": 0.07594212144613266, + "block7_mlp_win_v_norm": 0.27002042531967163, + "block7_mlp_win_cos_v_neg_g": 0.04042189195752144, + "block7_mlp_wout_v_norm": 0.24708305299282074, + "block7_mlp_wout_cos_v_neg_g": 0.12762920558452606, + "block11_q_v_norm": 0.24793726205825806, + "block11_q_cos_v_neg_g": 0.08108537644147873, + "block11_k_v_norm": 0.24929824471473694, + "block11_k_cos_v_neg_g": 0.1048731654882431, + "block11_v_v_norm": 0.24571792781352997, + "block11_v_cos_v_neg_g": 0.054640308022499084, + "block11_o_v_norm": 0.24867920577526093, + "block11_o_cos_v_neg_g": 0.09128274023532867, + "block11_mlp_win_v_norm": 0.24016986787319183, + "block11_mlp_win_cos_v_neg_g": 0.1068335697054863, + "block11_mlp_wout_v_norm": 0.23716816306114197, + "block11_mlp_wout_cos_v_neg_g": 0.0985039472579956, + "embed_lm_head_sharpness": 0.0003132180718239397, + "layer_1_sharpness": 0.006227413658052683, + "layer_2_sharpness": 0.00957933347672224, + "layer_3_sharpness": 0.00791874434798956, + "layer_4_sharpness": 0.001897925976663828, + "layer_5_sharpness": 0.0012341777328401804, + "layer_6_sharpness": 0.0014745084336027503, + "layer_7_sharpness": 0.0014833592576906085, + "layer_8_sharpness": 0.0014554131776094437, + "layer_9_sharpness": 0.0008794774184934795, + "layer_10_sharpness": 0.0004858323372900486, + "layer_11_sharpness": 0.0006038898136466742, + "layer_12_sharpness": 0.0009428975754417479, + "block0_q_sharpness": -0.0011211716337129474, + "block0_k_sharpness": -0.003689724253490567, + "block0_v_sharpness": 0.05043062940239906, + "block0_o_sharpness": 
0.0017724777571856976, + "block0_mlp_win_sharpness": 0.0007283262093551457, + "block0_mlp_wout_sharpness": 0.001663637813180685, + "block3_q_sharpness": 0.00013357655552681535, + "block3_k_sharpness": 0.0013162618270143867, + "block3_v_sharpness": 0.005556666757911444, + "block3_o_sharpness": 0.0003087917575612664, + "block3_mlp_win_sharpness": 0.00015399875701405108, + "block3_mlp_wout_sharpness": 6.290237070061266e-05, + "block7_q_sharpness": 0.00010360290616517887, + "block7_k_sharpness": 9.057464194484055e-05, + "block7_v_sharpness": 0.0038319912273436785, + "block7_o_sharpness": 7.502573862439021e-05, + "block7_mlp_win_sharpness": 0.0004826870863325894, + "block7_mlp_wout_sharpness": 8.820076618576422e-05, + "block11_q_sharpness": 4.880015330854803e-05, + "block11_k_sharpness": 5.66490889468696e-05, + "block11_v_sharpness": 0.00020281699835322797, + "block11_o_sharpness": 4.249194171279669e-05, + "block11_mlp_win_sharpness": 0.00031403335742652416, + "block11_mlp_wout_sharpness": 0.0024664716329425573, + "sum_layer_numerators": 0.008656197835398616, + "block_diag_sharpness": 0.0022909061812535947, + "cross_layer_sharpness": 0.0073669415137990295 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..9cb45bbca9cd16a542281d40b5a8e6cd2d26b33a --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.702928066253662, + "total_l1_linf_norm": 14800.43359375, + "total_spectral_norm": 1.702928066253662, + "embed_lm_head_update_fnorm": 0.9760808348655701, + "embed_lm_head_max_l1_linf_norm": 0.25323379039764404, + "embed_lm_head_max_spectral_norm": 0.23088675737380981, + "layer_1_update_fnorm": 0.4258233904838562, + "layer_1_max_l1_linf_norm": 0.3145899772644043, + "layer_1_max_spectral_norm": 0.00860289391130209, + "layer_2_update_fnorm": 0.3900372385978699, + "layer_2_max_l1_linf_norm": 0.30605995655059814, + "layer_2_max_spectral_norm": 0.008601496927440166, + "layer_3_update_fnorm": 0.3726014494895935, + "layer_3_max_l1_linf_norm": 0.3054024577140808, + "layer_3_max_spectral_norm": 0.008601752109825611, + "layer_4_update_fnorm": 0.3718297779560089, + "layer_4_max_l1_linf_norm": 0.3120923638343811, + "layer_4_max_spectral_norm": 0.008602586574852467, + "layer_5_update_fnorm": 0.4005180299282074, + "layer_5_max_l1_linf_norm": 0.31622064113616943, + "layer_5_max_spectral_norm": 0.008602422662079334, + "layer_6_update_fnorm": 0.39086413383483887, + "layer_6_max_l1_linf_norm": 0.3173533082008362, + "layer_6_max_spectral_norm": 0.008606992661952972, + "layer_7_update_fnorm": 0.40967056155204773, + "layer_7_max_l1_linf_norm": 0.3183366060256958, + "layer_7_max_spectral_norm": 0.008604687638580799, + "layer_8_update_fnorm": 0.40315160155296326, + "layer_8_max_l1_linf_norm": 0.31957191228866577, + "layer_8_max_spectral_norm": 0.008604254573583603, + "layer_9_update_fnorm": 0.4166719615459442, + "layer_9_max_l1_linf_norm": 0.3218642473220825, + "layer_9_max_spectral_norm": 0.008603673428297043, + "layer_10_update_fnorm": 0.4151226282119751, + "layer_10_max_l1_linf_norm": 0.3205111026763916, + "layer_10_max_spectral_norm": 0.008604327216744423, + "layer_11_update_fnorm": 
0.41879165172576904, + "layer_11_max_l1_linf_norm": 0.3237016201019287, + "layer_11_max_spectral_norm": 0.008603264577686787, + "layer_12_update_fnorm": 0.41449567675590515, + "layer_12_max_l1_linf_norm": 0.32558494806289673, + "layer_12_max_spectral_norm": 0.008603434078395367, + "block0_q_update_fnorm": 0.1755838394165039, + "block0_q_max_l1_linf_norm": 0.148590549826622, + "block0_q_max_spectral_norm": 0.008601329289376736, + "block0_k_update_fnorm": 0.1752701699733734, + "block0_k_max_l1_linf_norm": 0.14764028787612915, + "block0_k_max_spectral_norm": 0.008602519519627094, + "block0_v_update_fnorm": 0.1506468802690506, + "block0_v_max_l1_linf_norm": 0.1387099027633667, + "block0_v_max_spectral_norm": 0.008595154620707035, + "block0_o_update_fnorm": 0.15523850917816162, + "block0_o_max_l1_linf_norm": 0.13272622227668762, + "block0_o_max_spectral_norm": 0.008597545325756073, + "block0_mlp_win_update_fnorm": 0.1916426718235016, + "block0_mlp_win_max_l1_linf_norm": 0.09535567462444305, + "block0_mlp_win_max_spectral_norm": 0.00860289391130209, + "block0_mlp_wout_update_fnorm": 0.19034722447395325, + "block0_mlp_wout_max_l1_linf_norm": 0.3145899772644043, + "block0_mlp_wout_max_spectral_norm": 0.008601462468504906, + "block3_q_update_fnorm": 0.13714097440242767, + "block3_q_max_l1_linf_norm": 0.15257129073143005, + "block3_q_max_spectral_norm": 0.008597641251981258, + "block3_k_update_fnorm": 0.12013781815767288, + "block3_k_max_l1_linf_norm": 0.15146037936210632, + "block3_k_max_spectral_norm": 0.008597086183726788, + "block3_v_update_fnorm": 0.1284029483795166, + "block3_v_max_l1_linf_norm": 0.13847316801548004, + "block3_v_max_spectral_norm": 0.008596139028668404, + "block3_o_update_fnorm": 0.13605083525180817, + "block3_o_max_l1_linf_norm": 0.11698479950428009, + "block3_o_max_spectral_norm": 0.008595294319093227, + "block3_mlp_win_update_fnorm": 0.18326228857040405, + "block3_mlp_win_max_l1_linf_norm": 0.10741851478815079, + "block3_mlp_win_max_spectral_norm": 0.00859956257045269, + "block3_mlp_wout_update_fnorm": 0.19076810777187347, + "block3_mlp_wout_max_l1_linf_norm": 0.3120923638343811, + "block3_mlp_wout_max_spectral_norm": 0.008602586574852467, + "block7_q_update_fnorm": 0.14623641967773438, + "block7_q_max_l1_linf_norm": 0.15030990540981293, + "block7_q_max_spectral_norm": 0.008597291074693203, + "block7_k_update_fnorm": 0.1437273770570755, + "block7_k_max_l1_linf_norm": 0.15246650576591492, + "block7_k_max_spectral_norm": 0.00859934464097023, + "block7_v_update_fnorm": 0.14762264490127563, + "block7_v_max_l1_linf_norm": 0.15098629891872406, + "block7_v_max_spectral_norm": 0.00859684869647026, + "block7_o_update_fnorm": 0.15471649169921875, + "block7_o_max_l1_linf_norm": 0.13097217679023743, + "block7_o_max_spectral_norm": 0.00859921332448721, + "block7_mlp_win_update_fnorm": 0.1923646628856659, + "block7_mlp_win_max_l1_linf_norm": 0.11664540320634842, + "block7_mlp_win_max_spectral_norm": 0.008604254573583603, + "block7_mlp_wout_update_fnorm": 0.19422566890716553, + "block7_mlp_wout_max_l1_linf_norm": 0.31957191228866577, + "block7_mlp_wout_max_spectral_norm": 0.008602268062531948, + "block11_q_update_fnorm": 0.15934394299983978, + "block11_q_max_l1_linf_norm": 0.1494106501340866, + "block11_q_max_spectral_norm": 0.008599056862294674, + "block11_k_update_fnorm": 0.1605100929737091, + "block11_k_max_l1_linf_norm": 0.14616571366786957, + "block11_k_max_spectral_norm": 0.008599493652582169, + "block11_v_update_fnorm": 0.14710117876529694, + "block11_v_max_l1_linf_norm": 
0.14694157242774963, + "block11_v_max_spectral_norm": 0.008595808409154415, + "block11_o_update_fnorm": 0.15445172786712646, + "block11_o_max_l1_linf_norm": 0.13155165314674377, + "block11_o_max_spectral_norm": 0.008594932034611702, + "block11_mlp_win_update_fnorm": 0.19001352787017822, + "block11_mlp_win_max_l1_linf_norm": 0.10370999574661255, + "block11_mlp_win_max_spectral_norm": 0.008598718792200089, + "block11_mlp_wout_update_fnorm": 0.19755040109157562, + "block11_mlp_wout_max_l1_linf_norm": 0.32558494806289673, + "block11_mlp_wout_max_spectral_norm": 0.008603434078395367, + "total_sharpness": 0.03213019296526909, + "block_total_sharpness": 0.04447315260767937, + "v_norm_block": 1.3954318761825562, + "v_T_H_v_block": 0.08659946173429489, + "v_norm": 1.702928066253662, + "ip_v_neg_g_hvp": 0.08932071924209595, + "cos_v_neg_g_hvp": 0.05212464556097984, + "g_hvp_norm": 1.0062659978866577, + "ip_v_neg_g_t": 0.08961234241724014, + "cos_v_neg_g_t": 0.05539514124393463, + "g_t_norm": 0.9499480724334717, + "g_norm": 1.0062659978866577, + "hv_norm": 1.459114670753479, + "cos_v_hv": 0.037499044090509415, + "hg_norm": 58.16632843017578, + "cos_g_hg": 0.7134166955947876, + "v_parallel_norm": 0.003799762111157179, + "v_perp_norm": 1.7029237747192383, + "embed_lm_head_v_norm": 0.9760808348655701, + "embed_lm_head_cos_v_neg_g": 0.10113900154829025, + "layer_1_v_norm": 0.4258233904838562, + "layer_1_cos_v_neg_g": 0.05596756562590599, + "layer_2_v_norm": 0.3900372385978699, + "layer_2_cos_v_neg_g": 0.052848830819129944, + "layer_3_v_norm": 0.3726014494895935, + "layer_3_cos_v_neg_g": 0.04879824072122574, + "layer_4_v_norm": 0.3718297779560089, + "layer_4_cos_v_neg_g": 0.0542336143553257, + "layer_5_v_norm": 0.4005180299282074, + "layer_5_cos_v_neg_g": 0.05707395449280739, + "layer_6_v_norm": 0.39086413383483887, + "layer_6_cos_v_neg_g": 0.07378534227609634, + "layer_7_v_norm": 0.40967056155204773, + "layer_7_cos_v_neg_g": 0.07646247744560242, + "layer_8_v_norm": 0.40315160155296326, + "layer_8_cos_v_neg_g": 0.07912307232618332, + "layer_9_v_norm": 0.4166719615459442, + "layer_9_cos_v_neg_g": 0.0781562402844429, + "layer_10_v_norm": 0.4151226282119751, + "layer_10_cos_v_neg_g": 0.07725407928228378, + "layer_11_v_norm": 0.41879165172576904, + "layer_11_cos_v_neg_g": 0.0737127810716629, + "layer_12_v_norm": 0.41449567675590515, + "layer_12_cos_v_neg_g": 0.06930753588676453, + "block0_q_v_norm": 0.1755838394165039, + "block0_q_cos_v_neg_g": 0.1018948182463646, + "block0_k_v_norm": 0.1752701699733734, + "block0_k_cos_v_neg_g": 0.08688527345657349, + "block0_v_v_norm": 0.1506468802690506, + "block0_v_cos_v_neg_g": 0.05030981823801994, + "block0_o_v_norm": 0.15523850917816162, + "block0_o_cos_v_neg_g": 0.07675517350435257, + "block0_mlp_win_v_norm": 0.1916426718235016, + "block0_mlp_win_cos_v_neg_g": 0.08083763718605042, + "block0_mlp_wout_v_norm": 0.19034722447395325, + "block0_mlp_wout_cos_v_neg_g": 0.07550172507762909, + "block3_q_v_norm": 0.13714097440242767, + "block3_q_cos_v_neg_g": 0.0469304658472538, + "block3_k_v_norm": 0.12013781815767288, + "block3_k_cos_v_neg_g": 0.0622088797390461, + "block3_v_v_norm": 0.1284029483795166, + "block3_v_cos_v_neg_g": 0.048897795379161835, + "block3_o_v_norm": 0.13605083525180817, + "block3_o_cos_v_neg_g": 0.05618314445018768, + "block3_mlp_win_v_norm": 0.18326228857040405, + "block3_mlp_win_cos_v_neg_g": 0.06625307351350784, + "block3_mlp_wout_v_norm": 0.19076810777187347, + "block3_mlp_wout_cos_v_neg_g": 0.07718310505151749, + "block7_q_v_norm": 
0.14623641967773438, + "block7_q_cos_v_neg_g": 0.07754198461771011, + "block7_k_v_norm": 0.1437273770570755, + "block7_k_cos_v_neg_g": 0.08060555160045624, + "block7_v_v_norm": 0.14762264490127563, + "block7_v_cos_v_neg_g": 0.08190618455410004, + "block7_o_v_norm": 0.15471649169921875, + "block7_o_cos_v_neg_g": 0.07099426537752151, + "block7_mlp_win_v_norm": 0.1923646628856659, + "block7_mlp_win_cos_v_neg_g": 0.0896293967962265, + "block7_mlp_wout_v_norm": 0.19422566890716553, + "block7_mlp_wout_cos_v_neg_g": 0.09726208448410034, + "block11_q_v_norm": 0.15934394299983978, + "block11_q_cos_v_neg_g": 0.08143718540668488, + "block11_k_v_norm": 0.1605100929737091, + "block11_k_cos_v_neg_g": 0.09081672877073288, + "block11_v_v_norm": 0.14710117876529694, + "block11_v_cos_v_neg_g": 0.08349943906068802, + "block11_o_v_norm": 0.15445172786712646, + "block11_o_cos_v_neg_g": 0.08979104459285736, + "block11_mlp_win_v_norm": 0.19001352787017822, + "block11_mlp_win_cos_v_neg_g": 0.08675000816583633, + "block11_mlp_wout_v_norm": 0.19755040109157562, + "block11_mlp_wout_cos_v_neg_g": 0.07806365191936493, + "embed_lm_head_sharpness": 0.0008268538513220847, + "layer_1_sharpness": 0.030658844858407974, + "layer_2_sharpness": 0.010856236331164837, + "layer_3_sharpness": 0.011919781565666199, + "layer_4_sharpness": 0.010385395959019661, + "layer_5_sharpness": 0.006515813060104847, + "layer_6_sharpness": 0.006446355488151312, + "layer_7_sharpness": 0.004137528128921986, + "layer_8_sharpness": 0.0027815490029752254, + "layer_9_sharpness": 0.0018787389853969216, + "layer_10_sharpness": 0.001340703573077917, + "layer_11_sharpness": 0.0010710470378398895, + "layer_12_sharpness": 0.0010288581252098083, + "block0_q_sharpness": 0.0012627479154616594, + "block0_k_sharpness": 0.0018855180824175477, + "block0_v_sharpness": 0.012978559359908104, + "block0_o_sharpness": 0.009082406759262085, + "block0_mlp_win_sharpness": 0.005648647900670767, + "block0_mlp_wout_sharpness": 0.017697440460324287, + "block3_q_sharpness": 0.00025110249407589436, + "block3_k_sharpness": 0.009015052579343319, + "block3_v_sharpness": 0.0057646529749035835, + "block3_o_sharpness": 0.007165310438722372, + "block3_mlp_win_sharpness": 0.0010664929868653417, + "block3_mlp_wout_sharpness": 0.0026461367961019278, + "block7_q_sharpness": 0.0002692680573090911, + "block7_k_sharpness": 0.0004630429029930383, + "block7_v_sharpness": 0.0019726010505110025, + "block7_o_sharpness": 0.0008196509443223476, + "block7_mlp_win_sharpness": 0.0004018971521873027, + "block7_mlp_wout_sharpness": 0.001724379719235003, + "block11_q_sharpness": 5.9313508245395496e-05, + "block11_k_sharpness": 8.944303408497944e-05, + "block11_v_sharpness": 0.00045096632675267756, + "block11_o_sharpness": 0.0001623653806746006, + "block11_mlp_win_sharpness": 0.00021533989638555795, + "block11_mlp_wout_sharpness": 0.001080015324987471, + "sum_layer_numerators": 0.01439987559410784, + "block_diag_sharpness": 0.0073950558993109575, + "cross_layer_sharpness": 0.03707809670836841 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..afc7573d62e174051b48a29f51343e8cf53b913e --- /dev/null +++ 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.362642765045166, + "total_l1_linf_norm": 20146.5078125, + "total_spectral_norm": 2.362642765045166, + "embed_lm_head_update_fnorm": 1.345449447631836, + "embed_lm_head_max_l1_linf_norm": 0.3507089912891388, + "embed_lm_head_max_spectral_norm": 0.2019154280424118, + "layer_1_update_fnorm": 0.5612003803253174, + "layer_1_max_l1_linf_norm": 0.4334060549736023, + "layer_1_max_spectral_norm": 0.012043111957609653, + "layer_2_update_fnorm": 0.42835646867752075, + "layer_2_max_l1_linf_norm": 0.38180065155029297, + "layer_2_max_spectral_norm": 0.013862003572285175, + "layer_3_update_fnorm": 0.41050609946250916, + "layer_3_max_l1_linf_norm": 0.4470493197441101, + "layer_3_max_spectral_norm": 0.01836622878909111, + "layer_4_update_fnorm": 0.5306016206741333, + "layer_4_max_l1_linf_norm": 0.39770129323005676, + "layer_4_max_spectral_norm": 0.013394785113632679, + "layer_5_update_fnorm": 0.58489990234375, + "layer_5_max_l1_linf_norm": 0.4075278043746948, + "layer_5_max_spectral_norm": 0.012044978328049183, + "layer_6_update_fnorm": 0.5907135605812073, + "layer_6_max_l1_linf_norm": 0.4059486985206604, + "layer_6_max_spectral_norm": 0.012048784643411636, + "layer_7_update_fnorm": 0.5991700291633606, + "layer_7_max_l1_linf_norm": 0.40490397810935974, + "layer_7_max_spectral_norm": 0.012056469917297363, + "layer_8_update_fnorm": 0.6003873944282532, + "layer_8_max_l1_linf_norm": 0.40864843130111694, + "layer_8_max_spectral_norm": 0.01205722987651825, + "layer_9_update_fnorm": 0.5972142815589905, + "layer_9_max_l1_linf_norm": 0.40642592310905457, + "layer_9_max_spectral_norm": 0.01204830314964056, + "layer_10_update_fnorm": 0.5969678163528442, + "layer_10_max_l1_linf_norm": 0.41144615411758423, + "layer_10_max_spectral_norm": 0.012059652246534824, + "layer_11_update_fnorm": 0.5834841132164001, + "layer_11_max_l1_linf_norm": 0.4259835481643677, + "layer_11_max_spectral_norm": 0.0120443906635046, + "layer_12_update_fnorm": 0.5994566082954407, + "layer_12_max_l1_linf_norm": 0.424034059047699, + "layer_12_max_spectral_norm": 0.01204316969960928, + "block0_q_update_fnorm": 0.24313952028751373, + "block0_q_max_l1_linf_norm": 0.21010270714759827, + "block0_q_max_spectral_norm": 0.012043111957609653, + "block0_k_update_fnorm": 0.23209111392498016, + "block0_k_max_l1_linf_norm": 0.20386159420013428, + "block0_k_max_spectral_norm": 0.012039236724376678, + "block0_v_update_fnorm": 0.13412247598171234, + "block0_v_max_l1_linf_norm": 0.14642532169818878, + "block0_v_max_spectral_norm": 0.012024890631437302, + "block0_o_update_fnorm": 0.21312707662582397, + "block0_o_max_l1_linf_norm": 0.18139749765396118, + "block0_o_max_spectral_norm": 0.012037231586873531, + "block0_mlp_win_update_fnorm": 0.26134634017944336, + "block0_mlp_win_max_l1_linf_norm": 0.18212422728538513, + "block0_mlp_win_max_spectral_norm": 0.012041839770972729, + "block0_mlp_wout_update_fnorm": 0.2647785246372223, + "block0_mlp_wout_max_l1_linf_norm": 0.4334060549736023, + "block0_mlp_wout_max_spectral_norm": 0.01204212848097086, + "block3_q_update_fnorm": 0.1870884895324707, + "block3_q_max_l1_linf_norm": 0.21030963957309723, + "block3_q_max_spectral_norm": 0.012037147767841816, + "block3_k_update_fnorm": 0.19662116467952728, + "block3_k_max_l1_linf_norm": 0.21369290351867676, + "block3_k_max_spectral_norm": 0.012037438340485096, + "block3_v_update_fnorm": 0.17212967574596405, + 
"block3_v_max_l1_linf_norm": 0.1871700882911682, + "block3_v_max_spectral_norm": 0.012028892524540424, + "block3_o_update_fnorm": 0.22341127693653107, + "block3_o_max_l1_linf_norm": 0.18760573863983154, + "block3_o_max_spectral_norm": 0.012040836736559868, + "block3_mlp_win_update_fnorm": 0.264031320810318, + "block3_mlp_win_max_l1_linf_norm": 0.16829028725624084, + "block3_mlp_win_max_spectral_norm": 0.012036927975714207, + "block3_mlp_wout_update_fnorm": 0.24143585562705994, + "block3_mlp_wout_max_l1_linf_norm": 0.39770129323005676, + "block3_mlp_wout_max_spectral_norm": 0.011400136165320873, + "block7_q_update_fnorm": 0.24234376847743988, + "block7_q_max_l1_linf_norm": 0.21100065112113953, + "block7_q_max_spectral_norm": 0.012042778544127941, + "block7_k_update_fnorm": 0.2464597225189209, + "block7_k_max_l1_linf_norm": 0.20848551392555237, + "block7_k_max_spectral_norm": 0.012040708214044571, + "block7_v_update_fnorm": 0.2111920714378357, + "block7_v_max_l1_linf_norm": 0.20931598544120789, + "block7_v_max_spectral_norm": 0.012036253698170185, + "block7_o_update_fnorm": 0.24803130328655243, + "block7_o_max_l1_linf_norm": 0.20794671773910522, + "block7_o_max_spectral_norm": 0.012044369243085384, + "block7_mlp_win_update_fnorm": 0.27263155579566956, + "block7_mlp_win_max_l1_linf_norm": 0.15518292784690857, + "block7_mlp_win_max_spectral_norm": 0.01205722987651825, + "block7_mlp_wout_update_fnorm": 0.2458055466413498, + "block7_mlp_wout_max_l1_linf_norm": 0.40864843130111694, + "block7_mlp_wout_max_spectral_norm": 0.011359020136296749, + "block11_q_update_fnorm": 0.24774904549121857, + "block11_q_max_l1_linf_norm": 0.21402564644813538, + "block11_q_max_spectral_norm": 0.01204316969960928, + "block11_k_update_fnorm": 0.24946999549865723, + "block11_k_max_l1_linf_norm": 0.21422690153121948, + "block11_k_max_spectral_norm": 0.012038386426866055, + "block11_v_update_fnorm": 0.2454708069562912, + "block11_v_max_l1_linf_norm": 0.20628246665000916, + "block11_v_max_spectral_norm": 0.012042280286550522, + "block11_o_update_fnorm": 0.2486734837293625, + "block11_o_max_l1_linf_norm": 0.20915189385414124, + "block11_o_max_spectral_norm": 0.012038676999509335, + "block11_mlp_win_update_fnorm": 0.23675966262817383, + "block11_mlp_win_max_l1_linf_norm": 0.16818776726722717, + "block11_mlp_win_max_spectral_norm": 0.011370484717190266, + "block11_mlp_wout_update_fnorm": 0.23957917094230652, + "block11_mlp_wout_max_l1_linf_norm": 0.40468230843544006, + "block11_mlp_wout_max_spectral_norm": 0.011772257275879383, + "total_sharpness": 0.020752375945448875, + "block_total_sharpness": 0.016898969188332558, + "v_norm_block": 1.9421241283416748, + "v_T_H_v_block": 0.0637403130531311, + "v_norm": 2.362642765045166, + "ip_v_neg_g_hvp": 0.045895516872406006, + "cos_v_neg_g_hvp": 0.026643890887498856, + "g_hvp_norm": 0.7290789484977722, + "ip_v_neg_g_t": 0.05214276164770126, + "cos_v_neg_g_t": 0.03040607087314129, + "g_t_norm": 0.7258312702178955, + "g_norm": 0.7290789484977722, + "hv_norm": 14.148371696472168, + "cos_v_hv": 0.0034654487390071154, + "hg_norm": 10344.515625, + "cos_g_hg": 0.16395653784275055, + "v_parallel_norm": 0.006283516995608807, + "v_perp_norm": 2.3626344203948975, + "embed_lm_head_v_norm": 1.345449447631836, + "embed_lm_head_cos_v_neg_g": 0.03877795860171318, + "layer_1_v_norm": 0.5612003803253174, + "layer_1_cos_v_neg_g": 0.01525846216827631, + "layer_2_v_norm": 0.42835646867752075, + "layer_2_cos_v_neg_g": 0.027996821328997612, + "layer_3_v_norm": 0.41050606966018677, + "layer_3_cos_v_neg_g": 
0.02934521809220314, + "layer_4_v_norm": 0.5306016206741333, + "layer_4_cos_v_neg_g": 0.026560241356492043, + "layer_5_v_norm": 0.58489990234375, + "layer_5_cos_v_neg_g": 0.028530143201351166, + "layer_6_v_norm": 0.5907135605812073, + "layer_6_cos_v_neg_g": 0.02739729918539524, + "layer_7_v_norm": 0.5991700291633606, + "layer_7_cos_v_neg_g": 0.027251379564404488, + "layer_8_v_norm": 0.6003873944282532, + "layer_8_cos_v_neg_g": 0.026740379631519318, + "layer_9_v_norm": 0.5972142815589905, + "layer_9_cos_v_neg_g": 0.028871415182948112, + "layer_10_v_norm": 0.5969678163528442, + "layer_10_cos_v_neg_g": 0.032559674233198166, + "layer_11_v_norm": 0.5834841132164001, + "layer_11_cos_v_neg_g": 0.04240339621901512, + "layer_12_v_norm": 0.5994566082954407, + "layer_12_cos_v_neg_g": 0.06628049910068512, + "block0_q_v_norm": 0.24313952028751373, + "block0_q_cos_v_neg_g": -0.0001933783059939742, + "block0_k_v_norm": 0.23209111392498016, + "block0_k_cos_v_neg_g": -0.0010776743292808533, + "block0_v_v_norm": 0.13412247598171234, + "block0_v_cos_v_neg_g": 0.027539340779185295, + "block0_o_v_norm": 0.21312707662582397, + "block0_o_cos_v_neg_g": 0.028665676712989807, + "block0_mlp_win_v_norm": 0.26134634017944336, + "block0_mlp_win_cos_v_neg_g": 0.02952505275607109, + "block0_mlp_wout_v_norm": 0.2647785246372223, + "block0_mlp_wout_cos_v_neg_g": 0.0499538779258728, + "block3_q_v_norm": 0.1870884895324707, + "block3_q_cos_v_neg_g": 0.02498127892613411, + "block3_k_v_norm": 0.19662116467952728, + "block3_k_cos_v_neg_g": 0.045164961367845535, + "block3_v_v_norm": 0.17212967574596405, + "block3_v_cos_v_neg_g": 0.026847606524825096, + "block3_o_v_norm": 0.22341127693653107, + "block3_o_cos_v_neg_g": 0.038686223328113556, + "block3_mlp_win_v_norm": 0.264031320810318, + "block3_mlp_win_cos_v_neg_g": 0.03065662458539009, + "block3_mlp_wout_v_norm": 0.24143585562705994, + "block3_mlp_wout_cos_v_neg_g": 0.10620313882827759, + "block7_q_v_norm": 0.24234376847743988, + "block7_q_cos_v_neg_g": 0.03290846198797226, + "block7_k_v_norm": 0.2464597225189209, + "block7_k_cos_v_neg_g": 0.0751538798213005, + "block7_v_v_norm": 0.2111920714378357, + "block7_v_cos_v_neg_g": 0.0221597608178854, + "block7_o_v_norm": 0.24803130328655243, + "block7_o_cos_v_neg_g": 0.07186923176050186, + "block7_mlp_win_v_norm": 0.27263155579566956, + "block7_mlp_win_cos_v_neg_g": 0.03380141407251358, + "block7_mlp_wout_v_norm": 0.2458055466413498, + "block7_mlp_wout_cos_v_neg_g": 0.1218867152929306, + "block11_q_v_norm": 0.24774904549121857, + "block11_q_cos_v_neg_g": 0.08406811952590942, + "block11_k_v_norm": 0.24946999549865723, + "block11_k_cos_v_neg_g": 0.10567300021648407, + "block11_v_v_norm": 0.2454708069562912, + "block11_v_cos_v_neg_g": 0.05023916810750961, + "block11_o_v_norm": 0.2486734837293625, + "block11_o_cos_v_neg_g": 0.07824047654867172, + "block11_mlp_win_v_norm": 0.23675966262817383, + "block11_mlp_win_cos_v_neg_g": 0.09171423316001892, + "block11_mlp_wout_v_norm": 0.23957917094230652, + "block11_mlp_wout_cos_v_neg_g": 0.07701334357261658, + "embed_lm_head_sharpness": 0.006288541480898857, + "layer_1_sharpness": 0.1201043650507927, + "layer_2_sharpness": 0.0048439824022352695, + "layer_3_sharpness": 0.0047446358948946, + "layer_4_sharpness": 0.0013878850731998682, + "layer_5_sharpness": 0.000990146305412054, + "layer_6_sharpness": 0.001227348460815847, + "layer_7_sharpness": 0.0010552285239100456, + "layer_8_sharpness": 0.0009527847287245095, + "layer_9_sharpness": 0.00077979383058846, + "layer_10_sharpness": 
0.00045196633436717093, + "layer_11_sharpness": 0.0005770633579231799, + "layer_12_sharpness": 0.0015512557001784444, + "block0_q_sharpness": 0.02831423655152321, + "block0_k_sharpness": 0.03870289400219917, + "block0_v_sharpness": 0.2570604681968689, + "block0_o_sharpness": 0.02502455934882164, + "block0_mlp_win_sharpness": 0.003019705880433321, + "block0_mlp_wout_sharpness": 0.0009630789281800389, + "block3_q_sharpness": 3.9565777115058154e-05, + "block3_k_sharpness": 0.000737559050321579, + "block3_v_sharpness": 0.004517165012657642, + "block3_o_sharpness": 0.00042502672295086086, + "block3_mlp_win_sharpness": 0.0001198896425194107, + "block3_mlp_wout_sharpness": 6.565369403688237e-05, + "block7_q_sharpness": 6.15567114436999e-05, + "block7_k_sharpness": 7.016569725237787e-05, + "block7_v_sharpness": 0.0026549333706498146, + "block7_o_sharpness": 7.07026119926013e-05, + "block7_mlp_win_sharpness": 0.00036042218562215567, + "block7_mlp_wout_sharpness": 7.968087447807193e-05, + "block11_q_sharpness": 8.296016312669963e-05, + "block11_k_sharpness": 4.87549405079335e-05, + "block11_v_sharpness": 0.00017352201393805444, + "block11_o_sharpness": 3.833622031379491e-05, + "block11_mlp_win_sharpness": 0.0012974545825272799, + "block11_mlp_wout_sharpness": 0.002609975403174758, + "sum_layer_numerators": 0.04258786350655679, + "block_diag_sharpness": 0.01129098644006289, + "cross_layer_sharpness": 0.005607982748269668 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..4d0a64813bfb6fa034ce93ecebccd0db1a7fd3e8 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_5500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.298922538757324, + "total_l1_linf_norm": 19451.45703125, + "total_spectral_norm": 2.2989227771759033, + "embed_lm_head_update_fnorm": 1.321675181388855, + "embed_lm_head_max_l1_linf_norm": 0.3933121860027313, + "embed_lm_head_max_spectral_norm": 0.35703906416893005, + "layer_1_update_fnorm": 0.5086736083030701, + "layer_1_max_l1_linf_norm": 0.39373695850372314, + "layer_1_max_spectral_norm": 0.01203592773526907, + "layer_2_update_fnorm": 0.3593991994857788, + "layer_2_max_l1_linf_norm": 0.39341843128204346, + "layer_2_max_spectral_norm": 0.015696106478571892, + "layer_3_update_fnorm": 0.36828798055648804, + "layer_3_max_l1_linf_norm": 0.39955639839172363, + "layer_3_max_spectral_norm": 0.016523005440831184, + "layer_4_update_fnorm": 0.48906058073043823, + "layer_4_max_l1_linf_norm": 0.4122200012207031, + "layer_4_max_spectral_norm": 0.015761686488986015, + "layer_5_update_fnorm": 0.5659686326980591, + "layer_5_max_l1_linf_norm": 0.3949766755104065, + "layer_5_max_spectral_norm": 0.012043370865285397, + "layer_6_update_fnorm": 0.5857987999916077, + "layer_6_max_l1_linf_norm": 0.3991466164588928, + "layer_6_max_spectral_norm": 0.012043409049510956, + "layer_7_update_fnorm": 0.5936800241470337, + "layer_7_max_l1_linf_norm": 0.3997482657432556, + "layer_7_max_spectral_norm": 0.012047545053064823, + "layer_8_update_fnorm": 0.5980980396270752, + "layer_8_max_l1_linf_norm": 0.4032994210720062, + "layer_8_max_spectral_norm": 0.012051239609718323, + "layer_9_update_fnorm": 0.5965908765792847, + 
"layer_9_max_l1_linf_norm": 0.4033341407775879, + "layer_9_max_spectral_norm": 0.012052778154611588, + "layer_10_update_fnorm": 0.5960480570793152, + "layer_10_max_l1_linf_norm": 0.40712881088256836, + "layer_10_max_spectral_norm": 0.012053104117512703, + "layer_11_update_fnorm": 0.5768464207649231, + "layer_11_max_l1_linf_norm": 0.4285960793495178, + "layer_11_max_spectral_norm": 0.01204462070018053, + "layer_12_update_fnorm": 0.5982342958450317, + "layer_12_max_l1_linf_norm": 0.4309663474559784, + "layer_12_max_spectral_norm": 0.01204676553606987, + "block0_q_update_fnorm": 0.22035227715969086, + "block0_q_max_l1_linf_norm": 0.20330138504505157, + "block0_q_max_spectral_norm": 0.012034445069730282, + "block0_k_update_fnorm": 0.21819545328617096, + "block0_k_max_l1_linf_norm": 0.19920533895492554, + "block0_k_max_spectral_norm": 0.012033400125801563, + "block0_v_update_fnorm": 0.1216772049665451, + "block0_v_max_l1_linf_norm": 0.14594340324401855, + "block0_v_max_spectral_norm": 0.012024043127894402, + "block0_o_update_fnorm": 0.18976880609989166, + "block0_o_max_l1_linf_norm": 0.17106659710407257, + "block0_o_max_spectral_norm": 0.012033054605126381, + "block0_mlp_win_update_fnorm": 0.23358061909675598, + "block0_mlp_win_max_l1_linf_norm": 0.1825980842113495, + "block0_mlp_win_max_spectral_norm": 0.012033198028802872, + "block0_mlp_wout_update_fnorm": 0.23894529044628143, + "block0_mlp_wout_max_l1_linf_norm": 0.39373695850372314, + "block0_mlp_wout_max_spectral_norm": 0.01203592773526907, + "block3_q_update_fnorm": 0.14474309980869293, + "block3_q_max_l1_linf_norm": 0.18115389347076416, + "block3_q_max_spectral_norm": 0.012026515789330006, + "block3_k_update_fnorm": 0.16210581362247467, + "block3_k_max_l1_linf_norm": 0.19469225406646729, + "block3_k_max_spectral_norm": 0.012029716745018959, + "block3_v_update_fnorm": 0.15433605015277863, + "block3_v_max_l1_linf_norm": 0.163344144821167, + "block3_v_max_spectral_norm": 0.012027909979224205, + "block3_o_update_fnorm": 0.20335635542869568, + "block3_o_max_l1_linf_norm": 0.17364653944969177, + "block3_o_max_spectral_norm": 0.012032108381390572, + "block3_mlp_win_update_fnorm": 0.2522023916244507, + "block3_mlp_win_max_l1_linf_norm": 0.18477503955364227, + "block3_mlp_win_max_spectral_norm": 0.012036715634167194, + "block3_mlp_wout_update_fnorm": 0.2504699230194092, + "block3_mlp_wout_max_l1_linf_norm": 0.4122200012207031, + "block3_mlp_wout_max_spectral_norm": 0.012059935368597507, + "block7_q_update_fnorm": 0.24115049839019775, + "block7_q_max_l1_linf_norm": 0.20970895886421204, + "block7_q_max_spectral_norm": 0.012047219090163708, + "block7_k_update_fnorm": 0.24687492847442627, + "block7_k_max_l1_linf_norm": 0.20695170760154724, + "block7_k_max_spectral_norm": 0.012040698900818825, + "block7_v_update_fnorm": 0.20176443457603455, + "block7_v_max_l1_linf_norm": 0.21160702407360077, + "block7_v_max_spectral_norm": 0.012034960091114044, + "block7_o_update_fnorm": 0.24749958515167236, + "block7_o_max_l1_linf_norm": 0.20737218856811523, + "block7_o_max_spectral_norm": 0.012042125687003136, + "block7_mlp_win_update_fnorm": 0.2779636085033417, + "block7_mlp_win_max_l1_linf_norm": 0.16039253771305084, + "block7_mlp_win_max_spectral_norm": 0.012051239609718323, + "block7_mlp_wout_update_fnorm": 0.24343343079090118, + "block7_mlp_wout_max_l1_linf_norm": 0.4032994210720062, + "block7_mlp_wout_max_spectral_norm": 0.01136791706085205, + "block11_q_update_fnorm": 0.24742037057876587, + "block11_q_max_l1_linf_norm": 0.21408742666244507, + 
"block11_q_max_spectral_norm": 0.012042898684740067, + "block11_k_update_fnorm": 0.24933579564094543, + "block11_k_max_l1_linf_norm": 0.21636442840099335, + "block11_k_max_spectral_norm": 0.01203847210854292, + "block11_v_update_fnorm": 0.24492360651493073, + "block11_v_max_l1_linf_norm": 0.20766690373420715, + "block11_v_max_spectral_norm": 0.01204676553606987, + "block11_o_update_fnorm": 0.24828225374221802, + "block11_o_max_l1_linf_norm": 0.21113859117031097, + "block11_o_max_spectral_norm": 0.012045877985656261, + "block11_mlp_win_update_fnorm": 0.2372676283121109, + "block11_mlp_win_max_l1_linf_norm": 0.16932302713394165, + "block11_mlp_win_max_spectral_norm": 0.01139603927731514, + "block11_mlp_wout_update_fnorm": 0.23747175931930542, + "block11_mlp_wout_max_l1_linf_norm": 0.3965502679347992, + "block11_mlp_wout_max_spectral_norm": 0.01136603020131588, + "total_sharpness": -0.0010958838975057006, + "block_total_sharpness": -0.0018330470193177462, + "v_norm_block": 1.8810161352157593, + "v_T_H_v_block": -0.006485726684331894, + "v_norm": 2.298922538757324, + "ip_v_neg_g_hvp": 0.046081557869911194, + "cos_v_neg_g_hvp": 0.019714131951332092, + "g_hvp_norm": 1.0167757272720337, + "ip_v_neg_g_t": 0.05492882430553436, + "cos_v_neg_g_t": 0.020850611850619316, + "g_t_norm": 1.1459275484085083, + "g_norm": 1.0167757272720337, + "hv_norm": 28.594308853149414, + "cos_v_hv": -8.810676808934659e-05, + "hg_norm": 94381.8828125, + "cos_g_hg": -0.09650988131761551, + "v_parallel_norm": 0.00636023236438632, + "v_perp_norm": 2.2989137172698975, + "embed_lm_head_v_norm": 1.321675181388855, + "embed_lm_head_cos_v_neg_g": 0.017949089407920837, + "layer_1_v_norm": 0.5086736083030701, + "layer_1_cos_v_neg_g": 0.013745003379881382, + "layer_2_v_norm": 0.3593991994857788, + "layer_2_cos_v_neg_g": 0.01966465450823307, + "layer_3_v_norm": 0.36828798055648804, + "layer_3_cos_v_neg_g": 0.02767760306596756, + "layer_4_v_norm": 0.48906058073043823, + "layer_4_cos_v_neg_g": 0.024960476905107498, + "layer_5_v_norm": 0.5659686326980591, + "layer_5_cos_v_neg_g": 0.02998155727982521, + "layer_6_v_norm": 0.5857987403869629, + "layer_6_cos_v_neg_g": 0.02984512411057949, + "layer_7_v_norm": 0.5936800241470337, + "layer_7_cos_v_neg_g": 0.028935743495821953, + "layer_8_v_norm": 0.5980980396270752, + "layer_8_cos_v_neg_g": 0.029298201203346252, + "layer_9_v_norm": 0.5965908765792847, + "layer_9_cos_v_neg_g": 0.03139381855726242, + "layer_10_v_norm": 0.5960480570793152, + "layer_10_cos_v_neg_g": 0.0349862277507782, + "layer_11_v_norm": 0.5768464207649231, + "layer_11_cos_v_neg_g": 0.04529167711734772, + "layer_12_v_norm": 0.5982342958450317, + "layer_12_cos_v_neg_g": 0.07777538150548935, + "block0_q_v_norm": 0.22035227715969086, + "block0_q_cos_v_neg_g": 0.032822757959365845, + "block0_k_v_norm": 0.21819545328617096, + "block0_k_cos_v_neg_g": 0.017089135944843292, + "block0_v_v_norm": 0.1216772049665451, + "block0_v_cos_v_neg_g": 0.025223562493920326, + "block0_o_v_norm": 0.18976880609989166, + "block0_o_cos_v_neg_g": 0.03662479668855667, + "block0_mlp_win_v_norm": 0.23358061909675598, + "block0_mlp_win_cos_v_neg_g": 0.018200738355517387, + "block0_mlp_wout_v_norm": 0.23894529044628143, + "block0_mlp_wout_cos_v_neg_g": 0.03143920749425888, + "block3_q_v_norm": 0.14474309980869293, + "block3_q_cos_v_neg_g": 0.03069242462515831, + "block3_k_v_norm": 0.16210581362247467, + "block3_k_cos_v_neg_g": 0.04232768341898918, + "block3_v_v_norm": 0.15433605015277863, + "block3_v_cos_v_neg_g": 0.02039128728210926, + "block3_o_v_norm": 
0.20335635542869568, + "block3_o_cos_v_neg_g": 0.029102159664034843, + "block3_mlp_win_v_norm": 0.2522023916244507, + "block3_mlp_win_cos_v_neg_g": 0.030253306031227112, + "block3_mlp_wout_v_norm": 0.2504699230194092, + "block3_mlp_wout_cos_v_neg_g": 0.07608126848936081, + "block7_q_v_norm": 0.24115049839019775, + "block7_q_cos_v_neg_g": 0.03458210825920105, + "block7_k_v_norm": 0.24687492847442627, + "block7_k_cos_v_neg_g": 0.08470740169286728, + "block7_v_v_norm": 0.20176443457603455, + "block7_v_cos_v_neg_g": 0.026428531855344772, + "block7_o_v_norm": 0.24749958515167236, + "block7_o_cos_v_neg_g": 0.08093790709972382, + "block7_mlp_win_v_norm": 0.2779636085033417, + "block7_mlp_win_cos_v_neg_g": 0.03817138448357582, + "block7_mlp_wout_v_norm": 0.24343343079090118, + "block7_mlp_wout_cos_v_neg_g": 0.12972454726696014, + "block11_q_v_norm": 0.24742037057876587, + "block11_q_cos_v_neg_g": 0.07625867426395416, + "block11_k_v_norm": 0.24933579564094543, + "block11_k_cos_v_neg_g": 0.10070713609457016, + "block11_v_v_norm": 0.24492360651493073, + "block11_v_cos_v_neg_g": 0.051223527640104294, + "block11_o_v_norm": 0.24828225374221802, + "block11_o_cos_v_neg_g": 0.08318553119897842, + "block11_mlp_win_v_norm": 0.2372676283121109, + "block11_mlp_win_cos_v_neg_g": 0.10776487737894058, + "block11_mlp_wout_v_norm": 0.23747175931930542, + "block11_mlp_wout_cos_v_neg_g": 0.09378130733966827, + "embed_lm_head_sharpness": 0.00044176564551889896, + "layer_1_sharpness": -0.07860022783279419, + "layer_2_sharpness": -0.0052787926979362965, + "layer_3_sharpness": 0.005292231682687998, + "layer_4_sharpness": 0.001742952736094594, + "layer_5_sharpness": 0.0013830220559611917, + "layer_6_sharpness": 0.001315313158556819, + "layer_7_sharpness": 0.0015431884676218033, + "layer_8_sharpness": 0.0011707664234563708, + "layer_9_sharpness": 0.000739679962862283, + "layer_10_sharpness": 0.0004791153478436172, + "layer_11_sharpness": 0.0005365994875319302, + "layer_12_sharpness": 0.0003476944111753255, + "block0_q_sharpness": -0.0015682094963267446, + "block0_k_sharpness": 0.007256023120135069, + "block0_v_sharpness": -0.6310625076293945, + "block0_o_sharpness": -0.0026605562306940556, + "block0_mlp_win_sharpness": -0.0067131612449884415, + "block0_mlp_wout_sharpness": -0.004364381544291973, + "block3_q_sharpness": 8.310750854434446e-05, + "block3_k_sharpness": 0.003137195250019431, + "block3_v_sharpness": 0.0049948920495808125, + "block3_o_sharpness": 0.0005361371440812945, + "block3_mlp_win_sharpness": 0.00019794204854406416, + "block3_mlp_wout_sharpness": 0.00011119372356915846, + "block7_q_sharpness": 8.175808034138754e-05, + "block7_k_sharpness": 7.824420026736334e-05, + "block7_v_sharpness": 0.003289230167865753, + "block7_o_sharpness": 8.085824083536863e-05, + "block7_mlp_win_sharpness": 0.0004201411793474108, + "block7_mlp_wout_sharpness": 9.143794886767864e-05, + "block11_q_sharpness": 5.5102413170970976e-05, + "block11_k_sharpness": 4.5271284761838615e-05, + "block11_v_sharpness": 0.00013269773626234382, + "block11_o_sharpness": 3.024054785782937e-05, + "block11_mlp_win_sharpness": 0.00023679365403950214, + "block11_mlp_wout_sharpness": 0.0004071395960636437, + "sum_layer_numerators": -0.017291310011517864, + "block_diag_sharpness": -0.004887005810550014, + "cross_layer_sharpness": 0.003053958791232268 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6000.json 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..0125ac84c904a75a397948d56ed037cdcb2035a6 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.281038761138916, + "total_l1_linf_norm": 19264.201171875, + "total_spectral_norm": 2.281039237976074, + "embed_lm_head_update_fnorm": 1.31832754611969, + "embed_lm_head_max_l1_linf_norm": 0.36795854568481445, + "embed_lm_head_max_spectral_norm": 0.24970600008964539, + "layer_1_update_fnorm": 0.5265312194824219, + "layer_1_max_l1_linf_norm": 0.36374804377555847, + "layer_1_max_spectral_norm": 0.012041601352393627, + "layer_2_update_fnorm": 0.3159104585647583, + "layer_2_max_l1_linf_norm": 0.510459303855896, + "layer_2_max_spectral_norm": 0.014501823112368584, + "layer_3_update_fnorm": 0.35911715030670166, + "layer_3_max_l1_linf_norm": 0.4634600579738617, + "layer_3_max_spectral_norm": 0.013493705540895462, + "layer_4_update_fnorm": 0.4544248878955841, + "layer_4_max_l1_linf_norm": 0.4696403741836548, + "layer_4_max_spectral_norm": 0.017227932810783386, + "layer_5_update_fnorm": 0.5559934973716736, + "layer_5_max_l1_linf_norm": 0.44411641359329224, + "layer_5_max_spectral_norm": 0.012042008340358734, + "layer_6_update_fnorm": 0.5821795463562012, + "layer_6_max_l1_linf_norm": 0.4146806001663208, + "layer_6_max_spectral_norm": 0.012044765055179596, + "layer_7_update_fnorm": 0.5951413512229919, + "layer_7_max_l1_linf_norm": 0.39861106872558594, + "layer_7_max_spectral_norm": 0.012045865878462791, + "layer_8_update_fnorm": 0.5957340002059937, + "layer_8_max_l1_linf_norm": 0.40317538380622864, + "layer_8_max_spectral_norm": 0.01205002423375845, + "layer_9_update_fnorm": 0.595636785030365, + "layer_9_max_l1_linf_norm": 0.4065787196159363, + "layer_9_max_spectral_norm": 0.012054875493049622, + "layer_10_update_fnorm": 0.5934019684791565, + "layer_10_max_l1_linf_norm": 0.42810988426208496, + "layer_10_max_spectral_norm": 0.012045740149915218, + "layer_11_update_fnorm": 0.5746209025382996, + "layer_11_max_l1_linf_norm": 0.4454022943973541, + "layer_11_max_spectral_norm": 0.012043086811900139, + "layer_12_update_fnorm": 0.5986639857292175, + "layer_12_max_l1_linf_norm": 0.44375646114349365, + "layer_12_max_spectral_norm": 0.012046074494719505, + "block0_q_update_fnorm": 0.2424609512090683, + "block0_q_max_l1_linf_norm": 0.2084372639656067, + "block0_q_max_spectral_norm": 0.012041601352393627, + "block0_k_update_fnorm": 0.23612335324287415, + "block0_k_max_l1_linf_norm": 0.2079939991235733, + "block0_k_max_spectral_norm": 0.012039056979119778, + "block0_v_update_fnorm": 0.172639399766922, + "block0_v_max_l1_linf_norm": 0.16853533685207367, + "block0_v_max_spectral_norm": 0.01203112956136465, + "block0_o_update_fnorm": 0.2045549601316452, + "block0_o_max_l1_linf_norm": 0.1762111783027649, + "block0_o_max_spectral_norm": 0.01203243713825941, + "block0_mlp_win_update_fnorm": 0.20874826610088348, + "block0_mlp_win_max_l1_linf_norm": 0.18444398045539856, + "block0_mlp_win_max_spectral_norm": 0.01203086506575346, + "block0_mlp_wout_update_fnorm": 0.21758152544498444, + "block0_mlp_wout_max_l1_linf_norm": 0.36374804377555847, + "block0_mlp_wout_max_spectral_norm": 0.012035011313855648, + "block3_q_update_fnorm": 0.10872664302587509, + "block3_q_max_l1_linf_norm": 0.14604198932647705, + 
"block3_q_max_spectral_norm": 0.012025395408272743, + "block3_k_update_fnorm": 0.12001049518585205, + "block3_k_max_l1_linf_norm": 0.1598103940486908, + "block3_k_max_spectral_norm": 0.012024147436022758, + "block3_v_update_fnorm": 0.1468663513660431, + "block3_v_max_l1_linf_norm": 0.15722410380840302, + "block3_v_max_spectral_norm": 0.012027990072965622, + "block3_o_update_fnorm": 0.1927136480808258, + "block3_o_max_l1_linf_norm": 0.1630028784275055, + "block3_o_max_spectral_norm": 0.012032844126224518, + "block3_mlp_win_update_fnorm": 0.2493283599615097, + "block3_mlp_win_max_l1_linf_norm": 0.1786668598651886, + "block3_mlp_win_max_spectral_norm": 0.012038140557706356, + "block3_mlp_wout_update_fnorm": 0.24255585670471191, + "block3_mlp_wout_max_l1_linf_norm": 0.39957934617996216, + "block3_mlp_wout_max_spectral_norm": 0.011856496334075928, + "block7_q_update_fnorm": 0.240072563290596, + "block7_q_max_l1_linf_norm": 0.20835936069488525, + "block7_q_max_spectral_norm": 0.012039611116051674, + "block7_k_update_fnorm": 0.24700576066970825, + "block7_k_max_l1_linf_norm": 0.21108299493789673, + "block7_k_max_spectral_norm": 0.012040192261338234, + "block7_v_update_fnorm": 0.19615507125854492, + "block7_v_max_l1_linf_norm": 0.2076844573020935, + "block7_v_max_spectral_norm": 0.012031074613332748, + "block7_o_update_fnorm": 0.2473212480545044, + "block7_o_max_l1_linf_norm": 0.2072317898273468, + "block7_o_max_spectral_norm": 0.01205002423375845, + "block7_mlp_win_update_fnorm": 0.27881166338920593, + "block7_mlp_win_max_l1_linf_norm": 0.1600978672504425, + "block7_mlp_win_max_spectral_norm": 0.012045229785144329, + "block7_mlp_wout_update_fnorm": 0.24229386448860168, + "block7_mlp_wout_max_l1_linf_norm": 0.40317538380622864, + "block7_mlp_wout_max_spectral_norm": 0.011383539997041225, + "block11_q_update_fnorm": 0.24767842888832092, + "block11_q_max_l1_linf_norm": 0.21586966514587402, + "block11_q_max_spectral_norm": 0.012038319371640682, + "block11_k_update_fnorm": 0.24954882264137268, + "block11_k_max_l1_linf_norm": 0.21654292941093445, + "block11_k_max_spectral_norm": 0.012039406225085258, + "block11_v_update_fnorm": 0.24514004588127136, + "block11_v_max_l1_linf_norm": 0.207978755235672, + "block11_v_max_spectral_norm": 0.012046074494719505, + "block11_o_update_fnorm": 0.24808602035045624, + "block11_o_max_l1_linf_norm": 0.20670410990715027, + "block11_o_max_spectral_norm": 0.01203934382647276, + "block11_mlp_win_update_fnorm": 0.23778729140758514, + "block11_mlp_win_max_l1_linf_norm": 0.16636455059051514, + "block11_mlp_win_max_spectral_norm": 0.011399129405617714, + "block11_mlp_wout_update_fnorm": 0.23749029636383057, + "block11_mlp_wout_max_l1_linf_norm": 0.39510881900787354, + "block11_mlp_wout_max_spectral_norm": 0.011430368758738041, + "total_sharpness": -0.03434763476252556, + "block_total_sharpness": -0.04892561212182045, + "v_norm_block": 1.8614921569824219, + "v_T_H_v_block": -0.16953472793102264, + "v_norm": 2.281038761138916, + "ip_v_neg_g_hvp": 0.04156129062175751, + "cos_v_neg_g_hvp": 0.02071233280003071, + "g_hvp_norm": 0.8796853423118591, + "ip_v_neg_g_t": 0.04822034388780594, + "cos_v_neg_g_t": 0.024786124005913734, + "g_t_norm": 0.8528821468353271, + "g_norm": 0.8796853423118591, + "hv_norm": 71.83956909179688, + "cos_v_hv": -0.0010906008537858725, + "hg_norm": 34171.25390625, + "cos_g_hg": -0.06480426341295242, + "v_parallel_norm": 0.004280589986592531, + "v_perp_norm": 2.2810349464416504, + "embed_lm_head_v_norm": 1.31832754611969, + "embed_lm_head_cos_v_neg_g": 
0.028097005560994148, + "layer_1_v_norm": 0.5265312194824219, + "layer_1_cos_v_neg_g": 0.011821048334240913, + "layer_2_v_norm": 0.3159104585647583, + "layer_2_cos_v_neg_g": 0.02040615864098072, + "layer_3_v_norm": 0.3591171205043793, + "layer_3_cos_v_neg_g": 0.028464525938034058, + "layer_4_v_norm": 0.4544248878955841, + "layer_4_cos_v_neg_g": 0.022805776447057724, + "layer_5_v_norm": 0.5559934973716736, + "layer_5_cos_v_neg_g": 0.025838196277618408, + "layer_6_v_norm": 0.5821795463562012, + "layer_6_cos_v_neg_g": 0.026333266869187355, + "layer_7_v_norm": 0.5951413512229919, + "layer_7_cos_v_neg_g": 0.02764095552265644, + "layer_8_v_norm": 0.5957340002059937, + "layer_8_cos_v_neg_g": 0.0273415707051754, + "layer_9_v_norm": 0.595636785030365, + "layer_9_cos_v_neg_g": 0.027361998334527016, + "layer_10_v_norm": 0.5934019684791565, + "layer_10_cos_v_neg_g": 0.030213763937354088, + "layer_11_v_norm": 0.5746209025382996, + "layer_11_cos_v_neg_g": 0.039555713534355164, + "layer_12_v_norm": 0.5986639857292175, + "layer_12_cos_v_neg_g": 0.0722324326634407, + "block0_q_v_norm": 0.2424609512090683, + "block0_q_cos_v_neg_g": -0.00036530435318127275, + "block0_k_v_norm": 0.23612335324287415, + "block0_k_cos_v_neg_g": 0.0011689073871821165, + "block0_v_v_norm": 0.172639399766922, + "block0_v_cos_v_neg_g": 0.014044558629393578, + "block0_o_v_norm": 0.2045549601316452, + "block0_o_cos_v_neg_g": 0.01852058432996273, + "block0_mlp_win_v_norm": 0.20874826610088348, + "block0_mlp_win_cos_v_neg_g": 0.021075397729873657, + "block0_mlp_wout_v_norm": 0.21758152544498444, + "block0_mlp_wout_cos_v_neg_g": 0.03820610046386719, + "block3_q_v_norm": 0.10872664302587509, + "block3_q_cos_v_neg_g": 0.0207870751619339, + "block3_k_v_norm": 0.12001049518585205, + "block3_k_cos_v_neg_g": 0.04917106777429581, + "block3_v_v_norm": 0.1468663513660431, + "block3_v_cos_v_neg_g": 0.019031107425689697, + "block3_o_v_norm": 0.1927136480808258, + "block3_o_cos_v_neg_g": 0.024765266105532646, + "block3_mlp_win_v_norm": 0.2493283599615097, + "block3_mlp_win_cos_v_neg_g": 0.025725986808538437, + "block3_mlp_wout_v_norm": 0.24255585670471191, + "block3_mlp_wout_cos_v_neg_g": 0.08802837133407593, + "block7_q_v_norm": 0.240072563290596, + "block7_q_cos_v_neg_g": 0.03309030458331108, + "block7_k_v_norm": 0.24700576066970825, + "block7_k_cos_v_neg_g": 0.07431075721979141, + "block7_v_v_norm": 0.19615507125854492, + "block7_v_cos_v_neg_g": 0.030731331557035446, + "block7_o_v_norm": 0.2473212480545044, + "block7_o_cos_v_neg_g": 0.07622208446264267, + "block7_mlp_win_v_norm": 0.27881166338920593, + "block7_mlp_win_cos_v_neg_g": 0.034171730279922485, + "block7_mlp_wout_v_norm": 0.24229386448860168, + "block7_mlp_wout_cos_v_neg_g": 0.11858391761779785, + "block11_q_v_norm": 0.24767842888832092, + "block11_q_cos_v_neg_g": 0.07262968271970749, + "block11_k_v_norm": 0.24954882264137268, + "block11_k_cos_v_neg_g": 0.09609910100698471, + "block11_v_v_norm": 0.24514004588127136, + "block11_v_cos_v_neg_g": 0.04785238951444626, + "block11_o_v_norm": 0.24808602035045624, + "block11_o_cos_v_neg_g": 0.07944048941135406, + "block11_mlp_win_v_norm": 0.23778729140758514, + "block11_mlp_win_cos_v_neg_g": 0.09802041202783585, + "block11_mlp_wout_v_norm": 0.23749029636383057, + "block11_mlp_wout_cos_v_neg_g": 0.08644933253526688, + "embed_lm_head_sharpness": 0.00043150768033228815, + "layer_1_sharpness": -0.36372336745262146, + "layer_2_sharpness": -0.12765689194202423, + "layer_3_sharpness": 0.00532820587977767, + "layer_4_sharpness": 0.001999929081648588, + 
"layer_5_sharpness": 0.0014085274888202548, + "layer_6_sharpness": 0.001330621656961739, + "layer_7_sharpness": 0.0012467269552871585, + "layer_8_sharpness": 0.0011869744630530477, + "layer_9_sharpness": 0.0007670990307815373, + "layer_10_sharpness": 0.0004649200418498367, + "layer_11_sharpness": 0.00047101450036279857, + "layer_12_sharpness": 0.0002939123660326004, + "block0_q_sharpness": -5.083890937385149e-05, + "block0_k_sharpness": 0.0001124750342569314, + "block0_v_sharpness": -0.36915093660354614, + "block0_o_sharpness": -0.06132464483380318, + "block0_mlp_win_sharpness": -0.13494227826595306, + "block0_mlp_wout_sharpness": -0.176273375749588, + "block3_q_sharpness": 5.7153531088260934e-05, + "block3_k_sharpness": 0.0031041137408465147, + "block3_v_sharpness": 0.005145884118974209, + "block3_o_sharpness": 0.0007016003364697099, + "block3_mlp_win_sharpness": 0.00019251399498898536, + "block3_mlp_wout_sharpness": 8.55435137054883e-05, + "block7_q_sharpness": 0.00010002192721003667, + "block7_k_sharpness": 7.707808981649578e-05, + "block7_v_sharpness": 0.003614953951910138, + "block7_o_sharpness": 7.87231620051898e-05, + "block7_mlp_win_sharpness": 0.0004310626827646047, + "block7_mlp_wout_sharpness": 8.084189903456718e-05, + "block11_q_sharpness": 6.02762884227559e-05, + "block11_k_sharpness": 4.028806142741814e-05, + "block11_v_sharpness": 0.00010840221511898562, + "block11_o_sharpness": 2.605996087368112e-05, + "block11_mlp_win_sharpness": 0.0001872481225291267, + "block11_mlp_wout_sharpness": 0.0004078179190400988, + "sum_layer_numerators": -0.1100308626538877, + "block_diag_sharpness": -0.03175353614980627, + "cross_layer_sharpness": -0.01717207597201418 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..2cc3381051973a0e4c98043e72df2ea1b31793de --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_6500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2744550704956055, + "total_l1_linf_norm": 19245.025390625, + "total_spectral_norm": 2.2744553089141846, + "embed_lm_head_update_fnorm": 1.333665370941162, + "embed_lm_head_max_l1_linf_norm": 0.325797975063324, + "embed_lm_head_max_spectral_norm": 0.2147047221660614, + "layer_1_update_fnorm": 0.5039432644844055, + "layer_1_max_l1_linf_norm": 0.3824053704738617, + "layer_1_max_spectral_norm": 0.01203496940433979, + "layer_2_update_fnorm": 0.35070478916168213, + "layer_2_max_l1_linf_norm": 0.46353113651275635, + "layer_2_max_spectral_norm": 0.018096061423420906, + "layer_3_update_fnorm": 0.3782268464565277, + "layer_3_max_l1_linf_norm": 0.3649846315383911, + "layer_3_max_spectral_norm": 0.013371565379202366, + "layer_4_update_fnorm": 0.4315307140350342, + "layer_4_max_l1_linf_norm": 0.5701813697814941, + "layer_4_max_spectral_norm": 0.021427329629659653, + "layer_5_update_fnorm": 0.5288112759590149, + "layer_5_max_l1_linf_norm": 0.3978036046028137, + "layer_5_max_spectral_norm": 0.012044711969792843, + "layer_6_update_fnorm": 0.5376662611961365, + "layer_6_max_l1_linf_norm": 0.4047791361808777, + "layer_6_max_spectral_norm": 0.01255161501467228, + "layer_7_update_fnorm": 0.5976028442382812, + "layer_7_max_l1_linf_norm": 0.403769850730896, + 
"layer_7_max_spectral_norm": 0.012047982774674892, + "layer_8_update_fnorm": 0.600685715675354, + "layer_8_max_l1_linf_norm": 0.4051356911659241, + "layer_8_max_spectral_norm": 0.012046024203300476, + "layer_9_update_fnorm": 0.5958428382873535, + "layer_9_max_l1_linf_norm": 0.40647780895233154, + "layer_9_max_spectral_norm": 0.012058787047863007, + "layer_10_update_fnorm": 0.5959643721580505, + "layer_10_max_l1_linf_norm": 0.41159743070602417, + "layer_10_max_spectral_norm": 0.012051276862621307, + "layer_11_update_fnorm": 0.5770926475524902, + "layer_11_max_l1_linf_norm": 0.41260606050491333, + "layer_11_max_spectral_norm": 0.01205054298043251, + "layer_12_update_fnorm": 0.5986142158508301, + "layer_12_max_l1_linf_norm": 0.4158596396446228, + "layer_12_max_spectral_norm": 0.01204428356140852, + "block0_q_update_fnorm": 0.2029297947883606, + "block0_q_max_l1_linf_norm": 0.19248856604099274, + "block0_q_max_spectral_norm": 0.012032956816256046, + "block0_k_update_fnorm": 0.21757489442825317, + "block0_k_max_l1_linf_norm": 0.2000925987958908, + "block0_k_max_spectral_norm": 0.012033953331410885, + "block0_v_update_fnorm": 0.13988281786441803, + "block0_v_max_l1_linf_norm": 0.15654218196868896, + "block0_v_max_spectral_norm": 0.012027377262711525, + "block0_o_update_fnorm": 0.1963060051202774, + "block0_o_max_l1_linf_norm": 0.18031780421733856, + "block0_o_max_spectral_norm": 0.012034200131893158, + "block0_mlp_win_update_fnorm": 0.23005203902721405, + "block0_mlp_win_max_l1_linf_norm": 0.17711853981018066, + "block0_mlp_win_max_spectral_norm": 0.012032708153128624, + "block0_mlp_wout_update_fnorm": 0.23297107219696045, + "block0_mlp_wout_max_l1_linf_norm": 0.3824053704738617, + "block0_mlp_wout_max_spectral_norm": 0.01203496940433979, + "block3_q_update_fnorm": 0.05057057365775108, + "block3_q_max_l1_linf_norm": 0.11121733486652374, + "block3_q_max_spectral_norm": 0.011944867670536041, + "block3_k_update_fnorm": 0.054362669587135315, + "block3_k_max_l1_linf_norm": 0.12372645735740662, + "block3_k_max_spectral_norm": 0.012016533873975277, + "block3_v_update_fnorm": 0.1485954076051712, + "block3_v_max_l1_linf_norm": 0.16632381081581116, + "block3_v_max_spectral_norm": 0.012027857825160027, + "block3_o_update_fnorm": 0.18750061094760895, + "block3_o_max_l1_linf_norm": 0.16222432255744934, + "block3_o_max_spectral_norm": 0.012031571008265018, + "block3_mlp_win_update_fnorm": 0.25012922286987305, + "block3_mlp_win_max_l1_linf_norm": 0.17845721542835236, + "block3_mlp_win_max_spectral_norm": 0.01203992497175932, + "block3_mlp_wout_update_fnorm": 0.24507763981819153, + "block3_mlp_wout_max_l1_linf_norm": 0.40372633934020996, + "block3_mlp_wout_max_spectral_norm": 0.011965042911469936, + "block7_q_update_fnorm": 0.24147704243659973, + "block7_q_max_l1_linf_norm": 0.20924359560012817, + "block7_q_max_spectral_norm": 0.012044127099215984, + "block7_k_update_fnorm": 0.24701938033103943, + "block7_k_max_l1_linf_norm": 0.20808228850364685, + "block7_k_max_spectral_norm": 0.012041310779750347, + "block7_v_update_fnorm": 0.2058972865343094, + "block7_v_max_l1_linf_norm": 0.20983192324638367, + "block7_v_max_spectral_norm": 0.012033906765282154, + "block7_o_update_fnorm": 0.2477676421403885, + "block7_o_max_l1_linf_norm": 0.20819318294525146, + "block7_o_max_spectral_norm": 0.012044142931699753, + "block7_mlp_win_update_fnorm": 0.27922385931015015, + "block7_mlp_win_max_l1_linf_norm": 0.16004978120326996, + "block7_mlp_win_max_spectral_norm": 0.012046024203300476, + "block7_mlp_wout_update_fnorm": 
0.24415136873722076, + "block7_mlp_wout_max_l1_linf_norm": 0.4051356911659241, + "block7_mlp_wout_max_spectral_norm": 0.011369450949132442, + "block11_q_update_fnorm": 0.24771791696548462, + "block11_q_max_l1_linf_norm": 0.21632802486419678, + "block11_q_max_spectral_norm": 0.01204428356140852, + "block11_k_update_fnorm": 0.24940995872020721, + "block11_k_max_l1_linf_norm": 0.2160523682832718, + "block11_k_max_spectral_norm": 0.012043955735862255, + "block11_v_update_fnorm": 0.24527667462825775, + "block11_v_max_l1_linf_norm": 0.20639272034168243, + "block11_v_max_spectral_norm": 0.012041078880429268, + "block11_o_update_fnorm": 0.24856680631637573, + "block11_o_max_l1_linf_norm": 0.20787839591503143, + "block11_o_max_spectral_norm": 0.012040426023304462, + "block11_mlp_win_update_fnorm": 0.23688896000385284, + "block11_mlp_win_max_l1_linf_norm": 0.17785820364952087, + "block11_mlp_win_max_spectral_norm": 0.011380810290575027, + "block11_mlp_wout_update_fnorm": 0.23776832222938538, + "block11_mlp_wout_max_l1_linf_norm": 0.404873251914978, + "block11_mlp_wout_max_spectral_norm": 0.011621474288403988, + "total_sharpness": 0.003408887656405568, + "block_total_sharpness": 0.005335414316505194, + "v_norm_block": 1.8424123525619507, + "v_T_H_v_block": 0.0181109756231308, + "v_norm": 2.2744550704956055, + "ip_v_neg_g_hvp": 0.03875945135951042, + "cos_v_neg_g_hvp": 0.021186625584959984, + "g_hvp_norm": 0.804337739944458, + "ip_v_neg_g_t": 0.044657036662101746, + "cos_v_neg_g_t": 0.023055819794535637, + "g_t_norm": 0.8515927195549011, + "g_norm": 0.804337739944458, + "hv_norm": 4.766841411590576, + "cos_v_hv": 0.0016265199519693851, + "hg_norm": 15577.4677734375, + "cos_g_hg": -0.17237429320812225, + "v_parallel_norm": 0.005638942122459412, + "v_perp_norm": 2.2744481563568115, + "embed_lm_head_v_norm": 1.333665370941162, + "embed_lm_head_cos_v_neg_g": 0.02557254023849964, + "layer_1_v_norm": 0.5039432644844055, + "layer_1_cos_v_neg_g": 0.010044665075838566, + "layer_2_v_norm": 0.35070478916168213, + "layer_2_cos_v_neg_g": 0.014835633337497711, + "layer_3_v_norm": 0.3782268464565277, + "layer_3_cos_v_neg_g": 0.025310296565294266, + "layer_4_v_norm": 0.4315307140350342, + "layer_4_cos_v_neg_g": 0.024346841499209404, + "layer_5_v_norm": 0.5288112759590149, + "layer_5_cos_v_neg_g": 0.024673398584127426, + "layer_6_v_norm": 0.5376662611961365, + "layer_6_cos_v_neg_g": 0.026209905743598938, + "layer_7_v_norm": 0.5976028442382812, + "layer_7_cos_v_neg_g": 0.02565552294254303, + "layer_8_v_norm": 0.600685715675354, + "layer_8_cos_v_neg_g": 0.02545175328850746, + "layer_9_v_norm": 0.5958428382873535, + "layer_9_cos_v_neg_g": 0.02667972818017006, + "layer_10_v_norm": 0.5959643721580505, + "layer_10_cos_v_neg_g": 0.028647497296333313, + "layer_11_v_norm": 0.5770926475524902, + "layer_11_cos_v_neg_g": 0.03865937516093254, + "layer_12_v_norm": 0.5986142158508301, + "layer_12_cos_v_neg_g": 0.07229391485452652, + "block0_q_v_norm": 0.2029297947883606, + "block0_q_cos_v_neg_g": 0.003658290719613433, + "block0_k_v_norm": 0.21757489442825317, + "block0_k_cos_v_neg_g": 0.008813608437776566, + "block0_v_v_norm": 0.13988281786441803, + "block0_v_cos_v_neg_g": 0.01154489628970623, + "block0_o_v_norm": 0.1963060051202774, + "block0_o_cos_v_neg_g": 0.024001600220799446, + "block0_mlp_win_v_norm": 0.23005203902721405, + "block0_mlp_win_cos_v_neg_g": 0.02068459615111351, + "block0_mlp_wout_v_norm": 0.23297107219696045, + "block0_mlp_wout_cos_v_neg_g": 0.02452688477933407, + "block3_q_v_norm": 0.05057057365775108, + 
"block3_q_cos_v_neg_g": 0.020189514383673668, + "block3_k_v_norm": 0.054362669587135315, + "block3_k_cos_v_neg_g": 0.07701653987169266, + "block3_v_v_norm": 0.1485954076051712, + "block3_v_cos_v_neg_g": 0.026874741539359093, + "block3_o_v_norm": 0.18750061094760895, + "block3_o_cos_v_neg_g": 0.02717253752052784, + "block3_mlp_win_v_norm": 0.25012922286987305, + "block3_mlp_win_cos_v_neg_g": 0.027095863595604897, + "block3_mlp_wout_v_norm": 0.24507763981819153, + "block3_mlp_wout_cos_v_neg_g": 0.07445866614580154, + "block7_q_v_norm": 0.24147704243659973, + "block7_q_cos_v_neg_g": 0.02941565029323101, + "block7_k_v_norm": 0.24701938033103943, + "block7_k_cos_v_neg_g": 0.07641571760177612, + "block7_v_v_norm": 0.2058972865343094, + "block7_v_cos_v_neg_g": 0.023131828755140305, + "block7_o_v_norm": 0.2477676421403885, + "block7_o_cos_v_neg_g": 0.07101824879646301, + "block7_mlp_win_v_norm": 0.27922385931015015, + "block7_mlp_win_cos_v_neg_g": 0.030404025688767433, + "block7_mlp_wout_v_norm": 0.24415136873722076, + "block7_mlp_wout_cos_v_neg_g": 0.11790675669908524, + "block11_q_v_norm": 0.24771791696548462, + "block11_q_cos_v_neg_g": 0.07159053534269333, + "block11_k_v_norm": 0.24940995872020721, + "block11_k_cos_v_neg_g": 0.09798356145620346, + "block11_v_v_norm": 0.24527667462825775, + "block11_v_cos_v_neg_g": 0.043721962720155716, + "block11_o_v_norm": 0.24856680631637573, + "block11_o_cos_v_neg_g": 0.07727149873971939, + "block11_mlp_win_v_norm": 0.23688896000385284, + "block11_mlp_win_cos_v_neg_g": 0.09738244116306305, + "block11_mlp_wout_v_norm": 0.23776832222938538, + "block11_mlp_wout_cos_v_neg_g": 0.08930101990699768, + "embed_lm_head_sharpness": 3.348386235302314e-05, + "layer_1_sharpness": -0.010306070558726788, + "layer_2_sharpness": 0.0032722570467740297, + "layer_3_sharpness": 0.0035076835192739964, + "layer_4_sharpness": 0.002358677564188838, + "layer_5_sharpness": 0.0013930201530456543, + "layer_6_sharpness": 0.0014618654968217015, + "layer_7_sharpness": 0.0011301299091428518, + "layer_8_sharpness": 0.0011003995314240456, + "layer_9_sharpness": 0.0009106113575398922, + "layer_10_sharpness": 0.00047027674736455083, + "layer_11_sharpness": 0.0005421085515990853, + "layer_12_sharpness": 0.0006867306074127555, + "block0_q_sharpness": -0.01603865996003151, + "block0_k_sharpness": -0.0013813429977744818, + "block0_v_sharpness": -0.09777481853961945, + "block0_o_sharpness": 0.004582044202834368, + "block0_mlp_win_sharpness": 0.0006775605143047869, + "block0_mlp_wout_sharpness": 0.0007712242659181356, + "block3_q_sharpness": 0.00017523053975310177, + "block3_k_sharpness": 0.010575434193015099, + "block3_v_sharpness": 0.005696992855519056, + "block3_o_sharpness": 0.001128518721088767, + "block3_mlp_win_sharpness": 0.00021562185429502279, + "block3_mlp_wout_sharpness": 0.00010620122338877991, + "block7_q_sharpness": 6.584245420526713e-05, + "block7_k_sharpness": 5.7031189498957247e-05, + "block7_v_sharpness": 0.0027781755197793245, + "block7_o_sharpness": 6.153382128104568e-05, + "block7_mlp_win_sharpness": 0.000555179372895509, + "block7_mlp_wout_sharpness": 8.949311450123787e-05, + "block11_q_sharpness": 8.000627713045105e-05, + "block11_k_sharpness": 4.2965999455191195e-05, + "block11_v_sharpness": 0.00011486498988233507, + "block11_o_sharpness": 2.9409169655991718e-05, + "block11_mlp_win_sharpness": 0.0006837451364845037, + "block11_mlp_wout_sharpness": 0.0010018799221143126, + "sum_layer_numerators": 0.001255922226382499, + "block_diag_sharpness": 0.0003699892219058173, + 
"cross_layer_sharpness": 0.004965425094599377 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..985d8a31ce4de5396ec8fa96d821d574ad582f90 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.236363410949707, + "total_l1_linf_norm": 18827.833984375, + "total_spectral_norm": 2.236363649368286, + "embed_lm_head_update_fnorm": 1.33402419090271, + "embed_lm_head_max_l1_linf_norm": 0.3684972822666168, + "embed_lm_head_max_spectral_norm": 0.246858149766922, + "layer_1_update_fnorm": 0.4131356179714203, + "layer_1_max_l1_linf_norm": 0.35548722743988037, + "layer_1_max_spectral_norm": 0.012032200582325459, + "layer_2_update_fnorm": 0.32105639576911926, + "layer_2_max_l1_linf_norm": 0.4379146099090576, + "layer_2_max_spectral_norm": 0.01594773307442665, + "layer_3_update_fnorm": 0.3436650037765503, + "layer_3_max_l1_linf_norm": 0.408463716506958, + "layer_3_max_spectral_norm": 0.014071281999349594, + "layer_4_update_fnorm": 0.41489389538764954, + "layer_4_max_l1_linf_norm": 0.570412814617157, + "layer_4_max_spectral_norm": 0.02150719240307808, + "layer_5_update_fnorm": 0.5236823558807373, + "layer_5_max_l1_linf_norm": 0.407187283039093, + "layer_5_max_spectral_norm": 0.013121751137077808, + "layer_6_update_fnorm": 0.5238242149353027, + "layer_6_max_l1_linf_norm": 0.3971552848815918, + "layer_6_max_spectral_norm": 0.01204768382012844, + "layer_7_update_fnorm": 0.5939532518386841, + "layer_7_max_l1_linf_norm": 0.40004244446754456, + "layer_7_max_spectral_norm": 0.012045668438076973, + "layer_8_update_fnorm": 0.5976043939590454, + "layer_8_max_l1_linf_norm": 0.40405040979385376, + "layer_8_max_spectral_norm": 0.012048245407640934, + "layer_9_update_fnorm": 0.5969422459602356, + "layer_9_max_l1_linf_norm": 0.40286576747894287, + "layer_9_max_spectral_norm": 0.01204827893525362, + "layer_10_update_fnorm": 0.595272958278656, + "layer_10_max_l1_linf_norm": 0.40905529260635376, + "layer_10_max_spectral_norm": 0.012048418633639812, + "layer_11_update_fnorm": 0.5750659108161926, + "layer_11_max_l1_linf_norm": 0.43702271580696106, + "layer_11_max_spectral_norm": 0.012044730596244335, + "layer_12_update_fnorm": 0.5981333255767822, + "layer_12_max_l1_linf_norm": 0.43147873878479004, + "layer_12_max_spectral_norm": 0.012046289630234241, + "block0_q_update_fnorm": 0.10962200909852982, + "block0_q_max_l1_linf_norm": 0.15362875163555145, + "block0_q_max_spectral_norm": 0.012025118805468082, + "block0_k_update_fnorm": 0.13354574143886566, + "block0_k_max_l1_linf_norm": 0.17374442517757416, + "block0_k_max_spectral_norm": 0.012022956274449825, + "block0_v_update_fnorm": 0.13208888471126556, + "block0_v_max_l1_linf_norm": 0.15510399639606476, + "block0_v_max_spectral_norm": 0.012027506716549397, + "block0_o_update_fnorm": 0.1926911175251007, + "block0_o_max_l1_linf_norm": 0.1750708818435669, + "block0_o_max_spectral_norm": 0.012032200582325459, + "block0_mlp_win_update_fnorm": 0.20542334020137787, + "block0_mlp_win_max_l1_linf_norm": 0.18321499228477478, + "block0_mlp_win_max_spectral_norm": 0.012029564939439297, + "block0_mlp_wout_update_fnorm": 0.20948591828346252, + 
"block0_mlp_wout_max_l1_linf_norm": 0.34764501452445984, + "block0_mlp_wout_max_spectral_norm": 0.012030068784952164, + "block3_q_update_fnorm": 0.02360350452363491, + "block3_q_max_l1_linf_norm": 0.07396121323108673, + "block3_q_max_spectral_norm": 0.009864364750683308, + "block3_k_update_fnorm": 0.03628597408533096, + "block3_k_max_l1_linf_norm": 0.08252023160457611, + "block3_k_max_spectral_norm": 0.011969741433858871, + "block3_v_update_fnorm": 0.13802699744701385, + "block3_v_max_l1_linf_norm": 0.15562814474105835, + "block3_v_max_spectral_norm": 0.012027032673358917, + "block3_o_update_fnorm": 0.16132527589797974, + "block3_o_max_l1_linf_norm": 0.1389886885881424, + "block3_o_max_spectral_norm": 0.012030316516757011, + "block3_mlp_win_update_fnorm": 0.24302542209625244, + "block3_mlp_win_max_l1_linf_norm": 0.1804744303226471, + "block3_mlp_win_max_spectral_norm": 0.012034066021442413, + "block3_mlp_wout_update_fnorm": 0.2553783357143402, + "block3_mlp_wout_max_l1_linf_norm": 0.41776353120803833, + "block3_mlp_wout_max_spectral_norm": 0.012062220834195614, + "block7_q_update_fnorm": 0.23890770971775055, + "block7_q_max_l1_linf_norm": 0.20645901560783386, + "block7_q_max_spectral_norm": 0.012042424641549587, + "block7_k_update_fnorm": 0.24619613587856293, + "block7_k_max_l1_linf_norm": 0.20615124702453613, + "block7_k_max_spectral_norm": 0.012043444439768791, + "block7_v_update_fnorm": 0.2023150473833084, + "block7_v_max_l1_linf_norm": 0.21022889018058777, + "block7_v_max_spectral_norm": 0.012034633196890354, + "block7_o_update_fnorm": 0.2474278062582016, + "block7_o_max_l1_linf_norm": 0.2069755345582962, + "block7_o_max_spectral_norm": 0.012044793926179409, + "block7_mlp_win_update_fnorm": 0.27983805537223816, + "block7_mlp_win_max_l1_linf_norm": 0.1638038605451584, + "block7_mlp_win_max_spectral_norm": 0.012048245407640934, + "block7_mlp_wout_update_fnorm": 0.2425706833600998, + "block7_mlp_wout_max_l1_linf_norm": 0.40405040979385376, + "block7_mlp_wout_max_spectral_norm": 0.01137557253241539, + "block11_q_update_fnorm": 0.24755725264549255, + "block11_q_max_l1_linf_norm": 0.2118481546640396, + "block11_q_max_spectral_norm": 0.012042658403515816, + "block11_k_update_fnorm": 0.2494671642780304, + "block11_k_max_l1_linf_norm": 0.21787557005882263, + "block11_k_max_spectral_norm": 0.012045626528561115, + "block11_v_update_fnorm": 0.24477934837341309, + "block11_v_max_l1_linf_norm": 0.20630574226379395, + "block11_v_max_spectral_norm": 0.012046289630234241, + "block11_o_update_fnorm": 0.24795469641685486, + "block11_o_max_l1_linf_norm": 0.20665571093559265, + "block11_o_max_spectral_norm": 0.012045280076563358, + "block11_mlp_win_update_fnorm": 0.2345409244298935, + "block11_mlp_win_max_l1_linf_norm": 0.175563782453537, + "block11_mlp_win_max_spectral_norm": 0.011354884132742882, + "block11_mlp_wout_update_fnorm": 0.24011442065238953, + "block11_mlp_wout_max_l1_linf_norm": 0.4158802628517151, + "block11_mlp_wout_max_spectral_norm": 0.011946020647883415, + "total_sharpness": 0.0019959472119808197, + "block_total_sharpness": 0.006871203426271677, + "v_norm_block": 1.7949098348617554, + "v_T_H_v_block": 0.02213696390390396, + "v_norm": 2.236363410949707, + "ip_v_neg_g_hvp": 0.03678889200091362, + "cos_v_neg_g_hvp": 0.021001460030674934, + "g_hvp_norm": 0.7832940816879272, + "ip_v_neg_g_t": 0.0560518279671669, + "cos_v_neg_g_t": 0.02499859221279621, + "g_t_norm": 1.0026096105575562, + "g_norm": 0.7832940816879272, + "hv_norm": 20.562692642211914, + "cos_v_hv": 0.0002170758234569803, + 
"hg_norm": 8432.5712890625, + "cos_g_hg": 0.06206699460744858, + "v_parallel_norm": 0.004541343543678522, + "v_perp_norm": 2.236358880996704, + "embed_lm_head_v_norm": 1.33402419090271, + "embed_lm_head_cos_v_neg_g": 0.03482649475336075, + "layer_1_v_norm": 0.4131356179714203, + "layer_1_cos_v_neg_g": 0.011839277110993862, + "layer_2_v_norm": 0.32105639576911926, + "layer_2_cos_v_neg_g": 0.010448280721902847, + "layer_3_v_norm": 0.3436650037765503, + "layer_3_cos_v_neg_g": 0.019852664321660995, + "layer_4_v_norm": 0.41489389538764954, + "layer_4_cos_v_neg_g": 0.019392479211091995, + "layer_5_v_norm": 0.5236823558807373, + "layer_5_cos_v_neg_g": 0.023427171632647514, + "layer_6_v_norm": 0.5238242149353027, + "layer_6_cos_v_neg_g": 0.026435771957039833, + "layer_7_v_norm": 0.5939532518386841, + "layer_7_cos_v_neg_g": 0.02542773447930813, + "layer_8_v_norm": 0.5976043939590454, + "layer_8_cos_v_neg_g": 0.025842010974884033, + "layer_9_v_norm": 0.5969422459602356, + "layer_9_cos_v_neg_g": 0.027259454131126404, + "layer_10_v_norm": 0.595272958278656, + "layer_10_cos_v_neg_g": 0.02930011786520481, + "layer_11_v_norm": 0.5750658512115479, + "layer_11_cos_v_neg_g": 0.038004402071237564, + "layer_12_v_norm": 0.5981333255767822, + "layer_12_cos_v_neg_g": 0.07223600149154663, + "block0_q_v_norm": 0.10962200909852982, + "block0_q_cos_v_neg_g": -0.026124371215701103, + "block0_k_v_norm": 0.13354574143886566, + "block0_k_cos_v_neg_g": -0.012296230532228947, + "block0_v_v_norm": 0.13208888471126556, + "block0_v_cos_v_neg_g": 0.016794627532362938, + "block0_o_v_norm": 0.1926911175251007, + "block0_o_cos_v_neg_g": 0.017696470022201538, + "block0_mlp_win_v_norm": 0.20542334020137787, + "block0_mlp_win_cos_v_neg_g": 0.020625216886401176, + "block0_mlp_wout_v_norm": 0.20948591828346252, + "block0_mlp_wout_cos_v_neg_g": 0.021188631653785706, + "block3_q_v_norm": 0.02360350452363491, + "block3_q_cos_v_neg_g": 0.009714355692267418, + "block3_k_v_norm": 0.03628597408533096, + "block3_k_cos_v_neg_g": 0.03440115600824356, + "block3_v_v_norm": 0.13802699744701385, + "block3_v_cos_v_neg_g": 0.0246327705681324, + "block3_o_v_norm": 0.16132527589797974, + "block3_o_cos_v_neg_g": 0.01889100670814514, + "block3_mlp_win_v_norm": 0.24302542209625244, + "block3_mlp_win_cos_v_neg_g": 0.02219647355377674, + "block3_mlp_wout_v_norm": 0.2553783357143402, + "block3_mlp_wout_cos_v_neg_g": 0.061742037534713745, + "block7_q_v_norm": 0.23890770971775055, + "block7_q_cos_v_neg_g": 0.02989717200398445, + "block7_k_v_norm": 0.24619613587856293, + "block7_k_cos_v_neg_g": 0.0724010244011879, + "block7_v_v_norm": 0.2023150473833084, + "block7_v_cos_v_neg_g": 0.024630259722471237, + "block7_o_v_norm": 0.2474278062582016, + "block7_o_cos_v_neg_g": 0.07348917424678802, + "block7_mlp_win_v_norm": 0.27983805537223816, + "block7_mlp_win_cos_v_neg_g": 0.030961690470576286, + "block7_mlp_wout_v_norm": 0.2425706833600998, + "block7_mlp_wout_cos_v_neg_g": 0.11973635852336884, + "block11_q_v_norm": 0.24755725264549255, + "block11_q_cos_v_neg_g": 0.07238404452800751, + "block11_k_v_norm": 0.2494671642780304, + "block11_k_cos_v_neg_g": 0.09855788946151733, + "block11_v_v_norm": 0.24477934837341309, + "block11_v_cos_v_neg_g": 0.0407756082713604, + "block11_o_v_norm": 0.24795469641685486, + "block11_o_cos_v_neg_g": 0.0770469382405281, + "block11_mlp_win_v_norm": 0.2345409244298935, + "block11_mlp_win_cos_v_neg_g": 0.10263744741678238, + "block11_mlp_wout_v_norm": 0.24011442065238953, + "block11_mlp_wout_cos_v_neg_g": 0.08897168189287186, + 
"embed_lm_head_sharpness": -0.001781057333573699, + "layer_1_sharpness": -0.048529043793678284, + "layer_2_sharpness": 0.021776361390948296, + "layer_3_sharpness": 0.007469121366739273, + "layer_4_sharpness": 0.0025706905871629715, + "layer_5_sharpness": 0.0015950626693665981, + "layer_6_sharpness": 0.001605150755494833, + "layer_7_sharpness": 0.0011659784941002727, + "layer_8_sharpness": 0.0009306250722147524, + "layer_9_sharpness": 0.0008244090713560581, + "layer_10_sharpness": 0.00047693532542325556, + "layer_11_sharpness": 0.000516515807248652, + "layer_12_sharpness": 0.001278223586268723, + "block0_q_sharpness": -1.3279480934143066, + "block0_k_sharpness": -0.034926604479551315, + "block0_v_sharpness": 0.2540513873100281, + "block0_o_sharpness": 0.00797764677554369, + "block0_mlp_win_sharpness": 0.010743722319602966, + "block0_mlp_wout_sharpness": 0.008588174358010292, + "block3_q_sharpness": 0.0003093705454375595, + "block3_k_sharpness": 0.0287702064961195, + "block3_v_sharpness": 0.0060824924148619175, + "block3_o_sharpness": 0.0017256834544241428, + "block3_mlp_win_sharpness": 0.00022411368263419718, + "block3_mlp_wout_sharpness": 9.056537237484008e-05, + "block7_q_sharpness": 0.00011019262456102297, + "block7_k_sharpness": 7.250784983625636e-05, + "block7_v_sharpness": 0.0026841692160815, + "block7_o_sharpness": 5.39283464604523e-05, + "block7_mlp_win_sharpness": 0.00042018553358502686, + "block7_mlp_wout_sharpness": 7.920752977952361e-05, + "block11_q_sharpness": 0.00011508457828313112, + "block11_k_sharpness": 5.5569315009051934e-05, + "block11_v_sharpness": 0.00010345132614020258, + "block11_o_sharpness": 2.8429773010429926e-05, + "block11_mlp_win_sharpness": 0.0011415977496653795, + "block11_mlp_wout_sharpness": 0.002062734682112932, + "sum_layer_numerators": -0.0020012342547386405, + "block_diag_sharpness": -0.0006211731190737545, + "cross_layer_sharpness": 0.007492376545345431 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..c99eee07d990c515a84f0732af4bcf0f70e3c970 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_7500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.205749034881592, + "total_l1_linf_norm": 18425.923828125, + "total_spectral_norm": 2.2057485580444336, + "embed_lm_head_update_fnorm": 1.3210457563400269, + "embed_lm_head_max_l1_linf_norm": 0.3849870562553406, + "embed_lm_head_max_spectral_norm": 0.19317588210105896, + "layer_1_update_fnorm": 0.4901193380355835, + "layer_1_max_l1_linf_norm": 0.33450332283973694, + "layer_1_max_spectral_norm": 0.012041540816426277, + "layer_2_update_fnorm": 0.24422216415405273, + "layer_2_max_l1_linf_norm": 0.4682219326496124, + "layer_2_max_spectral_norm": 0.01439762208610773, + "layer_3_update_fnorm": 0.29535382986068726, + "layer_3_max_l1_linf_norm": 0.4582933485507965, + "layer_3_max_spectral_norm": 0.01202792301774025, + "layer_4_update_fnorm": 0.4024772644042969, + "layer_4_max_l1_linf_norm": 0.5494058132171631, + "layer_4_max_spectral_norm": 0.020375931635499, + "layer_5_update_fnorm": 0.48260873556137085, + "layer_5_max_l1_linf_norm": 0.4262930750846863, + "layer_5_max_spectral_norm": 0.013246062211692333, + 
"layer_6_update_fnorm": 0.5021575093269348, + "layer_6_max_l1_linf_norm": 0.3976183235645294, + "layer_6_max_spectral_norm": 0.012684985063970089, + "layer_7_update_fnorm": 0.5881430506706238, + "layer_7_max_l1_linf_norm": 0.3969215154647827, + "layer_7_max_spectral_norm": 0.012047354131937027, + "layer_8_update_fnorm": 0.5926426649093628, + "layer_8_max_l1_linf_norm": 0.3997071385383606, + "layer_8_max_spectral_norm": 0.01204836368560791, + "layer_9_update_fnorm": 0.5928044319152832, + "layer_9_max_l1_linf_norm": 0.4016689658164978, + "layer_9_max_spectral_norm": 0.012047254480421543, + "layer_10_update_fnorm": 0.5938267707824707, + "layer_10_max_l1_linf_norm": 0.41139352321624756, + "layer_10_max_spectral_norm": 0.012056158855557442, + "layer_11_update_fnorm": 0.5724133849143982, + "layer_11_max_l1_linf_norm": 0.43849366903305054, + "layer_11_max_spectral_norm": 0.012044121511280537, + "layer_12_update_fnorm": 0.5976343154907227, + "layer_12_max_l1_linf_norm": 0.4413851201534271, + "layer_12_max_spectral_norm": 0.012044056318700314, + "block0_q_update_fnorm": 0.23067811131477356, + "block0_q_max_l1_linf_norm": 0.20979806780815125, + "block0_q_max_spectral_norm": 0.012041540816426277, + "block0_k_update_fnorm": 0.2259465605020523, + "block0_k_max_l1_linf_norm": 0.2086479514837265, + "block0_k_max_spectral_norm": 0.012038804590702057, + "block0_v_update_fnorm": 0.14769797027111053, + "block0_v_max_l1_linf_norm": 0.15787744522094727, + "block0_v_max_spectral_norm": 0.012026848271489143, + "block0_o_update_fnorm": 0.18880467116832733, + "block0_o_max_l1_linf_norm": 0.17881885170936584, + "block0_o_max_spectral_norm": 0.012035205960273743, + "block0_mlp_win_update_fnorm": 0.20219670236110687, + "block0_mlp_win_max_l1_linf_norm": 0.1897139549255371, + "block0_mlp_win_max_spectral_norm": 0.012027605436742306, + "block0_mlp_wout_update_fnorm": 0.1935444325208664, + "block0_mlp_wout_max_l1_linf_norm": 0.3204144835472107, + "block0_mlp_wout_max_spectral_norm": 0.01202885527163744, + "block3_q_update_fnorm": 0.027422260493040085, + "block3_q_max_l1_linf_norm": 0.06929637491703033, + "block3_q_max_spectral_norm": 0.009568738751113415, + "block3_k_update_fnorm": 0.03762191906571388, + "block3_k_max_l1_linf_norm": 0.0770949199795723, + "block3_k_max_spectral_norm": 0.011999977752566338, + "block3_v_update_fnorm": 0.12034924328327179, + "block3_v_max_l1_linf_norm": 0.14951030910015106, + "block3_v_max_spectral_norm": 0.01202565897256136, + "block3_o_update_fnorm": 0.15127721428871155, + "block3_o_max_l1_linf_norm": 0.1329393833875656, + "block3_o_max_spectral_norm": 0.012029295787215233, + "block3_mlp_win_update_fnorm": 0.2310045063495636, + "block3_mlp_win_max_l1_linf_norm": 0.17635229229927063, + "block3_mlp_win_max_spectral_norm": 0.012032490223646164, + "block3_mlp_wout_update_fnorm": 0.2612015902996063, + "block3_mlp_wout_max_l1_linf_norm": 0.42872440814971924, + "block3_mlp_wout_max_spectral_norm": 0.012045293115079403, + "block7_q_update_fnorm": 0.23821403086185455, + "block7_q_max_l1_linf_norm": 0.20582422614097595, + "block7_q_max_spectral_norm": 0.012037904933094978, + "block7_k_update_fnorm": 0.24653729796409607, + "block7_k_max_l1_linf_norm": 0.20684847235679626, + "block7_k_max_spectral_norm": 0.012044711038470268, + "block7_v_update_fnorm": 0.1888570487499237, + "block7_v_max_l1_linf_norm": 0.20786917209625244, + "block7_v_max_spectral_norm": 0.012032241560518742, + "block7_o_update_fnorm": 0.24736391007900238, + "block7_o_max_l1_linf_norm": 0.207525372505188, + 
"block7_o_max_spectral_norm": 0.01204836368560791, + "block7_mlp_win_update_fnorm": 0.27997010946273804, + "block7_mlp_win_max_l1_linf_norm": 0.16264595091342926, + "block7_mlp_win_max_spectral_norm": 0.012046554125845432, + "block7_mlp_wout_update_fnorm": 0.24144214391708374, + "block7_mlp_wout_max_l1_linf_norm": 0.3997071385383606, + "block7_mlp_wout_max_spectral_norm": 0.01138946320861578, + "block11_q_update_fnorm": 0.24725192785263062, + "block11_q_max_l1_linf_norm": 0.21290883421897888, + "block11_q_max_spectral_norm": 0.012041980400681496, + "block11_k_update_fnorm": 0.24894979596138, + "block11_k_max_l1_linf_norm": 0.21445584297180176, + "block11_k_max_spectral_norm": 0.012041829526424408, + "block11_v_update_fnorm": 0.24456694722175598, + "block11_v_max_l1_linf_norm": 0.2047712802886963, + "block11_v_max_spectral_norm": 0.012044056318700314, + "block11_o_update_fnorm": 0.24812725186347961, + "block11_o_max_l1_linf_norm": 0.20766563713550568, + "block11_o_max_spectral_norm": 0.012043734081089497, + "block11_mlp_win_update_fnorm": 0.2346228063106537, + "block11_mlp_win_max_l1_linf_norm": 0.17299845814704895, + "block11_mlp_win_max_spectral_norm": 0.01136542484164238, + "block11_mlp_wout_update_fnorm": 0.23965169489383698, + "block11_mlp_wout_max_l1_linf_norm": 0.4059234857559204, + "block11_mlp_wout_max_spectral_norm": 0.011848492547869682, + "total_sharpness": 0.010981273837387562, + "block_total_sharpness": 0.013671724125742912, + "v_norm_block": 1.7663987874984741, + "v_T_H_v_block": 0.04265803471207619, + "v_norm": 2.205749034881592, + "ip_v_neg_g_hvp": 0.04991939663887024, + "cos_v_neg_g_hvp": 0.020104916766285896, + "g_hvp_norm": 1.1256695985794067, + "ip_v_neg_g_t": 0.0606534481048584, + "cos_v_neg_g_t": 0.022234991192817688, + "g_t_norm": 1.236694574356079, + "g_norm": 1.1256695985794067, + "hv_norm": 9.525896072387695, + "cos_v_hv": 0.0025427457876503468, + "hg_norm": 4615.84228515625, + "cos_g_hg": -0.015396101400256157, + "v_parallel_norm": 0.004159438423812389, + "v_perp_norm": 2.205744981765747, + "embed_lm_head_v_norm": 1.3210457563400269, + "embed_lm_head_cos_v_neg_g": 0.021332595497369766, + "layer_1_v_norm": 0.4901193380355835, + "layer_1_cos_v_neg_g": 0.015390599146485329, + "layer_2_v_norm": 0.24422216415405273, + "layer_2_cos_v_neg_g": 0.031871333718299866, + "layer_3_v_norm": 0.29535382986068726, + "layer_3_cos_v_neg_g": 0.030111320316791534, + "layer_4_v_norm": 0.4024772644042969, + "layer_4_cos_v_neg_g": 0.021925799548625946, + "layer_5_v_norm": 0.48260873556137085, + "layer_5_cos_v_neg_g": 0.02279578149318695, + "layer_6_v_norm": 0.5021575093269348, + "layer_6_cos_v_neg_g": 0.025415997952222824, + "layer_7_v_norm": 0.5881430506706238, + "layer_7_cos_v_neg_g": 0.023901069536805153, + "layer_8_v_norm": 0.5926426649093628, + "layer_8_cos_v_neg_g": 0.02587970346212387, + "layer_9_v_norm": 0.5928044319152832, + "layer_9_cos_v_neg_g": 0.0260454211384058, + "layer_10_v_norm": 0.5938267707824707, + "layer_10_cos_v_neg_g": 0.028562873601913452, + "layer_11_v_norm": 0.5724133849143982, + "layer_11_cos_v_neg_g": 0.036999572068452835, + "layer_12_v_norm": 0.5976343154907227, + "layer_12_cos_v_neg_g": 0.06762737780809402, + "block0_q_v_norm": 0.23067811131477356, + "block0_q_cos_v_neg_g": 0.026855070143938065, + "block0_k_v_norm": 0.2259465605020523, + "block0_k_cos_v_neg_g": 0.02604105696082115, + "block0_v_v_norm": 0.14769797027111053, + "block0_v_cos_v_neg_g": 0.02237716317176819, + "block0_o_v_norm": 0.18880467116832733, + "block0_o_cos_v_neg_g": 0.03660236671566963, 
+ "block0_mlp_win_v_norm": 0.20219670236110687, + "block0_mlp_win_cos_v_neg_g": 0.023384174332022667, + "block0_mlp_wout_v_norm": 0.1935444325208664, + "block0_mlp_wout_cos_v_neg_g": 0.0164785198867321, + "block3_q_v_norm": 0.027422260493040085, + "block3_q_cos_v_neg_g": 0.036471929401159286, + "block3_k_v_norm": 0.03762191906571388, + "block3_k_cos_v_neg_g": 0.11675223708152771, + "block3_v_v_norm": 0.12034924328327179, + "block3_v_cos_v_neg_g": 0.03509406000375748, + "block3_o_v_norm": 0.15127721428871155, + "block3_o_cos_v_neg_g": 0.022538475692272186, + "block3_mlp_win_v_norm": 0.2310045063495636, + "block3_mlp_win_cos_v_neg_g": 0.020553529262542725, + "block3_mlp_wout_v_norm": 0.2612015902996063, + "block3_mlp_wout_cos_v_neg_g": 0.05579646676778793, + "block7_q_v_norm": 0.23821403086185455, + "block7_q_cos_v_neg_g": 0.03000497817993164, + "block7_k_v_norm": 0.24653729796409607, + "block7_k_cos_v_neg_g": 0.08241478353738785, + "block7_v_v_norm": 0.1888570487499237, + "block7_v_cos_v_neg_g": 0.028518598526716232, + "block7_o_v_norm": 0.24736391007900238, + "block7_o_cos_v_neg_g": 0.07795565575361252, + "block7_mlp_win_v_norm": 0.27997010946273804, + "block7_mlp_win_cos_v_neg_g": 0.03118978813290596, + "block7_mlp_wout_v_norm": 0.24144214391708374, + "block7_mlp_wout_cos_v_neg_g": 0.12098751217126846, + "block11_q_v_norm": 0.24725192785263062, + "block11_q_cos_v_neg_g": 0.07795773446559906, + "block11_k_v_norm": 0.24894979596138, + "block11_k_cos_v_neg_g": 0.10117557644844055, + "block11_v_v_norm": 0.24456694722175598, + "block11_v_cos_v_neg_g": 0.039843715727329254, + "block11_o_v_norm": 0.24812725186347961, + "block11_o_cos_v_neg_g": 0.07454061508178711, + "block11_mlp_win_v_norm": 0.2346228063106537, + "block11_mlp_win_cos_v_neg_g": 0.09393474459648132, + "block11_mlp_wout_v_norm": 0.23965169489383698, + "block11_mlp_wout_cos_v_neg_g": 0.08115965127944946, + "embed_lm_head_sharpness": 0.001393211423419416, + "layer_1_sharpness": 0.026452986523509026, + "layer_2_sharpness": 0.018036644905805588, + "layer_3_sharpness": 0.008912838995456696, + "layer_4_sharpness": 0.004586874973028898, + "layer_5_sharpness": 0.0022986747790127993, + "layer_6_sharpness": 0.002197306603193283, + "layer_7_sharpness": 0.0018083874601870775, + "layer_8_sharpness": 0.0016506233951076865, + "layer_9_sharpness": 0.001027487451210618, + "layer_10_sharpness": 0.0006398452678695321, + "layer_11_sharpness": 0.0005700920009985566, + "layer_12_sharpness": 0.0003725049609784037, + "block0_q_sharpness": 0.009760061278939247, + "block0_k_sharpness": 0.04447953775525093, + "block0_v_sharpness": 0.029674287885427475, + "block0_o_sharpness": 0.006952345371246338, + "block0_mlp_win_sharpness": 0.0020633675158023834, + "block0_mlp_wout_sharpness": 0.0021790345199406147, + "block3_q_sharpness": 0.0004826273361686617, + "block3_k_sharpness": 0.019318077713251114, + "block3_v_sharpness": 0.01359629537910223, + "block3_o_sharpness": 0.003132144222036004, + "block3_mlp_win_sharpness": 0.00034215961932204664, + "block3_mlp_wout_sharpness": 0.00012464959581848234, + "block7_q_sharpness": 8.740647172089666e-05, + "block7_k_sharpness": 8.972011710284278e-05, + "block7_v_sharpness": 0.004637368954718113, + "block7_o_sharpness": 8.48182025947608e-05, + "block7_mlp_win_sharpness": 0.0006277624052017927, + "block7_mlp_wout_sharpness": 0.00010693260992411524, + "block11_q_sharpness": 7.027309766272083e-05, + "block11_k_sharpness": 5.344077362678945e-05, + "block11_v_sharpness": 0.00011509757314343005, + "block11_o_sharpness": 
3.616850153775886e-05, + "block11_mlp_win_sharpness": 0.00024112379469443113, + "block11_mlp_wout_sharpness": 0.00042493731598369777, + "sum_layer_numerators": 0.012152057537050488, + "block_diag_sharpness": 0.003894684671186986, + "cross_layer_sharpness": 0.009777039454555925 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..e119f7ae7f9d1ed2e0295c0113243e2239f7b230 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2277183532714844, + "total_l1_linf_norm": 18671.26171875, + "total_spectral_norm": 2.2277188301086426, + "embed_lm_head_update_fnorm": 1.3393176794052124, + "embed_lm_head_max_l1_linf_norm": 0.36396509408950806, + "embed_lm_head_max_spectral_norm": 0.2129865288734436, + "layer_1_update_fnorm": 0.4717259407043457, + "layer_1_max_l1_linf_norm": 0.37436702847480774, + "layer_1_max_spectral_norm": 0.012036980129778385, + "layer_2_update_fnorm": 0.2594015598297119, + "layer_2_max_l1_linf_norm": 0.5447345972061157, + "layer_2_max_spectral_norm": 0.016172727569937706, + "layer_3_update_fnorm": 0.3177007734775543, + "layer_3_max_l1_linf_norm": 0.4901943504810333, + "layer_3_max_spectral_norm": 0.01202829834073782, + "layer_4_update_fnorm": 0.4103156626224518, + "layer_4_max_l1_linf_norm": 0.5639572143554688, + "layer_4_max_spectral_norm": 0.02162119373679161, + "layer_5_update_fnorm": 0.49931207299232483, + "layer_5_max_l1_linf_norm": 0.4169215261936188, + "layer_5_max_spectral_norm": 0.013355696573853493, + "layer_6_update_fnorm": 0.5268954038619995, + "layer_6_max_l1_linf_norm": 0.41022753715515137, + "layer_6_max_spectral_norm": 0.012042254209518433, + "layer_7_update_fnorm": 0.5880352854728699, + "layer_7_max_l1_linf_norm": 0.39901071786880493, + "layer_7_max_spectral_norm": 0.01204910222440958, + "layer_8_update_fnorm": 0.59295654296875, + "layer_8_max_l1_linf_norm": 0.39754414558410645, + "layer_8_max_spectral_norm": 0.012050686404109001, + "layer_9_update_fnorm": 0.5922491550445557, + "layer_9_max_l1_linf_norm": 0.40106165409088135, + "layer_9_max_spectral_norm": 0.012045399285852909, + "layer_10_update_fnorm": 0.5941154360771179, + "layer_10_max_l1_linf_norm": 0.4297311305999756, + "layer_10_max_spectral_norm": 0.012051560916006565, + "layer_11_update_fnorm": 0.568894624710083, + "layer_11_max_l1_linf_norm": 0.4665752053260803, + "layer_11_max_spectral_norm": 0.012041169218719006, + "layer_12_update_fnorm": 0.5984680652618408, + "layer_12_max_l1_linf_norm": 0.457769513130188, + "layer_12_max_spectral_norm": 0.012044153176248074, + "block0_q_update_fnorm": 0.21620656549930573, + "block0_q_max_l1_linf_norm": 0.2048015147447586, + "block0_q_max_spectral_norm": 0.012035520747303963, + "block0_k_update_fnorm": 0.2208237498998642, + "block0_k_max_l1_linf_norm": 0.2072143852710724, + "block0_k_max_spectral_norm": 0.012036980129778385, + "block0_v_update_fnorm": 0.14219754934310913, + "block0_v_max_l1_linf_norm": 0.16062122583389282, + "block0_v_max_spectral_norm": 0.012030924670398235, + "block0_o_update_fnorm": 0.1788071244955063, + "block0_o_max_l1_linf_norm": 0.16929370164871216, + "block0_o_max_spectral_norm": 0.012029617093503475, + 
"block0_mlp_win_update_fnorm": 0.1946556270122528, + "block0_mlp_win_max_l1_linf_norm": 0.19324801862239838, + "block0_mlp_win_max_spectral_norm": 0.01202764455229044, + "block0_mlp_wout_update_fnorm": 0.19171825051307678, + "block0_mlp_wout_max_l1_linf_norm": 0.31739556789398193, + "block0_mlp_wout_max_spectral_norm": 0.012029687874019146, + "block3_q_update_fnorm": 0.04186524450778961, + "block3_q_max_l1_linf_norm": 0.0926678329706192, + "block3_q_max_spectral_norm": 0.011914068832993507, + "block3_k_update_fnorm": 0.04360460117459297, + "block3_k_max_l1_linf_norm": 0.11497433483600616, + "block3_k_max_spectral_norm": 0.011327549815177917, + "block3_v_update_fnorm": 0.125307098031044, + "block3_v_max_l1_linf_norm": 0.14219553768634796, + "block3_v_max_spectral_norm": 0.012027000077068806, + "block3_o_update_fnorm": 0.15643088519573212, + "block3_o_max_l1_linf_norm": 0.13497978448867798, + "block3_o_max_spectral_norm": 0.012027631513774395, + "block3_mlp_win_update_fnorm": 0.22737522423267365, + "block3_mlp_win_max_l1_linf_norm": 0.17508439719676971, + "block3_mlp_win_max_spectral_norm": 0.012033136561512947, + "block3_mlp_wout_update_fnorm": 0.26806050539016724, + "block3_mlp_wout_max_l1_linf_norm": 0.4438422918319702, + "block3_mlp_wout_max_spectral_norm": 0.012047363445162773, + "block7_q_update_fnorm": 0.23967507481575012, + "block7_q_max_l1_linf_norm": 0.20812633633613586, + "block7_q_max_spectral_norm": 0.01203955989331007, + "block7_k_update_fnorm": 0.24649131298065186, + "block7_k_max_l1_linf_norm": 0.2069399356842041, + "block7_k_max_spectral_norm": 0.012046683579683304, + "block7_v_update_fnorm": 0.18730561435222626, + "block7_v_max_l1_linf_norm": 0.20396658778190613, + "block7_v_max_spectral_norm": 0.012031559832394123, + "block7_o_update_fnorm": 0.2478131502866745, + "block7_o_max_l1_linf_norm": 0.2073676735162735, + "block7_o_max_spectral_norm": 0.012043963186442852, + "block7_mlp_win_update_fnorm": 0.2803235352039337, + "block7_mlp_win_max_l1_linf_norm": 0.16809295117855072, + "block7_mlp_win_max_spectral_norm": 0.012050686404109001, + "block7_mlp_wout_update_fnorm": 0.24116496741771698, + "block7_mlp_wout_max_l1_linf_norm": 0.39754414558410645, + "block7_mlp_wout_max_spectral_norm": 0.011393524706363678, + "block11_q_update_fnorm": 0.24738538265228271, + "block11_q_max_l1_linf_norm": 0.21527011692523956, + "block11_q_max_spectral_norm": 0.012040618807077408, + "block11_k_update_fnorm": 0.24991855025291443, + "block11_k_max_l1_linf_norm": 0.214562326669693, + "block11_k_max_spectral_norm": 0.012038908898830414, + "block11_v_update_fnorm": 0.24393543601036072, + "block11_v_max_l1_linf_norm": 0.20724627375602722, + "block11_v_max_spectral_norm": 0.012044153176248074, + "block11_o_update_fnorm": 0.24763864278793335, + "block11_o_max_l1_linf_norm": 0.20954535901546478, + "block11_o_max_spectral_norm": 0.012042660266160965, + "block11_mlp_win_update_fnorm": 0.23492056131362915, + "block11_mlp_win_max_l1_linf_norm": 0.1670066863298416, + "block11_mlp_win_max_spectral_norm": 0.011352820321917534, + "block11_mlp_wout_update_fnorm": 0.24140618741512299, + "block11_mlp_wout_max_l1_linf_norm": 0.4218955934047699, + "block11_mlp_wout_max_spectral_norm": 0.012007879093289375, + "total_sharpness": -0.002888475777581334, + "block_total_sharpness": 0.002872607670724392, + "v_norm_block": 1.7801570892333984, + "v_T_H_v_block": 0.00910317711532116, + "v_norm": 2.2277183532714844, + "ip_v_neg_g_hvp": 0.03599138185381889, + "cos_v_neg_g_hvp": 0.01779319904744625, + "g_hvp_norm": 
0.9079965949058533, + "ip_v_neg_g_t": 0.04955805465579033, + "cos_v_neg_g_t": 0.023088844493031502, + "g_t_norm": 0.9635000824928284, + "g_norm": 0.9079965949058533, + "hv_norm": 10.121729850769043, + "cos_v_hv": -0.0006357323727570474, + "hg_norm": 10980.0498046875, + "cos_g_hg": -0.19893479347229004, + "v_parallel_norm": 0.004637453705072403, + "v_perp_norm": 2.2277135848999023, + "embed_lm_head_v_norm": 1.3393176794052124, + "embed_lm_head_cos_v_neg_g": 0.021014539524912834, + "layer_1_v_norm": 0.4717259407043457, + "layer_1_cos_v_neg_g": 0.0062191118486225605, + "layer_2_v_norm": 0.2594015598297119, + "layer_2_cos_v_neg_g": 0.014710546471178532, + "layer_3_v_norm": 0.31770074367523193, + "layer_3_cos_v_neg_g": 0.02158912643790245, + "layer_4_v_norm": 0.4103156626224518, + "layer_4_cos_v_neg_g": 0.018595706671476364, + "layer_5_v_norm": 0.49931207299232483, + "layer_5_cos_v_neg_g": 0.022233497351408005, + "layer_6_v_norm": 0.5268954634666443, + "layer_6_cos_v_neg_g": 0.022984949871897697, + "layer_7_v_norm": 0.5880352854728699, + "layer_7_cos_v_neg_g": 0.02316301316022873, + "layer_8_v_norm": 0.59295654296875, + "layer_8_cos_v_neg_g": 0.02511422522366047, + "layer_9_v_norm": 0.5922491550445557, + "layer_9_cos_v_neg_g": 0.025845887139439583, + "layer_10_v_norm": 0.5941154360771179, + "layer_10_cos_v_neg_g": 0.028596272692084312, + "layer_11_v_norm": 0.568894624710083, + "layer_11_cos_v_neg_g": 0.03709269315004349, + "layer_12_v_norm": 0.5984680652618408, + "layer_12_cos_v_neg_g": 0.06171426549553871, + "block0_q_v_norm": 0.21620656549930573, + "block0_q_cos_v_neg_g": 0.017540626227855682, + "block0_k_v_norm": 0.2208237498998642, + "block0_k_cos_v_neg_g": 0.014040244743227959, + "block0_v_v_norm": 0.14219754934310913, + "block0_v_cos_v_neg_g": 0.008157355710864067, + "block0_o_v_norm": 0.1788071244955063, + "block0_o_cos_v_neg_g": 0.012482322752475739, + "block0_mlp_win_v_norm": 0.1946556270122528, + "block0_mlp_win_cos_v_neg_g": 0.011494812555611134, + "block0_mlp_wout_v_norm": 0.19171825051307678, + "block0_mlp_wout_cos_v_neg_g": 0.012728830799460411, + "block3_q_v_norm": 0.04186524450778961, + "block3_q_cos_v_neg_g": 0.02586987614631653, + "block3_k_v_norm": 0.04360460117459297, + "block3_k_cos_v_neg_g": 0.046725936233997345, + "block3_v_v_norm": 0.125307098031044, + "block3_v_cos_v_neg_g": 0.021337855607271194, + "block3_o_v_norm": 0.15643088519573212, + "block3_o_cos_v_neg_g": 0.0201756302267313, + "block3_mlp_win_v_norm": 0.22737522423267365, + "block3_mlp_win_cos_v_neg_g": 0.021273886784911156, + "block3_mlp_wout_v_norm": 0.26806050539016724, + "block3_mlp_wout_cos_v_neg_g": 0.04053477570414543, + "block7_q_v_norm": 0.23967507481575012, + "block7_q_cos_v_neg_g": 0.0309995599091053, + "block7_k_v_norm": 0.24649131298065186, + "block7_k_cos_v_neg_g": 0.0775618702173233, + "block7_v_v_norm": 0.18730561435222626, + "block7_v_cos_v_neg_g": 0.0268729068338871, + "block7_o_v_norm": 0.2478131502866745, + "block7_o_cos_v_neg_g": 0.07498816400766373, + "block7_mlp_win_v_norm": 0.2803235352039337, + "block7_mlp_win_cos_v_neg_g": 0.029538964852690697, + "block7_mlp_wout_v_norm": 0.24116496741771698, + "block7_mlp_wout_cos_v_neg_g": 0.11576412618160248, + "block11_q_v_norm": 0.24738538265228271, + "block11_q_cos_v_neg_g": 0.06838521361351013, + "block11_k_v_norm": 0.24991855025291443, + "block11_k_cos_v_neg_g": 0.09382828325033188, + "block11_v_v_norm": 0.24393543601036072, + "block11_v_cos_v_neg_g": 0.03881552815437317, + "block11_o_v_norm": 0.24763864278793335, + "block11_o_cos_v_neg_g": 
0.07625719159841537, + "block11_mlp_win_v_norm": 0.23492056131362915, + "block11_mlp_win_cos_v_neg_g": 0.08271990716457367, + "block11_mlp_wout_v_norm": 0.24140618741512299, + "block11_mlp_wout_cos_v_neg_g": 0.07435843348503113, + "embed_lm_head_sharpness": -0.003802958410233259, + "layer_1_sharpness": -0.031364887952804565, + "layer_2_sharpness": 0.013258403167128563, + "layer_3_sharpness": 0.004037151113152504, + "layer_4_sharpness": 0.0027536461129784584, + "layer_5_sharpness": 0.0016113772289827466, + "layer_6_sharpness": 0.0018802734557539225, + "layer_7_sharpness": 0.0013951477594673634, + "layer_8_sharpness": 0.0013018377358093858, + "layer_9_sharpness": 0.0010025714291259646, + "layer_10_sharpness": 0.0005183388711884618, + "layer_11_sharpness": 0.0006456582923419774, + "layer_12_sharpness": 0.0006782735581509769, + "block0_q_sharpness": -0.0019895348232239485, + "block0_k_sharpness": -0.004834478721022606, + "block0_v_sharpness": 0.09489873051643372, + "block0_o_sharpness": -0.088522769510746, + "block0_mlp_win_sharpness": -0.00488132843747735, + "block0_mlp_wout_sharpness": -0.005334155168384314, + "block3_q_sharpness": -8.669101225677878e-05, + "block3_k_sharpness": 0.012279500253498554, + "block3_v_sharpness": 0.007912498898804188, + "block3_o_sharpness": 0.0020665351767092943, + "block3_mlp_win_sharpness": 0.00014000560622662306, + "block3_mlp_wout_sharpness": 6.80520897731185e-05, + "block7_q_sharpness": 8.973899821285158e-05, + "block7_k_sharpness": 6.823633884778246e-05, + "block7_v_sharpness": 0.004211935680359602, + "block7_o_sharpness": 6.471676897490397e-05, + "block7_mlp_win_sharpness": 0.0005219109007157385, + "block7_mlp_wout_sharpness": 8.544050069758669e-05, + "block11_q_sharpness": 6.954657874302939e-05, + "block11_k_sharpness": 3.552521229721606e-05, + "block11_v_sharpness": 9.938576113199815e-05, + "block11_o_sharpness": 4.556926432996988e-05, + "block11_mlp_win_sharpness": 0.0004049900744576007, + "block11_mlp_wout_sharpness": 0.0012706306297332048, + "sum_layer_numerators": -0.002365854302950904, + "block_diag_sharpness": -0.0007465713841957091, + "cross_layer_sharpness": 0.003619179054920101 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..84cad2f299af93e7298b9587c57708f3969e06e7 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_8500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.676390528678894, + "total_l1_linf_norm": 14046.38671875, + "total_spectral_norm": 1.6763906478881836, + "embed_lm_head_update_fnorm": 1.0118887424468994, + "embed_lm_head_max_l1_linf_norm": 0.32790136337280273, + "embed_lm_head_max_spectral_norm": 0.16852208971977234, + "layer_1_update_fnorm": 0.3468776047229767, + "layer_1_max_l1_linf_norm": 0.2949081063270569, + "layer_1_max_spectral_norm": 0.009036270901560783, + "layer_2_update_fnorm": 0.21767404675483704, + "layer_2_max_l1_linf_norm": 0.39444392919540405, + "layer_2_max_spectral_norm": 0.009166402742266655, + "layer_3_update_fnorm": 0.2482808381319046, + "layer_3_max_l1_linf_norm": 0.35021454095840454, + "layer_3_max_spectral_norm": 0.009876622818410397, + "layer_4_update_fnorm": 0.3043330907821655, + "layer_4_max_l1_linf_norm": 
0.36488276720046997, + "layer_4_max_spectral_norm": 0.015092904679477215, + "layer_5_update_fnorm": 0.37873896956443787, + "layer_5_max_l1_linf_norm": 0.3556138277053833, + "layer_5_max_spectral_norm": 0.011004927568137646, + "layer_6_update_fnorm": 0.3933950662612915, + "layer_6_max_l1_linf_norm": 0.3397219181060791, + "layer_6_max_spectral_norm": 0.009036432020366192, + "layer_7_update_fnorm": 0.4381020665168762, + "layer_7_max_l1_linf_norm": 0.3161046802997589, + "layer_7_max_spectral_norm": 0.009039330296218395, + "layer_8_update_fnorm": 0.44187086820602417, + "layer_8_max_l1_linf_norm": 0.29976022243499756, + "layer_8_max_spectral_norm": 0.009042469784617424, + "layer_9_update_fnorm": 0.44367143511772156, + "layer_9_max_l1_linf_norm": 0.3112197518348694, + "layer_9_max_spectral_norm": 0.00904158130288124, + "layer_10_update_fnorm": 0.4444149136543274, + "layer_10_max_l1_linf_norm": 0.32315850257873535, + "layer_10_max_spectral_norm": 0.009046158753335476, + "layer_11_update_fnorm": 0.4285477101802826, + "layer_11_max_l1_linf_norm": 0.35117387771606445, + "layer_11_max_spectral_norm": 0.009036733768880367, + "layer_12_update_fnorm": 0.4483416974544525, + "layer_12_max_l1_linf_norm": 0.36460432410240173, + "layer_12_max_spectral_norm": 0.0090399868786335, + "block0_q_update_fnorm": 0.16999712586402893, + "block0_q_max_l1_linf_norm": 0.15507367253303528, + "block0_q_max_spectral_norm": 0.009036270901560783, + "block0_k_update_fnorm": 0.16042360663414001, + "block0_k_max_l1_linf_norm": 0.15539933741092682, + "block0_k_max_spectral_norm": 0.00903361290693283, + "block0_v_update_fnorm": 0.10238257050514221, + "block0_v_max_l1_linf_norm": 0.11863812059164047, + "block0_v_max_spectral_norm": 0.009026230312883854, + "block0_o_update_fnorm": 0.13247697055339813, + "block0_o_max_l1_linf_norm": 0.126320943236351, + "block0_o_max_spectral_norm": 0.009028563275933266, + "block0_mlp_win_update_fnorm": 0.13386031985282898, + "block0_mlp_win_max_l1_linf_norm": 0.13407915830612183, + "block0_mlp_win_max_spectral_norm": 0.009027325548231602, + "block0_mlp_wout_update_fnorm": 0.14015325903892517, + "block0_mlp_wout_max_l1_linf_norm": 0.2268548607826233, + "block0_mlp_wout_max_spectral_norm": 0.009027884341776371, + "block3_q_update_fnorm": 0.022188806906342506, + "block3_q_max_l1_linf_norm": 0.06227485090494156, + "block3_q_max_spectral_norm": 0.008331282064318657, + "block3_k_update_fnorm": 0.02703814208507538, + "block3_k_max_l1_linf_norm": 0.06221356242895126, + "block3_k_max_spectral_norm": 0.009005818516016006, + "block3_v_update_fnorm": 0.08839448541402817, + "block3_v_max_l1_linf_norm": 0.10383693128824234, + "block3_v_max_spectral_norm": 0.009024139493703842, + "block3_o_update_fnorm": 0.11264756321907043, + "block3_o_max_l1_linf_norm": 0.09742793440818787, + "block3_o_max_spectral_norm": 0.009027376770973206, + "block3_mlp_win_update_fnorm": 0.16728273034095764, + "block3_mlp_win_max_l1_linf_norm": 0.13556531071662903, + "block3_mlp_win_max_spectral_norm": 0.009027219377458096, + "block3_mlp_wout_update_fnorm": 0.20591552555561066, + "block3_mlp_wout_max_l1_linf_norm": 0.33650413155555725, + "block3_mlp_wout_max_spectral_norm": 0.009046334773302078, + "block7_q_update_fnorm": 0.17722614109516144, + "block7_q_max_l1_linf_norm": 0.1547219157218933, + "block7_q_max_spectral_norm": 0.009037756361067295, + "block7_k_update_fnorm": 0.18456986546516418, + "block7_k_max_l1_linf_norm": 0.15683549642562866, + "block7_k_max_spectral_norm": 0.009040020406246185, + "block7_v_update_fnorm": 
0.13658761978149414, + "block7_v_max_l1_linf_norm": 0.1530567705631256, + "block7_v_max_spectral_norm": 0.009030024521052837, + "block7_o_update_fnorm": 0.18580548465251923, + "block7_o_max_l1_linf_norm": 0.15631920099258423, + "block7_o_max_spectral_norm": 0.009038942866027355, + "block7_mlp_win_update_fnorm": 0.2097722589969635, + "block7_mlp_win_max_l1_linf_norm": 0.1265016794204712, + "block7_mlp_win_max_spectral_norm": 0.009042469784617424, + "block7_mlp_wout_update_fnorm": 0.18023726344108582, + "block7_mlp_wout_max_l1_linf_norm": 0.29976022243499756, + "block7_mlp_wout_max_spectral_norm": 0.008546573109924793, + "block11_q_update_fnorm": 0.18533955514431, + "block11_q_max_l1_linf_norm": 0.15995006263256073, + "block11_q_max_spectral_norm": 0.009037218056619167, + "block11_k_update_fnorm": 0.18690291047096252, + "block11_k_max_l1_linf_norm": 0.16052702069282532, + "block11_k_max_spectral_norm": 0.009036535397171974, + "block11_v_update_fnorm": 0.18311114609241486, + "block11_v_max_l1_linf_norm": 0.15394586324691772, + "block11_v_max_spectral_norm": 0.00903872400522232, + "block11_o_update_fnorm": 0.18616241216659546, + "block11_o_max_l1_linf_norm": 0.15580949187278748, + "block11_o_max_spectral_norm": 0.0090399868786335, + "block11_mlp_win_update_fnorm": 0.17589730024337769, + "block11_mlp_win_max_l1_linf_norm": 0.12862755358219147, + "block11_mlp_win_max_spectral_norm": 0.008523916825652122, + "block11_mlp_wout_update_fnorm": 0.18020950257778168, + "block11_mlp_wout_max_l1_linf_norm": 0.3134523332118988, + "block11_mlp_wout_max_spectral_norm": 0.008982891216874123, + "total_sharpness": 0.023619240149855614, + "block_total_sharpness": 0.02779335528612137, + "v_norm_block": 1.3365501165390015, + "v_T_H_v_block": 0.049649111926555634, + "v_norm": 1.676390528678894, + "ip_v_neg_g_hvp": 0.027304520830512047, + "cos_v_neg_g_hvp": 0.02243274264037609, + "g_hvp_norm": 0.7260674834251404, + "ip_v_neg_g_t": 0.03149475157260895, + "cos_v_neg_g_t": 0.025139572098851204, + "g_t_norm": 0.7473174333572388, + "g_norm": 0.7260674834251404, + "hv_norm": 15.940661430358887, + "cos_v_hv": 0.0024839036632329226, + "hg_norm": 2473.024169921875, + "cos_g_hg": -0.008899196051061153, + "v_parallel_norm": 0.003993252757936716, + "v_perp_norm": 1.676385760307312, + "embed_lm_head_v_norm": 1.0118887424468994, + "embed_lm_head_cos_v_neg_g": 0.03460288792848587, + "layer_1_v_norm": 0.3468776047229767, + "layer_1_cos_v_neg_g": 0.009603791870176792, + "layer_2_v_norm": 0.21767404675483704, + "layer_2_cos_v_neg_g": 0.008632265962660313, + "layer_3_v_norm": 0.2482808381319046, + "layer_3_cos_v_neg_g": 0.023622578009963036, + "layer_4_v_norm": 0.3043330907821655, + "layer_4_cos_v_neg_g": 0.023922042921185493, + "layer_5_v_norm": 0.37873896956443787, + "layer_5_cos_v_neg_g": 0.02563243731856346, + "layer_6_v_norm": 0.3933950662612915, + "layer_6_cos_v_neg_g": 0.02649761363863945, + "layer_7_v_norm": 0.4381020665168762, + "layer_7_cos_v_neg_g": 0.024929890409111977, + "layer_8_v_norm": 0.44187086820602417, + "layer_8_cos_v_neg_g": 0.02600579895079136, + "layer_9_v_norm": 0.44367143511772156, + "layer_9_cos_v_neg_g": 0.028121333569288254, + "layer_10_v_norm": 0.4444149136543274, + "layer_10_cos_v_neg_g": 0.029930364340543747, + "layer_11_v_norm": 0.4285477101802826, + "layer_11_cos_v_neg_g": 0.038452405482530594, + "layer_12_v_norm": 0.4483416974544525, + "layer_12_cos_v_neg_g": 0.06759321689605713, + "block0_q_v_norm": 0.16999712586402893, + "block0_q_cos_v_neg_g": 0.03530328348278999, + "block0_k_v_norm": 
0.16042360663414001, + "block0_k_cos_v_neg_g": 0.03557788208127022, + "block0_v_v_norm": 0.10238257050514221, + "block0_v_cos_v_neg_g": 0.020683255046606064, + "block0_o_v_norm": 0.13247697055339813, + "block0_o_cos_v_neg_g": 0.014801543205976486, + "block0_mlp_win_v_norm": 0.13386031985282898, + "block0_mlp_win_cos_v_neg_g": 0.008417323231697083, + "block0_mlp_wout_v_norm": 0.14015325903892517, + "block0_mlp_wout_cos_v_neg_g": 0.010671873576939106, + "block3_q_v_norm": 0.022188806906342506, + "block3_q_cos_v_neg_g": 0.008203739300370216, + "block3_k_v_norm": 0.02703814208507538, + "block3_k_cos_v_neg_g": 0.12831522524356842, + "block3_v_v_norm": 0.08839448541402817, + "block3_v_cos_v_neg_g": 0.02801104076206684, + "block3_o_v_norm": 0.11264756321907043, + "block3_o_cos_v_neg_g": 0.024823781102895737, + "block3_mlp_win_v_norm": 0.16728273034095764, + "block3_mlp_win_cos_v_neg_g": 0.026334362104535103, + "block3_mlp_wout_v_norm": 0.20591552555561066, + "block3_mlp_wout_cos_v_neg_g": 0.05580902844667435, + "block7_q_v_norm": 0.17722614109516144, + "block7_q_cos_v_neg_g": 0.03140798211097717, + "block7_k_v_norm": 0.18456986546516418, + "block7_k_cos_v_neg_g": 0.07848719507455826, + "block7_v_v_norm": 0.13658761978149414, + "block7_v_cos_v_neg_g": 0.026514941826462746, + "block7_o_v_norm": 0.18580548465251923, + "block7_o_cos_v_neg_g": 0.08496437966823578, + "block7_mlp_win_v_norm": 0.2097722589969635, + "block7_mlp_win_cos_v_neg_g": 0.03114127181470394, + "block7_mlp_wout_v_norm": 0.18023726344108582, + "block7_mlp_wout_cos_v_neg_g": 0.12718580663204193, + "block11_q_v_norm": 0.18533955514431, + "block11_q_cos_v_neg_g": 0.07433442771434784, + "block11_k_v_norm": 0.18690291047096252, + "block11_k_cos_v_neg_g": 0.1023535504937172, + "block11_v_v_norm": 0.18311114609241486, + "block11_v_cos_v_neg_g": 0.04182273894548416, + "block11_o_v_norm": 0.18616241216659546, + "block11_o_cos_v_neg_g": 0.0790729969739914, + "block11_mlp_win_v_norm": 0.17589730024337769, + "block11_mlp_win_cos_v_neg_g": 0.08783533424139023, + "block11_mlp_wout_v_norm": 0.18020950257778168, + "block11_mlp_wout_cos_v_neg_g": 0.0790005549788475, + "embed_lm_head_sharpness": 0.0018175201257690787, + "layer_1_sharpness": 0.19936642050743103, + "layer_2_sharpness": 0.11663389950990677, + "layer_3_sharpness": 0.001997915329411626, + "layer_4_sharpness": 0.004012400284409523, + "layer_5_sharpness": 0.0029225738253444433, + "layer_6_sharpness": 0.002151015680283308, + "layer_7_sharpness": 0.0017068766755983233, + "layer_8_sharpness": 0.0015057360287755728, + "layer_9_sharpness": 0.0010105324909090996, + "layer_10_sharpness": 0.0005403331597335637, + "layer_11_sharpness": 0.000530467601493001, + "layer_12_sharpness": 0.0004046494432259351, + "block0_q_sharpness": 0.00030840697581879795, + "block0_k_sharpness": -2.1119360098964535e-05, + "block0_v_sharpness": 1.3598843812942505, + "block0_o_sharpness": 0.0382109135389328, + "block0_mlp_win_sharpness": 0.0463375560939312, + "block0_mlp_wout_sharpness": 0.04560566693544388, + "block3_q_sharpness": 7.071682921377942e-05, + "block3_k_sharpness": 0.03494952619075775, + "block3_v_sharpness": 0.009399881586432457, + "block3_o_sharpness": 0.0021993988193571568, + "block3_mlp_win_sharpness": 0.0007122874376364052, + "block3_mlp_wout_sharpness": 0.0002562333829700947, + "block7_q_sharpness": 0.00016860324831213802, + "block7_k_sharpness": 0.00011042349797207862, + "block7_v_sharpness": 0.004486574325710535, + "block7_o_sharpness": 5.898425661143847e-05, + "block7_mlp_win_sharpness": 
0.0005869630258530378, + "block7_mlp_wout_sharpness": 9.786774171516299e-05, + "block11_q_sharpness": 0.00013312061491888016, + "block11_k_sharpness": 7.087944686645642e-05, + "block11_v_sharpness": 0.00010823128832271323, + "block11_o_sharpness": 3.395176463527605e-05, + "block11_mlp_win_sharpness": 0.000273133598966524, + "block11_mlp_wout_sharpness": 0.00040198134956881404, + "sum_layer_numerators": 0.03186781918656097, + "block_diag_sharpness": 0.01783946591490826, + "cross_layer_sharpness": 0.00995388937121311 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..0754002c39027ec99c6355a53c5db65d64cf92f2 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.1001358032226562, + "total_l1_linf_norm": 9197.130859375, + "total_spectral_norm": 1.1001356840133667, + "embed_lm_head_update_fnorm": 0.6661964058876038, + "embed_lm_head_max_l1_linf_norm": 0.1905582994222641, + "embed_lm_head_max_spectral_norm": 0.10001007467508316, + "layer_1_update_fnorm": 0.2331753969192505, + "layer_1_max_l1_linf_norm": 0.20098209381103516, + "layer_1_max_spectral_norm": 0.006025472190231085, + "layer_2_update_fnorm": 0.14862364530563354, + "layer_2_max_l1_linf_norm": 0.24828824400901794, + "layer_2_max_spectral_norm": 0.006233257241547108, + "layer_3_update_fnorm": 0.15054075419902802, + "layer_3_max_l1_linf_norm": 0.27165013551712036, + "layer_3_max_spectral_norm": 0.006107062101364136, + "layer_4_update_fnorm": 0.19436316192150116, + "layer_4_max_l1_linf_norm": 0.27286845445632935, + "layer_4_max_spectral_norm": 0.009703483432531357, + "layer_5_update_fnorm": 0.23299075663089752, + "layer_5_max_l1_linf_norm": 0.26242774724960327, + "layer_5_max_spectral_norm": 0.008945394307374954, + "layer_6_update_fnorm": 0.2551012635231018, + "layer_6_max_l1_linf_norm": 0.2538759112358093, + "layer_6_max_spectral_norm": 0.006121480371803045, + "layer_7_update_fnorm": 0.2886136472225189, + "layer_7_max_l1_linf_norm": 0.2480739951133728, + "layer_7_max_spectral_norm": 0.006028973497450352, + "layer_8_update_fnorm": 0.29086658358573914, + "layer_8_max_l1_linf_norm": 0.24198414385318756, + "layer_8_max_spectral_norm": 0.006027512717992067, + "layer_9_update_fnorm": 0.29300230741500854, + "layer_9_max_l1_linf_norm": 0.2538031041622162, + "layer_9_max_spectral_norm": 0.006030158139765263, + "layer_10_update_fnorm": 0.2948737144470215, + "layer_10_max_l1_linf_norm": 0.2634608745574951, + "layer_10_max_spectral_norm": 0.006028232164680958, + "layer_11_update_fnorm": 0.2825719118118286, + "layer_11_max_l1_linf_norm": 0.27601224184036255, + "layer_11_max_spectral_norm": 0.006119498051702976, + "layer_12_update_fnorm": 0.29945141077041626, + "layer_12_max_l1_linf_norm": 0.27726590633392334, + "layer_12_max_spectral_norm": 0.006122991442680359, + "block0_q_update_fnorm": 0.11750197410583496, + "block0_q_max_l1_linf_norm": 0.1045784056186676, + "block0_q_max_spectral_norm": 0.006025472190231085, + "block0_k_update_fnorm": 0.10926475375890732, + "block0_k_max_l1_linf_norm": 0.10457883030176163, + "block0_k_max_spectral_norm": 0.00602443004027009, + "block0_v_update_fnorm": 0.06824234873056412, + 
"block0_v_max_l1_linf_norm": 0.0770346149802208, + "block0_v_max_spectral_norm": 0.006019261199980974, + "block0_o_update_fnorm": 0.08700745552778244, + "block0_o_max_l1_linf_norm": 0.0850653350353241, + "block0_o_max_spectral_norm": 0.006021744571626186, + "block0_mlp_win_update_fnorm": 0.0892675444483757, + "block0_mlp_win_max_l1_linf_norm": 0.09461010247468948, + "block0_mlp_win_max_spectral_norm": 0.006020216271281242, + "block0_mlp_wout_update_fnorm": 0.09156147390604019, + "block0_mlp_wout_max_l1_linf_norm": 0.1479381024837494, + "block0_mlp_wout_max_spectral_norm": 0.0060202740132808685, + "block3_q_update_fnorm": 0.01659298501908779, + "block3_q_max_l1_linf_norm": 0.03906940668821335, + "block3_q_max_spectral_norm": 0.0052670263685286045, + "block3_k_update_fnorm": 0.01959334686398506, + "block3_k_max_l1_linf_norm": 0.04851631075143814, + "block3_k_max_spectral_norm": 0.006016255356371403, + "block3_v_update_fnorm": 0.053355176001787186, + "block3_v_max_l1_linf_norm": 0.0670456662774086, + "block3_v_max_spectral_norm": 0.0060168104246258736, + "block3_o_update_fnorm": 0.07009156793355942, + "block3_o_max_l1_linf_norm": 0.060322172939777374, + "block3_o_max_spectral_norm": 0.006020072847604752, + "block3_mlp_win_update_fnorm": 0.10180691629648209, + "block3_mlp_win_max_l1_linf_norm": 0.09223812073469162, + "block3_mlp_win_max_spectral_norm": 0.006019988562911749, + "block3_mlp_wout_update_fnorm": 0.13694213330745697, + "block3_mlp_wout_max_l1_linf_norm": 0.22366371750831604, + "block3_mlp_wout_max_spectral_norm": 0.006029920652508736, + "block7_q_update_fnorm": 0.11829525977373123, + "block7_q_max_l1_linf_norm": 0.10378189384937286, + "block7_q_max_spectral_norm": 0.006026908289641142, + "block7_k_update_fnorm": 0.12301947176456451, + "block7_k_max_l1_linf_norm": 0.10511628538370132, + "block7_k_max_spectral_norm": 0.006027512717992067, + "block7_v_update_fnorm": 0.08411040157079697, + "block7_v_max_l1_linf_norm": 0.09962522983551025, + "block7_v_max_spectral_norm": 0.006021020468324423, + "block7_o_update_fnorm": 0.12356539070606232, + "block7_o_max_l1_linf_norm": 0.10344499349594116, + "block7_o_max_spectral_norm": 0.006026864983141422, + "block7_mlp_win_update_fnorm": 0.13761897385120392, + "block7_mlp_win_max_l1_linf_norm": 0.08177606016397476, + "block7_mlp_win_max_spectral_norm": 0.006025437731295824, + "block7_mlp_wout_update_fnorm": 0.11887700855731964, + "block7_mlp_wout_max_l1_linf_norm": 0.1968183070421219, + "block7_mlp_wout_max_spectral_norm": 0.005704349838197231, + "block11_q_update_fnorm": 0.12340990453958511, + "block11_q_max_l1_linf_norm": 0.10489897429943085, + "block11_q_max_spectral_norm": 0.006027922499924898, + "block11_k_update_fnorm": 0.12469399720430374, + "block11_k_max_l1_linf_norm": 0.10786992311477661, + "block11_k_max_spectral_norm": 0.006026677321642637, + "block11_v_update_fnorm": 0.12161765992641449, + "block11_v_max_l1_linf_norm": 0.1036541536450386, + "block11_v_max_spectral_norm": 0.006029356736689806, + "block11_o_update_fnorm": 0.12380686402320862, + "block11_o_max_l1_linf_norm": 0.10405086725950241, + "block11_o_max_spectral_norm": 0.006026772316545248, + "block11_mlp_win_update_fnorm": 0.1172153502702713, + "block11_mlp_win_max_l1_linf_norm": 0.08624975383281708, + "block11_mlp_win_max_spectral_norm": 0.005686480551958084, + "block11_mlp_wout_update_fnorm": 0.1223088800907135, + "block11_mlp_wout_max_l1_linf_norm": 0.2238229215145111, + "block11_mlp_wout_max_spectral_norm": 0.006018250249326229, + "total_sharpness": -0.0034296056255698204, + 
"block_total_sharpness": -0.004565601237118244, + "v_norm_block": 0.8754888772964478, + "v_T_H_v_block": -0.003499445505440235, + "v_norm": 1.1001358032226562, + "ip_v_neg_g_hvp": 0.017095383256673813, + "cos_v_neg_g_hvp": 0.01842096820473671, + "g_hvp_norm": 0.8435680270195007, + "ip_v_neg_g_t": 0.024287287145853043, + "cos_v_neg_g_t": 0.016606152057647705, + "g_t_norm": 1.329424500465393, + "g_norm": 0.8435680270195007, + "hv_norm": 6.605578422546387, + "cos_v_hv": -0.0005711887497454882, + "hg_norm": 1949.4097900390625, + "cos_g_hg": 0.03561966493725777, + "v_parallel_norm": 0.002076351549476385, + "v_perp_norm": 1.1001338958740234, + "embed_lm_head_v_norm": 0.6661964058876038, + "embed_lm_head_cos_v_neg_g": 0.02806994505226612, + "layer_1_v_norm": 0.2331753969192505, + "layer_1_cos_v_neg_g": 0.01144975982606411, + "layer_2_v_norm": 0.14862364530563354, + "layer_2_cos_v_neg_g": 0.010580400004982948, + "layer_3_v_norm": 0.15054073929786682, + "layer_3_cos_v_neg_g": 0.01622326485812664, + "layer_4_v_norm": 0.19436316192150116, + "layer_4_cos_v_neg_g": 0.02025209181010723, + "layer_5_v_norm": 0.23299075663089752, + "layer_5_cos_v_neg_g": 0.021756883710622787, + "layer_6_v_norm": 0.2551012635231018, + "layer_6_cos_v_neg_g": 0.02249869704246521, + "layer_7_v_norm": 0.2886136472225189, + "layer_7_cos_v_neg_g": 0.021294649690389633, + "layer_8_v_norm": 0.29086658358573914, + "layer_8_cos_v_neg_g": 0.021946514025330544, + "layer_9_v_norm": 0.29300230741500854, + "layer_9_cos_v_neg_g": 0.022689927369356155, + "layer_10_v_norm": 0.2948737144470215, + "layer_10_cos_v_neg_g": 0.025192663073539734, + "layer_11_v_norm": 0.282571941614151, + "layer_11_cos_v_neg_g": 0.0356937013566494, + "layer_12_v_norm": 0.29945141077041626, + "layer_12_cos_v_neg_g": 0.06353406608104706, + "block0_q_v_norm": 0.11750197410583496, + "block0_q_cos_v_neg_g": 0.020908335223793983, + "block0_k_v_norm": 0.10926475375890732, + "block0_k_cos_v_neg_g": 0.022461311891674995, + "block0_v_v_norm": 0.06824234873056412, + "block0_v_cos_v_neg_g": 0.0124760577455163, + "block0_o_v_norm": 0.08700745552778244, + "block0_o_cos_v_neg_g": 0.02790856920182705, + "block0_mlp_win_v_norm": 0.0892675444483757, + "block0_mlp_win_cos_v_neg_g": 0.02000412717461586, + "block0_mlp_wout_v_norm": 0.09156147390604019, + "block0_mlp_wout_cos_v_neg_g": 0.01823366992175579, + "block3_q_v_norm": 0.01659298501908779, + "block3_q_cos_v_neg_g": -0.0008666618959978223, + "block3_k_v_norm": 0.01959334686398506, + "block3_k_cos_v_neg_g": 0.08543352782726288, + "block3_v_v_norm": 0.053355176001787186, + "block3_v_cos_v_neg_g": 0.028864098712801933, + "block3_o_v_norm": 0.07009156793355942, + "block3_o_cos_v_neg_g": 0.026624450460076332, + "block3_mlp_win_v_norm": 0.10180691629648209, + "block3_mlp_win_cos_v_neg_g": 0.018845580518245697, + "block3_mlp_wout_v_norm": 0.13694213330745697, + "block3_mlp_wout_cos_v_neg_g": 0.027427561581134796, + "block7_q_v_norm": 0.11829525977373123, + "block7_q_cos_v_neg_g": 0.026463687419891357, + "block7_k_v_norm": 0.12301947176456451, + "block7_k_cos_v_neg_g": 0.06645971536636353, + "block7_v_v_norm": 0.08411040157079697, + "block7_v_cos_v_neg_g": 0.024964233860373497, + "block7_o_v_norm": 0.12356539070606232, + "block7_o_cos_v_neg_g": 0.07392095029354095, + "block7_mlp_win_v_norm": 0.13761897385120392, + "block7_mlp_win_cos_v_neg_g": 0.02663271129131317, + "block7_mlp_wout_v_norm": 0.11887700855731964, + "block7_mlp_wout_cos_v_neg_g": 0.11311160773038864, + "block11_q_v_norm": 0.12340990453958511, + "block11_q_cos_v_neg_g": 
0.056869857013225555, + "block11_k_v_norm": 0.12469399720430374, + "block11_k_cos_v_neg_g": 0.09069427102804184, + "block11_v_v_norm": 0.12161765992641449, + "block11_v_cos_v_neg_g": 0.037198420614004135, + "block11_o_v_norm": 0.12380686402320862, + "block11_o_cos_v_neg_g": 0.07875079661607742, + "block11_mlp_win_v_norm": 0.1172153502702713, + "block11_mlp_win_cos_v_neg_g": 0.08264908194541931, + "block11_mlp_wout_v_norm": 0.1223088800907135, + "block11_mlp_wout_cos_v_neg_g": 0.0771811455488205, + "embed_lm_head_sharpness": 0.00026498394436202943, + "layer_1_sharpness": -0.1153504028916359, + "layer_2_sharpness": -0.013553394936025143, + "layer_3_sharpness": 0.0059700943529605865, + "layer_4_sharpness": 0.006445328705012798, + "layer_5_sharpness": 0.0046041603200137615, + "layer_6_sharpness": 0.0023816132452338934, + "layer_7_sharpness": 0.001624941942282021, + "layer_8_sharpness": 0.0015962732722982764, + "layer_9_sharpness": 0.001192584983073175, + "layer_10_sharpness": 0.0006590168341062963, + "layer_11_sharpness": 0.0006886160699650645, + "layer_12_sharpness": 0.0004906483227387071, + "block0_q_sharpness": -0.001112398342229426, + "block0_k_sharpness": -0.0010253931395709515, + "block0_v_sharpness": -0.7234717011451721, + "block0_o_sharpness": -0.01655455119907856, + "block0_mlp_win_sharpness": -0.010689731687307358, + "block0_mlp_wout_sharpness": -0.0010207061422988772, + "block3_q_sharpness": 0.00017011478485073894, + "block3_k_sharpness": 0.02783498913049698, + "block3_v_sharpness": 0.013187521137297153, + "block3_o_sharpness": 0.002940769772976637, + "block3_mlp_win_sharpness": 0.0018566056387498975, + "block3_mlp_wout_sharpness": 0.0006207319092936814, + "block7_q_sharpness": 0.00015737001376692206, + "block7_k_sharpness": 8.092433563433588e-05, + "block7_v_sharpness": 0.004966262262314558, + "block7_o_sharpness": 7.957235357025638e-05, + "block7_mlp_win_sharpness": 0.0007890940178185701, + "block7_mlp_wout_sharpness": 0.0001273605739697814, + "block11_q_sharpness": 0.00018266605911776423, + "block11_k_sharpness": 6.648178532486781e-05, + "block11_v_sharpness": 0.00013231992488726974, + "block11_o_sharpness": 3.464085966697894e-05, + "block11_mlp_win_sharpness": 0.0003673047467600554, + "block11_mlp_wout_sharpness": 0.0005451343604363501, + "sum_layer_numerators": -0.0052582940849798775, + "block_diag_sharpness": -0.0068603078661553015, + "cross_layer_sharpness": 0.0022947066290370573 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..7da027d75ebd1df9be771a8a06ce2bb1854d51c1 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/sharpness_step_9500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.5403160452842712, + "total_l1_linf_norm": 4457.3876953125, + "total_spectral_norm": 0.540316104888916, + "embed_lm_head_update_fnorm": 0.3321920335292816, + "embed_lm_head_max_l1_linf_norm": 0.08583063632249832, + "embed_lm_head_max_spectral_norm": 0.10675976425409317, + "layer_1_update_fnorm": 0.0996284931898117, + "layer_1_max_l1_linf_norm": 0.15834280848503113, + "layer_1_max_spectral_norm": 0.0033725376706570387, + "layer_2_update_fnorm": 0.059723228216171265, + "layer_2_max_l1_linf_norm": 0.1647244095802307, + 
"layer_2_max_spectral_norm": 0.003582550911232829, + "layer_3_update_fnorm": 0.05864515155553818, + "layer_3_max_l1_linf_norm": 0.1725464165210724, + "layer_3_max_spectral_norm": 0.0038213436491787434, + "layer_4_update_fnorm": 0.09051515907049179, + "layer_4_max_l1_linf_norm": 0.17211982607841492, + "layer_4_max_spectral_norm": 0.005047745071351528, + "layer_5_update_fnorm": 0.10904940962791443, + "layer_5_max_l1_linf_norm": 0.16686870157718658, + "layer_5_max_spectral_norm": 0.004457328002899885, + "layer_6_update_fnorm": 0.1271335780620575, + "layer_6_max_l1_linf_norm": 0.15544232726097107, + "layer_6_max_spectral_norm": 0.0034314903896301985, + "layer_7_update_fnorm": 0.14470556378364563, + "layer_7_max_l1_linf_norm": 0.14597046375274658, + "layer_7_max_spectral_norm": 0.003220726503059268, + "layer_8_update_fnorm": 0.14629413187503815, + "layer_8_max_l1_linf_norm": 0.13694989681243896, + "layer_8_max_spectral_norm": 0.0030434553045779467, + "layer_9_update_fnorm": 0.14680124819278717, + "layer_9_max_l1_linf_norm": 0.14661404490470886, + "layer_9_max_spectral_norm": 0.0032254925463348627, + "layer_10_update_fnorm": 0.14823046326637268, + "layer_10_max_l1_linf_norm": 0.15339553356170654, + "layer_10_max_spectral_norm": 0.003392573446035385, + "layer_11_update_fnorm": 0.14189472794532776, + "layer_11_max_l1_linf_norm": 0.14910204708576202, + "layer_11_max_spectral_norm": 0.003312642453238368, + "layer_12_update_fnorm": 0.1497049778699875, + "layer_12_max_l1_linf_norm": 0.15409693121910095, + "layer_12_max_spectral_norm": 0.0034115929156541824, + "block0_q_update_fnorm": 0.055286046117544174, + "block0_q_max_l1_linf_norm": 0.052132684737443924, + "block0_q_max_spectral_norm": 0.0030155072454363108, + "block0_k_update_fnorm": 0.05379701033234596, + "block0_k_max_l1_linf_norm": 0.0518721267580986, + "block0_k_max_spectral_norm": 0.003015281166881323, + "block0_v_update_fnorm": 0.031918950378894806, + "block0_v_max_l1_linf_norm": 0.03632039576768875, + "block0_v_max_spectral_norm": 0.0030127319041639566, + "block0_o_update_fnorm": 0.036030542105436325, + "block0_o_max_l1_linf_norm": 0.03884849324822426, + "block0_o_max_spectral_norm": 0.003012767294421792, + "block0_mlp_win_update_fnorm": 0.029375720769166946, + "block0_mlp_win_max_l1_linf_norm": 0.04978843033313751, + "block0_mlp_win_max_spectral_norm": 0.0030125067569315434, + "block0_mlp_wout_update_fnorm": 0.02783018723130226, + "block0_mlp_wout_max_l1_linf_norm": 0.045462001115083694, + "block0_mlp_wout_max_spectral_norm": 0.003012031549587846, + "block3_q_update_fnorm": 0.007736275438219309, + "block3_q_max_l1_linf_norm": 0.017872726544737816, + "block3_q_max_spectral_norm": 0.002975357696413994, + "block3_k_update_fnorm": 0.010394944809377193, + "block3_k_max_l1_linf_norm": 0.020661378279328346, + "block3_k_max_spectral_norm": 0.002964359475299716, + "block3_v_update_fnorm": 0.024457812309265137, + "block3_v_max_l1_linf_norm": 0.031532831490039825, + "block3_v_max_spectral_norm": 0.0030125617049634457, + "block3_o_update_fnorm": 0.030994627624750137, + "block3_o_max_l1_linf_norm": 0.027136364951729774, + "block3_o_max_spectral_norm": 0.003012910485267639, + "block3_mlp_win_update_fnorm": 0.046138696372509, + "block3_mlp_win_max_l1_linf_norm": 0.047079771757125854, + "block3_mlp_win_max_spectral_norm": 0.0030131684616208076, + "block3_mlp_wout_update_fnorm": 0.06529996544122696, + "block3_mlp_wout_max_l1_linf_norm": 0.1049826443195343, + "block3_mlp_wout_max_spectral_norm": 0.003015512367710471, + "block7_q_update_fnorm": 
0.05902580916881561, + "block7_q_max_l1_linf_norm": 0.05207906663417816, + "block7_q_max_spectral_norm": 0.003015611320734024, + "block7_k_update_fnorm": 0.06156986579298973, + "block7_k_max_l1_linf_norm": 0.05226445943117142, + "block7_k_max_spectral_norm": 0.0030168823432177305, + "block7_v_update_fnorm": 0.04394201189279556, + "block7_v_max_l1_linf_norm": 0.05111737549304962, + "block7_v_max_spectral_norm": 0.003013977315276861, + "block7_o_update_fnorm": 0.061887793242931366, + "block7_o_max_l1_linf_norm": 0.05172593146562576, + "block7_o_max_spectral_norm": 0.003016322385519743, + "block7_mlp_win_update_fnorm": 0.0690692588686943, + "block7_mlp_win_max_l1_linf_norm": 0.04227398708462715, + "block7_mlp_win_max_spectral_norm": 0.0030156918801367283, + "block7_mlp_wout_update_fnorm": 0.05978734791278839, + "block7_mlp_wout_max_l1_linf_norm": 0.09911172091960907, + "block7_mlp_wout_max_spectral_norm": 0.0028546524699777365, + "block11_q_update_fnorm": 0.06183210015296936, + "block11_q_max_l1_linf_norm": 0.054023098200559616, + "block11_q_max_spectral_norm": 0.0030173659324645996, + "block11_k_update_fnorm": 0.062273621559143066, + "block11_k_max_l1_linf_norm": 0.053651466965675354, + "block11_k_max_spectral_norm": 0.0030162599869072437, + "block11_v_update_fnorm": 0.060614269226789474, + "block11_v_max_l1_linf_norm": 0.051764003932476044, + "block11_v_max_spectral_norm": 0.003016353351995349, + "block11_o_update_fnorm": 0.06220399960875511, + "block11_o_max_l1_linf_norm": 0.05196717008948326, + "block11_o_max_spectral_norm": 0.0030166348442435265, + "block11_mlp_win_update_fnorm": 0.05862637236714363, + "block11_mlp_win_max_l1_linf_norm": 0.04174279049038887, + "block11_mlp_win_max_spectral_norm": 0.0028440747410058975, + "block11_mlp_wout_update_fnorm": 0.06088202819228172, + "block11_mlp_wout_max_l1_linf_norm": 0.11265181750059128, + "block11_mlp_wout_max_spectral_norm": 0.002996762515977025, + "total_sharpness": 0.04405119642615318, + "block_total_sharpness": 0.08875157684087753, + "v_norm_block": 0.4261336624622345, + "v_T_H_v_block": 0.016116388142108917, + "v_norm": 0.5403160452842712, + "ip_v_neg_g_hvp": 0.0029582742135971785, + "cos_v_neg_g_hvp": 0.004978805780410767, + "g_hvp_norm": 1.099677562713623, + "ip_v_neg_g_t": 0.03337420895695686, + "cos_v_neg_g_t": 0.019655752927064896, + "g_t_norm": 3.142486572265625, + "g_norm": 1.099677562713623, + "hv_norm": 19.213558197021484, + "cos_v_hv": 0.0012387901078909636, + "hg_norm": 82262.0546875, + "cos_g_hg": -0.2467006891965866, + "v_parallel_norm": 0.0007188011077232659, + "v_perp_norm": 0.540315568447113, + "embed_lm_head_v_norm": 0.3321920335292816, + "embed_lm_head_cos_v_neg_g": 0.026505926623940468, + "layer_1_v_norm": 0.0996284931898117, + "layer_1_cos_v_neg_g": -0.03126213327050209, + "layer_2_v_norm": 0.059723228216171265, + "layer_2_cos_v_neg_g": -0.03830268606543541, + "layer_3_v_norm": 0.05864515155553818, + "layer_3_cos_v_neg_g": -0.007689225487411022, + "layer_4_v_norm": 0.09051515907049179, + "layer_4_cos_v_neg_g": 0.024044403806328773, + "layer_5_v_norm": 0.10904940962791443, + "layer_5_cos_v_neg_g": 0.029012495651841164, + "layer_6_v_norm": 0.1271335780620575, + "layer_6_cos_v_neg_g": 0.02608415298163891, + "layer_7_v_norm": 0.14470556378364563, + "layer_7_cos_v_neg_g": 0.022047443315386772, + "layer_8_v_norm": 0.14629413187503815, + "layer_8_cos_v_neg_g": 0.020439639687538147, + "layer_9_v_norm": 0.14680124819278717, + "layer_9_cos_v_neg_g": 0.02088336832821369, + "layer_10_v_norm": 0.14823046326637268, + 
"layer_10_cos_v_neg_g": 0.02458530105650425, + "layer_11_v_norm": 0.14189472794532776, + "layer_11_cos_v_neg_g": 0.032531775534152985, + "layer_12_v_norm": 0.1497049778699875, + "layer_12_cos_v_neg_g": 0.06512976437807083, + "block0_q_v_norm": 0.055286046117544174, + "block0_q_cos_v_neg_g": 0.016421489417552948, + "block0_k_v_norm": 0.05379701033234596, + "block0_k_cos_v_neg_g": 0.009917233139276505, + "block0_v_v_norm": 0.031918950378894806, + "block0_v_cos_v_neg_g": -0.02116273157298565, + "block0_o_v_norm": 0.036030542105436325, + "block0_o_cos_v_neg_g": -0.06105247884988785, + "block0_mlp_win_v_norm": 0.029375720769166946, + "block0_mlp_win_cos_v_neg_g": -0.05624747276306152, + "block0_mlp_wout_v_norm": 0.02783018723130226, + "block0_mlp_wout_cos_v_neg_g": -0.08033281564712524, + "block3_q_v_norm": 0.007736275438219309, + "block3_q_cos_v_neg_g": 0.02481844648718834, + "block3_k_v_norm": 0.010394944809377193, + "block3_k_cos_v_neg_g": 0.044193804264068604, + "block3_v_v_norm": 0.024457812309265137, + "block3_v_cos_v_neg_g": 0.019862176850438118, + "block3_o_v_norm": 0.030994627624750137, + "block3_o_cos_v_neg_g": 0.027429960668087006, + "block3_mlp_win_v_norm": 0.046138696372509, + "block3_mlp_win_cos_v_neg_g": 0.030794139951467514, + "block3_mlp_wout_v_norm": 0.06529996544122696, + "block3_mlp_wout_cos_v_neg_g": 0.03332110494375229, + "block7_q_v_norm": 0.05902580916881561, + "block7_q_cos_v_neg_g": 0.027605270966887474, + "block7_k_v_norm": 0.06156986579298973, + "block7_k_cos_v_neg_g": 0.06913460791110992, + "block7_v_v_norm": 0.04394201189279556, + "block7_v_cos_v_neg_g": 0.016424022614955902, + "block7_o_v_norm": 0.061887793242931366, + "block7_o_cos_v_neg_g": 0.06927914172410965, + "block7_mlp_win_v_norm": 0.0690692588686943, + "block7_mlp_win_cos_v_neg_g": 0.02362809330224991, + "block7_mlp_wout_v_norm": 0.05978734791278839, + "block7_mlp_wout_cos_v_neg_g": 0.10536938905715942, + "block11_q_v_norm": 0.06183210015296936, + "block11_q_cos_v_neg_g": 0.07175510376691818, + "block11_k_v_norm": 0.062273621559143066, + "block11_k_cos_v_neg_g": 0.09761423617601395, + "block11_v_v_norm": 0.060614269226789474, + "block11_v_cos_v_neg_g": 0.034124668687582016, + "block11_o_v_norm": 0.06220399960875511, + "block11_o_cos_v_neg_g": 0.0788840726017952, + "block11_mlp_win_v_norm": 0.05862637236714363, + "block11_mlp_win_cos_v_neg_g": 0.08535214513540268, + "block11_mlp_wout_v_norm": 0.06088202819228172, + "block11_mlp_wout_cos_v_neg_g": 0.08006409555673599, + "embed_lm_head_sharpness": -0.002774925669655204, + "layer_1_sharpness": 0.25688478350639343, + "layer_2_sharpness": 1.0270830392837524, + "layer_3_sharpness": 0.07815723866224289, + "layer_4_sharpness": 0.0068018375895917416, + "layer_5_sharpness": 0.007853478193283081, + "layer_6_sharpness": 0.003491114592179656, + "layer_7_sharpness": 0.002030103001743555, + "layer_8_sharpness": 0.001723205205053091, + "layer_9_sharpness": 0.0012648862320929766, + "layer_10_sharpness": 0.0006674494943581522, + "layer_11_sharpness": 0.0005960162961855531, + "layer_12_sharpness": 0.0005399390938691795, + "block0_q_sharpness": -0.0004380691971164197, + "block0_k_sharpness": -0.0014493464259430766, + "block0_v_sharpness": 0.3073559105396271, + "block0_o_sharpness": -0.06985456496477127, + "block0_mlp_win_sharpness": 0.02382405288517475, + "block0_mlp_wout_sharpness": 0.608583390712738, + "block3_q_sharpness": 0.00016130725271068513, + "block3_k_sharpness": 0.0014861926902085543, + "block3_v_sharpness": 0.009798387996852398, + "block3_o_sharpness": 
0.004606942646205425, + "block3_mlp_win_sharpness": 0.0040638078935444355, + "block3_mlp_wout_sharpness": 0.0008804136887192726, + "block7_q_sharpness": 0.0001281402219319716, + "block7_k_sharpness": 0.00010964243847411126, + "block7_v_sharpness": 0.005008441861718893, + "block7_o_sharpness": 8.475912181893364e-05, + "block7_mlp_win_sharpness": 0.0008075009100139141, + "block7_mlp_wout_sharpness": 0.00012190788402222097, + "block11_q_sharpness": 0.00013912377471569926, + "block11_k_sharpness": 6.891984958201647e-05, + "block11_v_sharpness": 0.00014048695447854698, + "block11_o_sharpness": 3.6977660784032196e-05, + "block11_mlp_win_sharpness": 0.000393517198972404, + "block11_mlp_wout_sharpness": 0.0007657412206754088, + "sum_layer_numerators": 0.006833025719923038, + "block_diag_sharpness": 0.0376288867636023, + "cross_layer_sharpness": 0.05112269007727523 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/training_log.txt b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..b2bd4c998cd540462c04621e3ad29b35e5874231 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_0b068980-f311-47ed-ae52-48ae6fff6ec3/training_log.txt @@ -0,0 +1,11788 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +import nano_GPT_qkvonorm_pure +from nano_GPT_qkvonorm_pure import GPT, GPTConfig + +# Import debug utilities +# from debug_utils import setup_debugpy + +# 
----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes, + shuffle_files=False, random_seed=None): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + self.shuffle_files = shuffle_files + self.random_seed = random_seed + self._rng = random.Random(random_seed) if shuffle_files and random_seed is not None else None + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + if self.shuffle_files: + self._shuffle_files() + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + next_shard = (self.current_shard + 1) % len(self.files) + if next_shard == 0 and self.shuffle_files: + self._shuffle_files() + self.current_shard = next_shard + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + 
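For reference, the shard layout that _peek_data_shard and _load_data_shard above expect (a 256-int32 header with magic 20240520, version 1 and a token count, followed by the tokens as uint16) can be produced by a minimal writer along these lines. This is an illustrative sketch only; write_datafile is a hypothetical name and is not part of the logged script.

import numpy as np

def write_datafile(filename, tokens):
    # Sketch of a writer matching the readers above:
    # 256 int32 header (magic, version, token count), then the tokens as uint16.
    header = np.zeros(256, dtype=np.int32)
    header[0] = 20240520           # magic number checked by the loader
    header[1] = 1                  # version the loader asserts
    header[2] = len(tokens)        # number of tokens that follow
    toks = np.asarray(tokens, dtype=np.uint16)
    with open(filename, "wb") as f:
        f.write(header.tobytes())
        f.write(toks.tobytes())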
# if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + + def _shuffle_files(self): + if self._rng is not None: + self._rng.shuffle(self.files) + else: + random.shuffle(self.files) + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + + all_param_groups["embed_lm_head"] = list(model.lm_head.parameters()) + + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # Add fine-grained params for selected layers (0, 3, 7, 11) + selected_layers = [0, 3, 7, 11] + for layer_idx in selected_layers: + block = blocks[layer_idx] + prefix = f"block{layer_idx}" + # Attention: Q, K, V, O + all_param_groups[f"{prefix}_q"] = [block.attn.q_w.weight] + all_param_groups[f"{prefix}_k"] = [block.attn.k_w.weight] + all_param_groups[f"{prefix}_v"] = [block.attn.v_w.weight] + all_param_groups[f"{prefix}_o"] = [block.attn.c_proj.weight] + # MLP: c_fc (win) and c_proj (wout) + all_param_groups[f"{prefix}_mlp_win"] = [block.mlp.c_fc.weight] + all_param_groups[f"{prefix}_mlp_wout"] = [block.mlp.c_proj.weight] + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + original_flash = nano_GPT_qkvonorm_pure.FLASH + nano_GPT_qkvonorm_pure.FLASH = 0 + print0(f"[Enhanced Sharpness @ Step {step}] Disabled FLASH attention for HVP (was {original_flash})") + + # Get block parameter indices for cross-layer analysis (need this before loop) + block_param_indices = set() + for group_name, param_group in all_param_groups.items(): + if group_name.startswith("layer_"): + for p in param_group: + if id(p) in param_to_idx: + block_param_indices.add(param_to_idx[id(p)]) + + # Initialize accumulators for all quantities we need + grads_hvp = None + hvp_v_total = None + hvp_v_block = None + hvp_g_accum = None + layer_hvp_accum = {} + + + group_names_to_process = [gn for gn, pg in all_param_groups.items() + if pg and any(id(p) in param_to_idx for p in pg)] + + if last_training_batches is not None and len(last_training_batches) > 0: + + batch_iterator = [(x, y) for x, y in last_training_batches] + n_batches = len(batch_iterator) + print0(f"[Enhanced Sharpness @ Step {step}] Using {n_batches} microbatches for HVP (out of {grad_accum_steps} training microbatches)") + restore_loader = False + else: + # Fallback: use new batches from train_loader (should rarely happen) + print0(f"[Enhanced Sharpness @ Step {step}] WARNING: last_training_batches is None/empty, using {grad_accum_steps} new batches (inconsistent)") + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + n_batches = grad_accum_steps # Use same number as training for consistency + batch_iterator = [] + shard_was_changed = False + for _ in range(n_batches): + x_hvp, y_hvp = 
train_loader.next_batch() + batch_iterator.append((x_hvp, y_hvp)) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + restore_loader = True + + + print0(f"[Enhanced Sharpness @ Step {step}] Computing HVPs for {n_batches} microbatches") + for mb_idx, (x_hvp, y_hvp) in enumerate(batch_iterator): + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + + + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + grads_mb = torch.autograd.grad(loss_mb, model.parameters(), create_graph=True, allow_unused=True) + + # Compute H·v (total sharpness) + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_mb, update_direction_v) if g is not None) + + if not isinstance(v_dot_g_total, torch.Tensor): + v_dot_g_total = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_total_mb = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + # Compute H·v_block (block-only sharpness) + if block_param_indices: + v_dot_g_block = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in block_param_indices if grads_mb[i] is not None) + if not isinstance(v_dot_g_block, torch.Tensor): + v_dot_g_block = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_block_mb = torch.autograd.grad(v_dot_g_block, model.parameters(), retain_graph=True, allow_unused=True) + else: + + hvp_v_block_mb = [None] * len(list(model.parameters())) + + + g_dot_g = sum(torch.sum(g * g) for g in grads_mb if g is not None) + if not isinstance(g_dot_g, torch.Tensor): + g_dot_g = torch.tensor(0.0, device=device, requires_grad=True) + + + hvp_g_mb_raw = torch.autograd.grad(g_dot_g, model.parameters(), + retain_graph=True, allow_unused=True) + hvp_g_mb = [h / 2.0 if h is not None else None for h in hvp_g_mb_raw] + + # Compute per-layer H_kk·v_k (for layer-wise sharpness) + for group_idx, group_name in enumerate(group_names_to_process): + param_group = all_param_groups[group_name] + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + is_last_layer = (group_idx == len(group_names_to_process) - 1) + is_last_microbatch = (mb_idx == n_batches - 1) + need_retain = not (is_last_layer and is_last_microbatch) + + try: + v_dot_g_layer = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in indices if grads_mb[i] is not None) + + if not isinstance(v_dot_g_layer, torch.Tensor): + v_dot_g_layer = torch.tensor(0.0, device=device, requires_grad=True) + + hvp_layer_mb = torch.autograd.grad(v_dot_g_layer, model.parameters(), + retain_graph=need_retain, + allow_unused=True) + + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_layer_mb] + else: + layer_hvp_accum[group_name] = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + ] + + # Accumulate layer HVP + # if group_name not in layer_hvp_accum: + # layer_hvp_accum[group_name] = [h.detach() / n_batches if h is not None else None for h in hvp_layer_mb] + # else: + # layer_hvp_accum[group_name] = [ + # (h_acc + h.detach() / n_batches) if (h is not None and h_acc is not None) + # else (h.detach() / n_batches if h is not None else h_acc) + # for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + # ] + # del hvp_layer_mb, v_dot_g_layer + # torch.cuda.empty_cache() + except 
Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error computing layer HVP for '{group_name}' in microbatch {mb_idx}: {e}") + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = None + + # 6. Accumulate all quantities + if grads_hvp is None: + grads_hvp = [(g.detach() / n_batches).cpu() if g is not None else None for g in grads_mb] + hvp_v_total = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_total_mb] + hvp_v_block = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_block_mb] + hvp_g_accum = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_g_mb] + else: + grads_hvp = [ + (g_acc + (g.detach() / n_batches).cpu()) if (g is not None and g_acc is not None) + else ((g.detach() / n_batches).cpu() if g is not None else g_acc) + for g_acc, g in zip(grads_hvp, grads_mb) + ] + hvp_v_total = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_total, hvp_v_total_mb) + ] + hvp_v_block = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_block, hvp_v_block_mb) + ] + hvp_g_accum = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_g_accum, hvp_g_mb) + ] + + + + if mb_idx % max(1, n_batches // 4) == 0: + print0(f"[Enhanced Sharpness @ Step {step}] Processed microbatch {mb_idx + 1}/{n_batches}") + + + if restore_loader: + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + print0(f"[Enhanced Sharpness @ Step {step}] Finished computing all HVPs for {n_batches} microbatches") + grads_hvp = [g.to(device) if g is not None else None for g in grads_hvp] + hvp_v_total = [h.to(device) if h is not None else None for h in hvp_v_total] + hvp_v_block = [h.to(device) if h is not None else None for h in hvp_v_block] + hvp_g_accum = [h.to(device) if h is not None else None for h in hvp_g_accum] + for group_name in layer_hvp_accum: + if layer_hvp_accum[group_name] is not None: + layer_hvp_accum[group_name] = [h.to(device) if h is not None else None for h in layer_hvp_accum[group_name]] + # --- Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + # hvp_v_total is already computed in the loop above + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_v_total, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_total, torch.Tensor): + vhp_dot_v_total = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_total, torch.Tensor): + v_norm_sq_total = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + + print0(f"[Enhanced Sharpness @ Step {step}] Calculating BLOCK-ONLY total sharpness...") + # hvp_v_block is 
already computed in the loop above + if block_param_indices: # Only compute if there are block parameters + # Compute v_block^T H v_block (only sum over block indices) + vhp_dot_v_block = sum(torch.sum(hvp_v_block[i] * update_direction_v[i]) + for i in block_param_indices if hvp_v_block[i] is not None) + + v_norm_sq_block = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in block_param_indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_block, torch.Tensor): + vhp_dot_v_block = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_block, torch.Tensor): + v_norm_sq_block = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_block, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_block, op=dist.ReduceOp.AVG) + + if v_norm_sq_block.item() > 1e-12: + analysis_results["block_total_sharpness"] = (vhp_dot_v_block / v_norm_sq_block).item() + else: + analysis_results["block_total_sharpness"] = 0.0 + + analysis_results["v_norm_block"] = torch.sqrt(v_norm_sq_block).item() + analysis_results["v_T_H_v_block"] = vhp_dot_v_block.item() + else: + # No block parameters + analysis_results["block_total_sharpness"] = 0.0 + analysis_results["v_norm_block"] = 0.0 + analysis_results["v_T_H_v_block"] = 0.0 + + torch.cuda.empty_cache() + + # ---- Alignment metrics between update v and (negative) gradient g ---- + eps = 1e-12 + v_norm = torch.sqrt(v_norm_sq_total + eps) + analysis_results["v_norm"] = v_norm.item() + + # --- Version 1: g_hvp --- + ip_v_neg_g_hvp = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + g_hvp_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + + if not isinstance(ip_v_neg_g_hvp, torch.Tensor): + ip_v_neg_g_hvp = torch.tensor(0.0, device=device) + if not isinstance(g_hvp_norm_sq, torch.Tensor): + g_hvp_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_v_neg_g_hvp, op=dist.ReduceOp.AVG) + dist.all_reduce(g_hvp_norm_sq, op=dist.ReduceOp.AVG) + g_hvp_norm = torch.sqrt(g_hvp_norm_sq + eps) + analysis_results["ip_v_neg_g_hvp"] = ip_v_neg_g_hvp.item() + analysis_results["cos_v_neg_g_hvp"] = (ip_v_neg_g_hvp / (v_norm * g_hvp_norm + eps)).item() + analysis_results["g_hvp_norm"] = g_hvp_norm.item() + + # --- Version 2: g_t (original gradient that produced v) --- + # last_training_gradient is the actual gradient from training that led to the update v + if last_training_gradient is not None: + ip_v_neg_g_t = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, last_training_gradient) if g is not None) + g_t_norm_sq = sum(torch.sum(g * g) for g in last_training_gradient if g is not None) + dist.all_reduce(ip_v_neg_g_t, op=dist.ReduceOp.AVG) + dist.all_reduce(g_t_norm_sq, op=dist.ReduceOp.AVG) + g_t_norm = torch.sqrt(g_t_norm_sq + eps) + analysis_results["ip_v_neg_g_t"] = ip_v_neg_g_t.item() + analysis_results["cos_v_neg_g_t"] = (ip_v_neg_g_t / (v_norm * g_t_norm + eps)).item() + analysis_results["g_t_norm"] = g_t_norm.item() + else: + print0(f"[Enhanced Sharpness @ Step {step}] Warning: last_training_gradient is None, skipping g_t metrics") + + # Keep backward compatibility aliases (g_norm uses g_hvp for now) + g_norm_sq = g_hvp_norm_sq + g_norm = g_hvp_norm + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_v_total if hvp is not None) + if not isinstance(hv_norm_sq, torch.Tensor): + hv_norm_sq = torch.tensor(0.0, device=device) + 
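The *_sharpness values logged in the JSON files above are directional curvatures: the Rayleigh quotient v^T H v / ||v||^2 along the update direction v, with H v obtained by differentiating the inner product g . v a second time (double backprop), as the microbatch loop above does. The following is a minimal self-contained sketch of that computation on a toy quadratic; the function and variable names are illustrative and not part of the logged script.

import torch

def directional_sharpness(loss, params, v):
    # H v via double backprop (Pearlmutter's trick), then v^T H v / ||v||^2
    grads = torch.autograd.grad(loss, params, create_graph=True)
    g_dot_v = sum((g * vi).sum() for g, vi in zip(grads, v))
    hv = torch.autograd.grad(g_dot_v, params)
    v_dot_hv = sum((h * vi).sum() for h, vi in zip(hv, v))
    v_norm_sq = sum((vi * vi).sum() for vi in v)
    return (v_dot_hv / v_norm_sq).item()

# toy check: for loss = 0.5 * w^T A w the Hessian is A, so the sharpness
# along the third basis vector should be A[2, 2] = 3.0
w = torch.randn(3, requires_grad=True)
A = torch.diag(torch.tensor([1.0, 2.0, 3.0]))
loss = 0.5 * (w @ A @ w)
v = [torch.tensor([0.0, 0.0, 1.0])]
print(directional_sharpness(loss, [w], v))  # -> 3.0 (up to float precision)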
dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg ---- + # hvp_g_accum is already computed in the loop above + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_accum) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_accum if hg is not None) + if not isinstance(ip_g_hg, torch.Tensor): + ip_g_hg = torch.tensor(0.0, device=device) + if not isinstance(hg_norm_sq, torch.Tensor): + hg_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + if not isinstance(v_parallel_norm_sq, torch.Tensor): + v_parallel_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(torch.clamp(v_norm_sq_total - v_parallel_norm_sq, min=0.0) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + # Ensure they are tensors + if not isinstance(v_norm_sq_layer, torch.Tensor): + v_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(g_norm_sq_layer, torch.Tensor): + g_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(ip_v_neg_g_layer, torch.Tensor): + ip_v_neg_g_layer = torch.tensor(0.0, device=device) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + if group_name not in layer_hvp_accum or layer_hvp_accum[group_name] is None: + print0(f"[Enhanced Sharpness @ Step {step}] No HVP data for '{group_name}', skipping") + analysis_results[f"{group_name}_sharpness"] = 0.0 + continue + + hvp_group_result = layer_hvp_accum[group_name] + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_group, torch.Tensor): + vhp_dot_v_group = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_group, torch.Tensor): + v_norm_sq_group = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- Calculate block-diagonal approximation and cross-layer interaction --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating block-diagonal and cross-layer sharpness...") + + sum_layer_numerators = 0.0 + for layer in range(1, NUM_LAYERS + 1): + layer_name = f"layer_{layer}" + if f"{layer_name}_sharpness" in analysis_results and f"{layer_name}_v_norm" in analysis_results: + s_k = analysis_results[f"{layer_name}_sharpness"] + v_k_norm = analysis_results[f"{layer_name}_v_norm"] + sum_layer_numerators += s_k * (v_k_norm ** 2) + + analysis_results["sum_layer_numerators"] = sum_layer_numerators + + # Block-diagonal sharpness (using block ||v||²) + v_norm_block = analysis_results.get("v_norm_block", 0) + v_norm_sq_block_val = v_norm_block ** 2 if v_norm_block else 1e-12 + + if v_norm_sq_block_val > 1e-12: + analysis_results["block_diag_sharpness"] = sum_layer_numerators / v_norm_sq_block_val + else: + analysis_results["block_diag_sharpness"] = 0.0 + + # Cross-layer interaction = block_total - block_diag + block_total = analysis_results.get("block_total_sharpness", 0) + block_diag = analysis_results.get("block_diag_sharpness", 0) + analysis_results["cross_layer_sharpness"] = block_total - block_diag + + print0(f"[Enhanced Sharpness @ Step {step}] block_total={block_total:.6f}, block_diag={block_diag:.6f}, cross_layer={block_total - block_diag:.6f}") + + # --- 8. 
Cleanup --- + nano_GPT_qkvonorm_pure.FLASH = original_flash + print0(f"[Enhanced Sharpness @ Step {step}] Restored FLASH attention to {original_flash}") + + print0(f"[Enhanced Sharpness @ Step {step}] Restoring parameters back to θ_{{t+1}}...") + with torch.no_grad(): + for p, v in zip(model.parameters(), update_direction_v): + p.data.add_(v) + + if prev_training_mode: + model.train() + else: + model.eval() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del hvp_v_total, hvp_v_block, hvp_g_accum, layer_hvp_accum + del vhp_dot_v_total, v_norm_sq_total + del vhp_dot_v_block, v_norm_sq_block + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. + """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + + # Version 1: g_hvp (new batch, computed at θ_t during HVP calculation) + if 'cos_v_neg_g_hvp' in results: + misc_parts.append(f"cos_v_-g_hvp:{results['cos_v_neg_g_hvp']:.4e}") + if 'g_hvp_norm' in results: + misc_parts.append(f"g_hvp_norm:{results['g_hvp_norm']:.4e}") + + # 
Version 2: g_t (original gradient that produced v) + if 'cos_v_neg_g_t' in results: + misc_parts.append(f"cos_v_-g_t:{results['cos_v_neg_g_t']:.4e}") + if 'g_t_norm' in results: + misc_parts.append(f"g_t_norm:{results['g_t_norm']:.4e}") + + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d8|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate for the AdamW optimizer") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="final learning rate as a fraction of the base learning rate") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay")
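# Illustrative post-processing sketch (not used by the run): format_comprehensive_results
# above emits space-separated "name:value" tokens (e.g. "total_sharp:3.2130e-02
# L1_sharp:3.0659e-02 ...") that get appended to the "validation loss" lines of
# training_log.txt. A minimal parser for pulling them back into a dict:
def _parse_sharpness_log_line(line):
    metrics = {}
    for token in line.split():
        name, sep, value = token.rpartition(":")
        if not sep:
            continue  # plain words such as "validation" carry no metric
        try:
            metrics[name] = float(value)
        except ValueError:
            pass  # skip malformed tokens
    return metrics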
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + parser.add_argument("--shuffle_files", action="store_true") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d8", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # Setup debugpy for remote debugging (only activates if DEBUGPY env var is set) + # setup_debugpy(rank=ddp_rank, force=True) + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + nano_GPT_qkvonorm_pure.FLASH = args.flash # Set module-level FLASH for training + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d8": GPTConfig(block_size=1024, vocab_size=50257, n_layer=8, n_head=8, n_embd=512), + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader( + args.input_bin, B, T, ddp_rank, ddp_world_size, + shuffle_files=args.shuffle_files, random_seed=args.seed + ) + val_loader = None + if 
args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests + if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + base_module = model.module if ddp else model + # If compiled, unwrap to get the original module + if hasattr(base_module, "_orig_mod"): + base_module = base_module._orig_mod + + raw_params = list(raw_model_uncompiled.parameters()) + train_params = list(base_module.parameters()) + + assert len(raw_params) == len(train_params), \ + f"Parameter count mismatch: raw_model_uncompiled has {len(raw_params)}, training model has {len(train_params)}" + for i, (rp, tp) in enumerate(zip(raw_params, train_params)): + assert rp.data_ptr() == tp.data_ptr(), \ + f"Parameter {i} has different data_ptr: raw_model_uncompiled and training model do not share parameters!" + print0(f"[Verified] raw_model_uncompiled and training model share the same {len(raw_params)} Parameter objects") + + last_training_update = None + last_training_gradient = None # Store the original gradient that produced the update + last_training_batches = None # Store ALL microbatches (x, y) for consistent HVP calculation + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
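# A toy illustration of the dim() >= 2 split described above (not part of the optimizer
# setup), assuming a small stand-in module: matmul/embedding weights fall into the decay
# group, biases and norm gains into the no-decay group.
def _toy_decay_split():
    import torch.nn as nn
    toy = nn.Sequential(nn.Embedding(10, 4), nn.Linear(4, 4), nn.LayerNorm(4))
    decay = [n for n, p in toy.named_parameters() if p.dim() >= 2]    # ['0.weight', '1.weight']
    nodecay = [n for n, p in toy.named_parameters() if p.dim() < 2]   # ['1.bias', '2.weight', '2.bias']
    return decay, nodecay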
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it, base_lr): + min_lr = base_lr * args.lr_decay_frac + cooldown_iters = int(args.num_iterations * 0.2) + # 1) Warmup: linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it + 1) / args.warmup_iters + # 3) Decay: linear decay from base_lr to min_lr in the last 
cooldown_iters steps + cooldown_start = args.num_iterations - cooldown_iters + if it >= cooldown_start: + decay_ratio = (it - cooldown_start) / cooldown_iters + return base_lr - decay_ratio * (base_lr - min_lr) + # 2) Stable: constant learning rate at base_lr + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + for optimizer in optimizers: + if 
isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + last_training_update=last_training_update, # Pass the real update captured from training + last_training_gradient=last_training_gradient, # Pass the original gradient g_t + last_training_batches=last_training_batches # Pass ALL microbatches for consistent HVP + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
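# Illustrative post-processing sketch (not part of the training loop): the per-step
# dumps written above as sharpness_step_<step>.json can be folded back into per-metric
# time series, assuming a run directory laid out as created by this script:
def _load_sharpness_series(run_dir):
    import json, re
    from pathlib import Path
    series = {}  # metric name -> sorted list of (step, value)
    for f in Path(run_dir).glob("sharpness_step_*.json"):
        m = re.search(r"sharpness_step_(\d+)\.json$", f.name)
        if m is None:
            continue
        step_num = int(m.group(1))
        with open(f) as fh:
            for name, value in json.load(fh).items():
                series.setdefault(name, []).append((step_num, value))
    for name in series:
        series[name].sort()
    return series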
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + + # Pre-check if we need to collect microbatches for sharpness analysis + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + + microbatches_this_step = [] if will_analyze_sharpness_next else None + + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + + # Store ALL microbatches for memory-efficient HVP calculation + if will_analyze_sharpness_next: + microbatches_this_step.append((x.detach().clone(), y.detach().clone())) + + if ddp: + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + + #no clipping + # norm = torch.nn.utils.clip_grad_norm_(raw_model_uncompiled.parameters(), float('inf')) + + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
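# Illustrative sketch of the Hessian-vector products that later consume the microbatches
# and gradient captured here (not executed in this loop): with create_graph=True the
# gradient stays differentiable, so H v is the gradient of <g, v>. A self-contained toy
# version on an assumed quadratic objective (for which H = I and hence Hv == v):
def _toy_hvp():
    import torch
    w = torch.randn(3, requires_grad=True)
    loss = 0.5 * (w ** 2).sum()                     # toy objective with Hessian H = I
    (g,) = torch.autograd.grad(loss, [w], create_graph=True)
    v = torch.randn(3)
    (hv,) = torch.autograd.grad(torch.dot(g, v), [w])
    return hv, v                                    # hv equals v for this objective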
+ print(raw_model_uncompiled.transformer.h[0].attn.q_w.weight[:5,:5]) + params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + # Save the original gradient g_t that will produce the update v + last_training_gradient = [ + p.grad.detach().clone() if p.grad is not None else torch.zeros_like(p) + for p in raw_model_uncompiled.parameters() + ] + # Capture ALL microbatches for consistent HVP calculation + # This ensures H is computed on the exact same objective as g_t and v + last_training_batches = microbatches_this_step # Already cloned above + else: + params_before_optimizer_step = None + last_training_batches = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p.detach() - p_before + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group()step:0 validation loss:11.020914 +step:0 train loss:11.019319 +step:1 train loss:11.018898 +step:2 train loss:11.009582 +step:3 train loss:11.003741 +step:4 train loss:10.994319 +step:5 train loss:10.981184 +step:6 train loss:10.966648 +step:7 train loss:10.946885 +step:8 train loss:10.926921 +step:9 train loss:10.906700 +step:10 train loss:10.871219 +step:11 train loss:10.847637 +step:12 train loss:10.816176 +step:13 train loss:10.781301 +step:14 train loss:10.744652 +step:15 train loss:10.701284 +step:16 train loss:10.667722 +step:17 train loss:10.623016 +step:18 train loss:10.580212 +step:19 train loss:10.526819 +step:20 train loss:10.491189 +step:21 train loss:10.436900 +step:22 train loss:10.385263 
+step:23 train loss:10.321291 +step:24 train loss:10.277889 +step:25 train loss:10.204774 +step:26 train loss:10.170509 +step:27 train loss:10.112287 +step:28 train loss:10.061612 +step:29 train loss:9.993460 +step:30 train loss:9.933224 +step:31 train loss:9.877526 +step:32 train loss:9.813191 +step:33 train loss:9.749033 +step:34 train loss:9.693710 +step:35 train loss:9.617404 +step:36 train loss:9.568756 +step:37 train loss:9.527540 +step:38 train loss:9.408077 +step:39 train loss:9.363045 +step:40 train loss:9.312853 +step:41 train loss:9.258803 +step:42 train loss:9.214275 +step:43 train loss:9.130621 +step:44 train loss:9.053677 +step:45 train loss:9.000873 +step:46 train loss:8.972203 +step:47 train loss:8.872284 +step:48 train loss:8.798306 +step:49 train loss:8.774422 +step:50 train loss:8.671303 +step:51 train loss:8.613094 +step:52 train loss:8.537926 +step:53 train loss:8.507044 +step:54 train loss:8.455448 +step:55 train loss:8.382223 +step:56 train loss:8.322659 +step:57 train loss:8.296807 +step:58 train loss:8.234465 +step:59 train loss:8.172514 +step:60 train loss:8.119673 +step:61 train loss:8.075682 +step:62 train loss:8.007008 +step:63 train loss:7.923621 +step:64 train loss:7.901201 +step:65 train loss:7.899576 +step:66 train loss:7.850436 +step:67 train loss:7.791152 +step:68 train loss:7.723941 +step:69 train loss:7.731280 +step:70 train loss:7.659507 +step:71 train loss:7.604909 +step:72 train loss:7.550514 +step:73 train loss:7.483020 +step:74 train loss:7.503796 +step:75 train loss:7.514821 +step:76 train loss:7.442623 +step:77 train loss:7.396494 +step:78 train loss:7.352963 +step:79 train loss:7.368027 +step:80 train loss:7.321649 +step:81 train loss:7.233479 +step:82 train loss:7.308549 +step:83 train loss:7.202628 +step:84 train loss:7.210100 +step:85 train loss:7.218624 +step:86 train loss:7.191931 +step:87 train loss:7.155905 +step:88 train loss:7.120907 +step:89 train loss:7.175705 +step:90 train loss:7.063250 +step:91 train loss:7.042998 +step:92 train loss:7.084099 +step:93 train loss:6.992135 +step:94 train loss:7.106468 +step:95 train loss:6.933400 +step:96 train loss:6.958757 +step:97 train loss:7.003358 +step:98 train loss:6.967579 +step:99 train loss:6.920109 +step:100 train loss:6.886863 +step:101 train loss:6.923991 +step:102 train loss:6.845167 +step:103 train loss:6.862101 +step:104 train loss:6.861997 +step:105 train loss:6.795881 +step:106 train loss:6.828051 +step:107 train loss:6.812385 +step:108 train loss:6.717745 +step:109 train loss:6.726754 +step:110 train loss:6.734091 +step:111 train loss:6.750269 +step:112 train loss:6.800958 +step:113 train loss:6.686827 +step:114 train loss:6.729537 +step:115 train loss:6.704944 +step:116 train loss:6.614262 +step:117 train loss:6.686800 +step:118 train loss:6.648927 +step:119 train loss:6.595922 +step:120 train loss:6.639727 +step:121 train loss:6.552394 +step:122 train loss:6.428242 +step:123 train loss:6.612949 +step:124 train loss:6.649893 +step:125 train loss:6.590692 +step:126 train loss:6.548303 +step:127 train loss:6.506650 +step:128 train loss:6.734271 +step:129 train loss:6.531928 +step:130 train loss:6.554492 +step:131 train loss:6.598265 +step:132 train loss:6.539042 +step:133 train loss:6.436519 +step:134 train loss:6.408859 +step:135 train loss:6.475854 +step:136 train loss:6.434643 +step:137 train loss:6.506507 +step:138 train loss:6.403322 +step:139 train loss:6.507359 +step:140 train loss:6.434649 +step:141 train loss:6.416883 +step:142 train loss:6.475286 +step:143 train 
loss:6.320526 +step:144 train loss:6.425361 +step:145 train loss:6.309249 +step:146 train loss:6.352371 +step:147 train loss:6.373145 +step:148 train loss:6.340011 +step:149 train loss:6.374669 +step:150 train loss:6.294371 +step:151 train loss:6.372187 +step:152 train loss:6.308024 +step:153 train loss:6.331081 +step:154 train loss:6.306093 +step:155 train loss:6.325250 +step:156 train loss:6.273241 +step:157 train loss:6.261198 +step:158 train loss:6.238873 +step:159 train loss:6.297297 +step:160 train loss:6.126822 +step:161 train loss:6.175305 +step:162 train loss:6.213131 +step:163 train loss:6.243212 +step:164 train loss:6.240067 +step:165 train loss:6.195261 +step:166 train loss:6.237637 +step:167 train loss:6.215435 +step:168 train loss:6.223723 +step:169 train loss:6.152148 +step:170 train loss:6.243016 +step:171 train loss:6.146731 +step:172 train loss:6.181803 +step:173 train loss:6.128670 +step:174 train loss:6.294766 +step:175 train loss:6.168622 +step:176 train loss:6.099666 +step:177 train loss:6.171862 +step:178 train loss:6.123311 +step:179 train loss:6.150420 +step:180 train loss:6.101341 +step:181 train loss:6.034968 +step:182 train loss:6.142403 +step:183 train loss:6.116555 +step:184 train loss:6.134562 +step:185 train loss:6.063186 +step:186 train loss:6.111618 +step:187 train loss:6.079042 +step:188 train loss:6.004704 +step:189 train loss:6.129032 +step:190 train loss:6.020819 +step:191 train loss:6.118569 +step:192 train loss:6.033823 +step:193 train loss:6.029682 +step:194 train loss:6.024861 +step:195 train loss:6.139820 +step:196 train loss:6.018183 +step:197 train loss:6.018018 +step:198 train loss:6.064744 +step:199 train loss:6.030278 +step:200 train loss:6.042264 +step:201 train loss:6.068335 +step:202 train loss:6.012178 +step:203 train loss:5.976916 +step:204 train loss:6.032545 +step:205 train loss:6.073302 +step:206 train loss:6.003845 +step:207 train loss:5.973257 +step:208 train loss:6.012615 +step:209 train loss:5.938142 +step:210 train loss:5.950331 +step:211 train loss:5.991072 +step:212 train loss:5.950519 +step:213 train loss:5.959386 +step:214 train loss:6.012537 +step:215 train loss:5.917422 +step:216 train loss:5.949398 +step:217 train loss:5.914557 +step:218 train loss:5.930290 +step:219 train loss:5.908162 +step:220 train loss:5.945920 +step:221 train loss:5.918236 +step:222 train loss:5.940826 +step:223 train loss:5.871080 +step:224 train loss:5.913220 +step:225 train loss:5.920226 +step:226 train loss:5.935606 +step:227 train loss:5.921627 +step:228 train loss:5.909790 +step:229 train loss:5.917291 +step:230 train loss:5.892886 +step:231 train loss:5.919023 +step:232 train loss:5.894035 +step:233 train loss:5.929536 +step:234 train loss:5.879390 +step:235 train loss:5.854733 +step:236 train loss:5.807051 +step:237 train loss:5.841582 +step:238 train loss:5.816586 +step:239 train loss:5.819132 +step:240 train loss:5.816198 +step:241 train loss:5.873385 +step:242 train loss:5.864590 +step:243 train loss:5.854676 +step:244 train loss:5.842302 +step:245 train loss:5.808291 +step:246 train loss:5.798473 +step:247 train loss:5.871245 +step:248 train loss:5.761798 +step:249 train loss:5.801194 +step:250 validation loss:5.821643 +step:250 train loss:5.752668 +step:251 train loss:5.809158 +step:252 train loss:5.792186 +step:253 train loss:5.794862 +step:254 train loss:5.798321 +step:255 train loss:5.761742 +step:256 train loss:5.661922 +step:257 train loss:5.755962 +step:258 train loss:5.822498 +step:259 train loss:5.735155 +step:260 train 
loss:5.811732 +step:261 train loss:5.732316 +step:262 train loss:5.765552 +step:263 train loss:5.716880 +step:264 train loss:5.788340 +step:265 train loss:5.682297 +step:266 train loss:5.660282 +step:267 train loss:5.738584 +step:268 train loss:5.738412 +step:269 train loss:5.657703 +step:270 train loss:5.704422 +step:271 train loss:5.739079 +step:272 train loss:5.741452 +step:273 train loss:5.659697 +step:274 train loss:5.727789 +step:275 train loss:5.699847 +step:276 train loss:5.679149 +step:277 train loss:5.670439 +step:278 train loss:5.672724 +step:279 train loss:5.646330 +step:280 train loss:5.693713 +step:281 train loss:5.682067 +step:282 train loss:5.647560 +step:283 train loss:5.649268 +step:284 train loss:5.665375 +step:285 train loss:5.674855 +step:286 train loss:5.542343 +step:287 train loss:5.507407 +step:288 train loss:5.679415 +step:289 train loss:5.612430 +step:290 train loss:5.669158 +step:291 train loss:5.660268 +step:292 train loss:5.626053 +step:293 train loss:5.659838 +step:294 train loss:5.719237 +step:295 train loss:5.637028 +step:296 train loss:5.616031 +step:297 train loss:5.575044 +step:298 train loss:5.630583 +step:299 train loss:5.621124 +step:300 train loss:5.573595 +step:301 train loss:5.583503 +step:302 train loss:5.596720 +step:303 train loss:5.593069 +step:304 train loss:5.566091 +step:305 train loss:5.605163 +step:306 train loss:5.603005 +step:307 train loss:5.578074 +step:308 train loss:5.638720 +step:309 train loss:5.553208 +step:310 train loss:5.561874 +step:311 train loss:5.401436 +step:312 train loss:5.575068 +step:313 train loss:5.529469 +step:314 train loss:5.488894 +step:315 train loss:5.555838 +step:316 train loss:5.465809 +step:317 train loss:5.549593 +step:318 train loss:5.608890 +step:319 train loss:5.528642 +step:320 train loss:5.536236 +step:321 train loss:5.501552 +step:322 train loss:5.456638 +step:323 train loss:5.549243 +step:324 train loss:5.479722 +step:325 train loss:5.513605 +step:326 train loss:5.504444 +step:327 train loss:5.477467 +step:328 train loss:5.519723 +step:329 train loss:5.468175 +step:330 train loss:5.425490 +step:331 train loss:5.455499 +step:332 train loss:5.544566 +step:333 train loss:5.461542 +step:334 train loss:5.524017 +step:335 train loss:5.406044 +step:336 train loss:5.387534 +step:337 train loss:5.376943 +step:338 train loss:5.421894 +step:339 train loss:5.463885 +step:340 train loss:5.457703 +step:341 train loss:5.391850 +step:342 train loss:5.409793 +step:343 train loss:5.426935 +step:344 train loss:5.312403 +step:345 train loss:5.460879 +step:346 train loss:5.365168 +step:347 train loss:5.366203 +step:348 train loss:5.365544 +step:349 train loss:5.293897 +step:350 train loss:5.378538 +step:351 train loss:5.344782 +step:352 train loss:5.379483 +step:353 train loss:5.330231 +step:354 train loss:5.429210 +step:355 train loss:5.378653 +step:356 train loss:5.383337 +step:357 train loss:5.309890 +step:358 train loss:5.318047 +step:359 train loss:5.370556 +step:360 train loss:5.346153 +step:361 train loss:5.342375 +step:362 train loss:5.285855 +step:363 train loss:5.388946 +step:364 train loss:5.301444 +step:365 train loss:5.280437 +step:366 train loss:5.375336 +step:367 train loss:5.324644 +step:368 train loss:5.309762 +step:369 train loss:5.338367 +step:370 train loss:5.277382 +step:371 train loss:5.299960 +step:372 train loss:5.301460 +step:373 train loss:5.243843 +step:374 train loss:5.265550 +step:375 train loss:5.264798 +step:376 train loss:5.272564 +step:377 train loss:5.276080 +step:378 train loss:5.326243 
+step:379 train loss:5.308873 +step:380 train loss:5.232356 +step:381 train loss:5.244274 +step:382 train loss:5.157475 +step:383 train loss:5.212432 +step:384 train loss:5.185880 +step:385 train loss:5.138025 +step:386 train loss:5.198814 +step:387 train loss:5.134203 +step:388 train loss:5.167668 +step:389 train loss:5.180684 +step:390 train loss:5.193630 +step:391 train loss:5.270126 +step:392 train loss:5.157729 +step:393 train loss:5.183566 +step:394 train loss:5.137642 +step:395 train loss:5.078276 +step:396 train loss:5.171164 +step:397 train loss:5.145748 +step:398 train loss:5.158826 +step:399 train loss:5.124743 +step:400 train loss:5.130764 +step:401 train loss:5.122867 +step:402 train loss:5.090757 +step:403 train loss:5.118721 +step:404 train loss:5.089623 +step:405 train loss:5.107733 +step:406 train loss:5.022135 +step:407 train loss:5.036392 +step:408 train loss:5.116864 +step:409 train loss:5.022072 +step:410 train loss:5.077397 +step:411 train loss:5.060477 +step:412 train loss:5.016803 +step:413 train loss:5.034269 +step:414 train loss:5.034101 +step:415 train loss:5.043290 +step:416 train loss:5.063387 +step:417 train loss:5.044974 +step:418 train loss:5.028772 +step:419 train loss:5.018584 +step:420 train loss:5.007085 +step:421 train loss:5.050013 +step:422 train loss:5.051408 +step:423 train loss:4.955959 +step:424 train loss:4.985878 +step:425 train loss:5.059426 +step:426 train loss:5.026524 +step:427 train loss:4.956718 +step:428 train loss:4.966442 +step:429 train loss:4.978050 +step:430 train loss:4.977908 +step:431 train loss:4.962316 +step:432 train loss:4.937092 +step:433 train loss:4.930182 +step:434 train loss:4.930386 +step:435 train loss:4.926970 +step:436 train loss:4.897174 +step:437 train loss:4.987951 +step:438 train loss:4.904163 +step:439 train loss:4.944612 +step:440 train loss:4.921162 +step:441 train loss:4.939589 +step:442 train loss:4.937042 +step:443 train loss:4.941129 +step:444 train loss:4.893933 +step:445 train loss:4.978016 +step:446 train loss:4.903849 +step:447 train loss:4.864440 +step:448 train loss:4.818271 +step:449 train loss:4.865962 +step:450 train loss:4.970491 +step:451 train loss:4.881433 +step:452 train loss:4.830107 +step:453 train loss:4.883797 +step:454 train loss:4.773158 +step:455 train loss:4.819504 +step:456 train loss:4.824333 +step:457 train loss:4.867153 +step:458 train loss:4.775534 +step:459 train loss:4.772263 +step:460 train loss:4.783955 +step:461 train loss:4.882193 +step:462 train loss:4.858613 +step:463 train loss:4.791137 +step:464 train loss:4.838860 +step:465 train loss:4.754385 +step:466 train loss:4.839330 +step:467 train loss:4.744277 +step:468 train loss:4.809006 +step:469 train loss:4.803836 +step:470 train loss:4.801016 +step:471 train loss:4.804769 +step:472 train loss:4.759147 +step:473 train loss:4.735689 +step:474 train loss:4.510305 +step:475 train loss:4.548601 +step:476 train loss:4.698626 +step:477 train loss:4.780788 +step:478 train loss:4.744438 +step:479 train loss:4.715478 +step:480 train loss:4.726923 +step:481 train loss:4.713764 +step:482 train loss:4.672766 +step:483 train loss:4.722514 +step:484 train loss:4.680862 +step:485 train loss:4.680213 +step:486 train loss:4.689015 +step:487 train loss:4.649583 +step:488 train loss:4.716647 +step:489 train loss:4.722298 +step:490 train loss:4.680300 +step:491 train loss:4.686081 +step:492 train loss:4.717659 +step:493 train loss:4.627571 +step:494 train loss:4.733436 +step:495 train loss:4.681418 +step:496 train loss:4.592419 +step:497 
train loss:4.750683 +step:498 train loss:4.615220 +step:499 train loss:4.639867 +step:500 validation loss:4.637390 total_sharp:3.2130e-02 L1_sharp:3.0659e-02 L2_sharp:1.0856e-02 L3_sharp:1.1920e-02 L4_sharp:1.0385e-02 L5_sharp:6.5158e-03 L6_sharp:6.4464e-03 L7_sharp:4.1375e-03 L8_sharp:2.7815e-03 L9_sharp:1.8787e-03 L10_sharp:1.3407e-03 L11_sharp:1.0710e-03 L12_sharp:1.0289e-03 total_fnorm:1.7029e+00 total_l1_linf:1.4800e+04 total_spectral:1.7029e+00 L1_fnorm:4.2582e-01 L2_fnorm:3.9004e-01 L3_fnorm:3.7260e-01 L4_fnorm:3.7183e-01 L5_fnorm:4.0052e-01 L6_fnorm:3.9086e-01 L7_fnorm:4.0967e-01 L8_fnorm:4.0315e-01 L9_fnorm:4.1667e-01 L10_fnorm:4.1512e-01 L11_fnorm:4.1879e-01 L12_fnorm:4.1450e-01 L1_l1linf:3.1459e-01 L2_l1linf:3.0606e-01 L3_l1linf:3.0540e-01 L4_l1linf:3.1209e-01 L5_l1linf:3.1622e-01 L6_l1linf:3.1735e-01 L7_l1linf:3.1834e-01 L8_l1linf:3.1957e-01 L9_l1linf:3.2186e-01 L10_l1linf:3.2051e-01 L11_l1linf:3.2370e-01 L12_l1linf:3.2558e-01 L1_spectral:8.6029e-03 L2_spectral:8.6015e-03 L3_spectral:8.6018e-03 L4_spectral:8.6026e-03 L5_spectral:8.6024e-03 L6_spectral:8.6070e-03 L7_spectral:8.6047e-03 L8_spectral:8.6043e-03 L9_spectral:8.6037e-03 L10_spectral:8.6043e-03 L11_spectral:8.6033e-03 L12_spectral:8.6034e-03 v_norm:1.7029e+00 cos_v_-g_hvp:5.2125e-02 g_hvp_norm:1.0063e+00 cos_v_-g_t:5.5395e-02 g_t_norm:9.4995e-01 hv_norm:1.4591e+00 cos_v_hv:3.7499e-02 hg_norm:5.8166e+01 cos_g_hg:7.1342e-01 v_par:3.7998e-03 v_perp:1.7029e+00 L1_cos_v_neg_g:5.5968e-02 L1_v_norm:4.2582e-01 L2_cos_v_neg_g:5.2849e-02 L2_v_norm:3.9004e-01 L3_cos_v_neg_g:4.8798e-02 L3_v_norm:3.7260e-01 L4_cos_v_neg_g:5.4234e-02 L4_v_norm:3.7183e-01 L5_cos_v_neg_g:5.7074e-02 L5_v_norm:4.0052e-01 L6_cos_v_neg_g:7.3785e-02 L6_v_norm:3.9086e-01 L7_cos_v_neg_g:7.6462e-02 L7_v_norm:4.0967e-01 L8_cos_v_neg_g:7.9123e-02 L8_v_norm:4.0315e-01 L9_cos_v_neg_g:7.8156e-02 L9_v_norm:4.1667e-01 L10_cos_v_neg_g:7.7254e-02 L10_v_norm:4.1512e-01 L11_cos_v_neg_g:7.3713e-02 L11_v_norm:4.1879e-01 L12_cos_v_neg_g:6.9308e-02 L12_v_norm:4.1450e-01 +step:500 train loss:4.732164 +step:501 train loss:4.574828 +step:502 train loss:4.676639 +step:503 train loss:4.665346 +step:504 train loss:4.579201 +step:505 train loss:4.606013 +step:506 train loss:4.715780 +step:507 train loss:4.542671 +step:508 train loss:4.597144 +step:509 train loss:4.593663 +step:510 train loss:4.554472 +step:511 train loss:4.603622 +step:512 train loss:4.682435 +step:513 train loss:4.590060 +step:514 train loss:4.555248 +step:515 train loss:4.591908 +step:516 train loss:4.592746 +step:517 train loss:4.551861 +step:518 train loss:4.520304 +step:519 train loss:4.555144 +step:520 train loss:4.511992 +step:521 train loss:4.605177 +step:522 train loss:4.524478 +step:523 train loss:4.544205 +step:524 train loss:4.558096 +step:525 train loss:4.636894 +step:526 train loss:4.535859 +step:527 train loss:4.558940 +step:528 train loss:4.574670 +step:529 train loss:4.479414 +step:530 train loss:4.611872 +step:531 train loss:4.495378 +step:532 train loss:4.529475 +step:533 train loss:4.487421 +step:534 train loss:4.502425 +step:535 train loss:4.511720 +step:536 train loss:4.548640 +step:537 train loss:4.463342 +step:538 train loss:4.509374 +step:539 train loss:4.450528 +step:540 train loss:4.491249 +step:541 train loss:4.540955 +step:542 train loss:4.484219 +step:543 train loss:4.476376 +step:544 train loss:4.558399 +step:545 train loss:4.470210 +step:546 train loss:4.483779 +step:547 train loss:4.529100 +step:548 train loss:4.496132 +step:549 train loss:4.442027 +step:550 train loss:4.455239 
+step:551 train loss:4.469602 +step:552 train loss:4.452959 +step:553 train loss:4.457133 +step:554 train loss:4.555522 +step:555 train loss:4.462791 +step:556 train loss:4.453988 +step:557 train loss:4.489164 +step:558 train loss:4.534521 +step:559 train loss:4.482540 +step:560 train loss:4.421073 +step:561 train loss:4.449862 +step:562 train loss:4.422944 +step:563 train loss:4.480453 +step:564 train loss:4.462117 +step:565 train loss:4.426494 +step:566 train loss:4.504919 +step:567 train loss:4.396479 +step:568 train loss:4.476239 +step:569 train loss:4.431070 +step:570 train loss:4.423812 +step:571 train loss:4.703388 +step:572 train loss:4.451790 +step:573 train loss:4.467056 +step:574 train loss:4.389927 +step:575 train loss:4.439502 +step:576 train loss:4.355710 +step:577 train loss:4.376695 +step:578 train loss:4.420111 +step:579 train loss:4.429271 +step:580 train loss:4.440206 +step:581 train loss:4.412538 +step:582 train loss:4.429062 +step:583 train loss:4.471584 +step:584 train loss:4.418801 +step:585 train loss:4.442053 +step:586 train loss:4.342820 +step:587 train loss:4.416929 +step:588 train loss:4.383541 +step:589 train loss:4.394992 +step:590 train loss:4.411103 +step:591 train loss:4.341406 +step:592 train loss:4.402871 +step:593 train loss:4.337777 +step:594 train loss:4.407256 +step:595 train loss:4.361763 +step:596 train loss:4.342898 +step:597 train loss:4.463199 +step:598 train loss:4.444216 +step:599 train loss:4.384462 +step:600 train loss:4.323305 +step:601 train loss:4.346262 +step:602 train loss:4.451830 +step:603 train loss:4.381444 +step:604 train loss:4.362867 +step:605 train loss:4.389843 +step:606 train loss:4.272132 +step:607 train loss:4.354321 +step:608 train loss:4.351230 +step:609 train loss:4.323063 +step:610 train loss:4.354472 +step:611 train loss:4.283895 +step:612 train loss:4.390725 +step:613 train loss:4.378489 +step:614 train loss:4.269578 +step:615 train loss:4.422779 +step:616 train loss:4.234723 +step:617 train loss:4.341340 +step:618 train loss:4.279766 +step:619 train loss:4.359638 +step:620 train loss:4.393004 +step:621 train loss:4.273015 +step:622 train loss:4.328726 +step:623 train loss:4.307460 +step:624 train loss:4.340859 +step:625 train loss:4.444627 +step:626 train loss:4.294858 +step:627 train loss:4.348100 +step:628 train loss:4.281349 +step:629 train loss:4.275353 +step:630 train loss:4.290542 +step:631 train loss:4.327901 +step:632 train loss:4.214264 +step:633 train loss:4.270316 +step:634 train loss:4.242515 +step:635 train loss:4.307024 +step:636 train loss:4.275398 +step:637 train loss:4.274560 +step:638 train loss:4.325326 +step:639 train loss:4.218975 +step:640 train loss:4.298141 +step:641 train loss:4.297360 +step:642 train loss:4.239267 +step:643 train loss:4.299179 +step:644 train loss:4.255074 +step:645 train loss:4.227785 +step:646 train loss:4.398387 +step:647 train loss:4.279534 +step:648 train loss:4.313675 +step:649 train loss:4.301417 +step:650 train loss:4.222666 +step:651 train loss:4.271235 +step:652 train loss:4.291797 +step:653 train loss:4.276437 +step:654 train loss:4.321856 +step:655 train loss:4.225620 +step:656 train loss:4.252447 +step:657 train loss:4.273447 +step:658 train loss:4.241481 +step:659 train loss:4.278580 +step:660 train loss:4.253771 +step:661 train loss:4.272604 +step:662 train loss:4.300527 +step:663 train loss:4.249488 +step:664 train loss:4.305145 +step:665 train loss:4.241267 +step:666 train loss:4.185526 +step:667 train loss:4.251819 +step:668 train loss:4.210974 +step:669 
train loss:4.256258 +step:670 train loss:4.300828 +step:671 train loss:4.334010 +step:672 train loss:4.231863 +step:673 train loss:4.159872 +step:674 train loss:4.276681 +step:675 train loss:4.221974 +step:676 train loss:4.225748 +step:677 train loss:4.218518 +step:678 train loss:4.300729 +step:679 train loss:4.253047 +step:680 train loss:4.167534 +step:681 train loss:4.276278 +step:682 train loss:4.207145 +step:683 train loss:4.251371 +step:684 train loss:4.223695 +step:685 train loss:4.324543 +step:686 train loss:4.218571 +step:687 train loss:4.150230 +step:688 train loss:4.235436 +step:689 train loss:4.216775 +step:690 train loss:4.222253 +step:691 train loss:4.244678 +step:692 train loss:4.120308 +step:693 train loss:4.249088 +step:694 train loss:4.243767 +step:695 train loss:4.232481 +step:696 train loss:4.267429 +step:697 train loss:4.202724 +step:698 train loss:4.188390 +step:699 train loss:4.192765 +step:700 train loss:4.245900 +step:701 train loss:4.186461 +step:702 train loss:4.147160 +step:703 train loss:4.243291 +step:704 train loss:4.212337 +step:705 train loss:4.260197 +step:706 train loss:4.210537 +step:707 train loss:4.171269 +step:708 train loss:4.208672 +step:709 train loss:4.174550 +step:710 train loss:4.225008 +step:711 train loss:4.187058 +step:712 train loss:4.136430 +step:713 train loss:4.210715 +step:714 train loss:4.139674 +step:715 train loss:4.162215 +step:716 train loss:4.168159 +step:717 train loss:4.139872 +step:718 train loss:4.230566 +step:719 train loss:4.215828 +step:720 train loss:4.184629 +step:721 train loss:4.215831 +step:722 train loss:4.153132 +step:723 train loss:4.192773 +step:724 train loss:4.161228 +step:725 train loss:4.140967 +step:726 train loss:4.175820 +step:727 train loss:4.100971 +step:728 train loss:4.264729 +step:729 train loss:4.104164 +step:730 train loss:4.179358 +step:731 train loss:4.219927 +step:732 train loss:4.097825 +step:733 train loss:4.195594 +step:734 train loss:4.146303 +step:735 train loss:4.189847 +step:736 train loss:4.171824 +step:737 train loss:4.180269 +step:738 train loss:4.226702 +step:739 train loss:4.085504 +step:740 train loss:4.189027 +step:741 train loss:4.156177 +step:742 train loss:4.124678 +step:743 train loss:4.182644 +step:744 train loss:4.150758 +step:745 train loss:4.117089 +step:746 train loss:4.174060 +step:747 train loss:4.193626 +step:748 train loss:4.184290 +step:749 train loss:4.187005 +step:750 validation loss:4.135032 +step:750 train loss:4.167243 +step:751 train loss:4.148082 +step:752 train loss:4.158070 +step:753 train loss:4.132195 +step:754 train loss:4.102779 +step:755 train loss:4.083861 +step:756 train loss:4.112113 +step:757 train loss:4.098128 +step:758 train loss:4.198669 +step:759 train loss:4.109430 +step:760 train loss:4.209637 +step:761 train loss:4.160750 +step:762 train loss:4.112668 +step:763 train loss:4.141395 +step:764 train loss:4.156448 +step:765 train loss:4.140172 +step:766 train loss:4.210681 +step:767 train loss:4.043539 +step:768 train loss:4.141605 +step:769 train loss:4.140648 +step:770 train loss:4.108849 +step:771 train loss:4.183063 +step:772 train loss:4.157364 +step:773 train loss:4.123827 +step:774 train loss:4.126593 +step:775 train loss:4.110457 +step:776 train loss:4.088274 +step:777 train loss:4.138193 +step:778 train loss:4.207539 +step:779 train loss:4.114871 +step:780 train loss:4.116380 +step:781 train loss:4.198665 +step:782 train loss:4.197795 +step:783 train loss:4.120051 +step:784 train loss:4.117209 +step:785 train loss:4.111501 +step:786 train 
loss:4.154267 +step:787 train loss:4.167194 +step:788 train loss:4.133830 +step:789 train loss:4.141312 +step:790 train loss:4.166410 +step:791 train loss:4.105467 +step:792 train loss:4.121556 +step:793 train loss:4.099326 +step:794 train loss:4.135060 +step:795 train loss:4.114413 +step:796 train loss:4.206883 +step:797 train loss:4.059089 +step:798 train loss:4.192370 +step:799 train loss:4.108930 +step:800 train loss:4.136325 +step:801 train loss:4.085148 +step:802 train loss:4.136070 +step:803 train loss:4.137622 +step:804 train loss:4.085342 +step:805 train loss:4.151332 +step:806 train loss:4.096300 +step:807 train loss:4.143819 +step:808 train loss:4.043872 +step:809 train loss:4.090723 +step:810 train loss:4.028440 +step:811 train loss:4.072788 +step:812 train loss:4.040104 +step:813 train loss:4.107237 +step:814 train loss:4.072538 +step:815 train loss:4.173122 +step:816 train loss:4.257722 +step:817 train loss:4.111720 +step:818 train loss:4.119170 +step:819 train loss:4.066696 +step:820 train loss:4.064642 +step:821 train loss:4.099452 +step:822 train loss:4.101067 +step:823 train loss:4.076411 +step:824 train loss:4.108871 +step:825 train loss:4.034986 +step:826 train loss:4.175660 +step:827 train loss:4.103065 +step:828 train loss:4.085772 +step:829 train loss:4.068327 +step:830 train loss:4.067343 +step:831 train loss:4.111395 +step:832 train loss:4.100972 +step:833 train loss:4.104225 +step:834 train loss:4.076950 +step:835 train loss:4.082245 +step:836 train loss:4.033286 +step:837 train loss:4.118643 +step:838 train loss:4.050344 +step:839 train loss:4.099447 +step:840 train loss:4.107195 +step:841 train loss:4.077148 +step:842 train loss:4.024161 +step:843 train loss:4.061693 +step:844 train loss:4.035721 +step:845 train loss:4.072018 +step:846 train loss:3.992214 +step:847 train loss:4.082346 +step:848 train loss:4.045697 +step:849 train loss:4.087036 +step:850 train loss:4.080913 +step:851 train loss:4.070946 +step:852 train loss:4.076370 +step:853 train loss:4.146090 +step:854 train loss:4.096458 +step:855 train loss:4.065321 +step:856 train loss:4.045599 +step:857 train loss:4.108268 +step:858 train loss:4.073535 +step:859 train loss:4.044381 +step:860 train loss:4.042871 +step:861 train loss:4.046267 +step:862 train loss:4.066266 +step:863 train loss:4.046103 +step:864 train loss:4.047130 +step:865 train loss:4.004506 +step:866 train loss:4.114934 +step:867 train loss:4.103059 +step:868 train loss:4.088329 +step:869 train loss:4.006936 +step:870 train loss:4.047888 +step:871 train loss:3.983245 +step:872 train loss:4.027237 +step:873 train loss:3.993068 +step:874 train loss:4.038167 +step:875 train loss:4.042494 +step:876 train loss:4.001279 +step:877 train loss:4.005086 +step:878 train loss:4.201957 +step:879 train loss:3.996634 +step:880 train loss:4.007356 +step:881 train loss:3.996343 +step:882 train loss:4.102398 +step:883 train loss:4.012769 +step:884 train loss:4.011245 +step:885 train loss:4.037498 +step:886 train loss:4.058154 +step:887 train loss:3.984098 +step:888 train loss:4.037788 +step:889 train loss:4.006952 +step:890 train loss:4.058006 +step:891 train loss:3.930737 +step:892 train loss:4.084216 +step:893 train loss:4.028533 +step:894 train loss:4.017473 +step:895 train loss:4.000865 +step:896 train loss:4.069477 +step:897 train loss:3.994454 +step:898 train loss:4.012339 +step:899 train loss:3.992231 +step:900 train loss:4.068349 +step:901 train loss:3.995851 +step:902 train loss:4.073490 +step:903 train loss:4.019595 +step:904 train loss:4.058600 
+step:905 train loss:3.989138 +step:906 train loss:4.050634 +step:907 train loss:4.058462 +step:908 train loss:4.056140 +step:909 train loss:3.996365 +step:910 train loss:4.023403 +step:911 train loss:3.971215 +step:912 train loss:3.959663 +step:913 train loss:3.990183 +step:914 train loss:4.001534 +step:915 train loss:3.970281 +step:916 train loss:4.036831 +step:917 train loss:4.128633 +step:918 train loss:4.066173 +step:919 train loss:4.022204 +step:920 train loss:4.057279 +step:921 train loss:4.005797 +step:922 train loss:3.994190 +step:923 train loss:3.998244 +step:924 train loss:4.028978 +step:925 train loss:4.005016 +step:926 train loss:4.015786 +step:927 train loss:3.980110 +step:928 train loss:3.981509 +step:929 train loss:3.998503 +step:930 train loss:4.031771 +step:931 train loss:4.024953 +step:932 train loss:4.003810 +step:933 train loss:4.107777 +step:934 train loss:4.043656 +step:935 train loss:4.036613 +step:936 train loss:3.941874 +step:937 train loss:3.949824 +step:938 train loss:3.967501 +step:939 train loss:4.009208 +step:940 train loss:3.975694 +step:941 train loss:4.059198 +step:942 train loss:3.943272 +step:943 train loss:4.030877 +step:944 train loss:3.984617 +step:945 train loss:3.916091 +step:946 train loss:4.005622 +step:947 train loss:4.043151 +step:948 train loss:4.006217 +step:949 train loss:3.992888 +step:950 train loss:3.961211 +step:951 train loss:4.047059 +step:952 train loss:3.969958 +step:953 train loss:4.041458 +step:954 train loss:4.018830 +step:955 train loss:4.005847 +step:956 train loss:4.012000 +step:957 train loss:4.058385 +step:958 train loss:4.024670 +step:959 train loss:3.965564 +step:960 train loss:4.061435 +step:961 train loss:3.984483 +step:962 train loss:4.040113 +step:963 train loss:4.052087 +step:964 train loss:4.019488 +step:965 train loss:4.026001 +step:966 train loss:3.986412 +step:967 train loss:4.014134 +step:968 train loss:4.060121 +step:969 train loss:3.981909 +step:970 train loss:3.997529 +step:971 train loss:4.042961 +step:972 train loss:3.985495 +step:973 train loss:3.979003 +step:974 train loss:3.950320 +step:975 train loss:4.059214 +step:976 train loss:3.949090 +step:977 train loss:3.973920 +step:978 train loss:4.005099 +step:979 train loss:3.955471 +step:980 train loss:3.970694 +step:981 train loss:3.933619 +step:982 train loss:4.040308 +step:983 train loss:4.069141 +step:984 train loss:4.020995 +step:985 train loss:4.013855 +step:986 train loss:4.000014 +step:987 train loss:4.025787 +step:988 train loss:3.982413 +step:989 train loss:3.918095 +step:990 train loss:3.977027 +step:991 train loss:3.990001 +step:992 train loss:4.005186 +step:993 train loss:4.031308 +step:994 train loss:4.026061 +step:995 train loss:4.099657 +step:996 train loss:4.081360 +step:997 train loss:3.977218 +step:998 train loss:4.057056 +step:999 train loss:3.951985 +step:1000 validation loss:3.925035 total_sharp:9.8095e-03 L1_sharp:9.4841e-03 L2_sharp:2.3659e-03 L3_sharp:3.1181e-03 L4_sharp:3.0118e-03 L5_sharp:2.4322e-03 L6_sharp:2.2158e-03 L7_sharp:1.7587e-03 L8_sharp:1.3749e-03 L9_sharp:8.9881e-04 L10_sharp:6.2320e-04 L11_sharp:6.3943e-04 L12_sharp:6.4282e-04 total_fnorm:2.4049e+00 total_l1_linf:2.0623e+04 total_spectral:2.4049e+00 L1_fnorm:6.0146e-01 L2_fnorm:5.3812e-01 L3_fnorm:5.2305e-01 L4_fnorm:5.4013e-01 L5_fnorm:5.7355e-01 L6_fnorm:5.7411e-01 L7_fnorm:5.8797e-01 L8_fnorm:5.8883e-01 L9_fnorm:5.9483e-01 L10_fnorm:5.9494e-01 L11_fnorm:5.9484e-01 L12_fnorm:5.9896e-01 L1_l1linf:4.3708e-01 L2_l1linf:4.4028e-01 L3_l1linf:4.4386e-01 L4_l1linf:4.0990e-01 
L5_l1linf:4.0294e-01 L6_l1linf:3.9824e-01 L7_l1linf:3.9794e-01 L8_l1linf:3.9938e-01 L9_l1linf:3.9984e-01 L10_l1linf:3.9966e-01 L11_l1linf:4.0292e-01 L12_l1linf:4.0072e-01 L1_spectral:1.2046e-02 L2_spectral:1.2047e-02 L3_spectral:1.2055e-02 L4_spectral:1.2040e-02 L5_spectral:1.2046e-02 L6_spectral:1.2047e-02 L7_spectral:1.2060e-02 L8_spectral:1.2046e-02 L9_spectral:1.2043e-02 L10_spectral:1.2050e-02 L11_spectral:1.2043e-02 L12_spectral:1.2047e-02 v_norm:2.4049e+00 cos_v_-g_hvp:5.6631e-02 g_hvp_norm:5.4072e-01 cos_v_-g_t:6.3760e-02 g_t_norm:4.8082e-01 hv_norm:8.0541e-01 cos_v_hv:2.9291e-02 hg_norm:2.5011e+01 cos_g_hg:3.9915e-01 v_par:8.2727e-03 v_perp:2.4049e+00 L1_cos_v_neg_g:5.3137e-02 L1_v_norm:6.0146e-01 L2_cos_v_neg_g:4.0903e-02 L2_v_norm:5.3812e-01 L3_cos_v_neg_g:3.9193e-02 L3_v_norm:5.2305e-01 L4_cos_v_neg_g:4.9304e-02 L4_v_norm:5.4013e-01 L5_cos_v_neg_g:5.6078e-02 L5_v_norm:5.7355e-01 L6_cos_v_neg_g:6.0303e-02 L6_v_norm:5.7411e-01 L7_cos_v_neg_g:6.0509e-02 L7_v_norm:5.8797e-01 L8_cos_v_neg_g:6.6795e-02 L8_v_norm:5.8883e-01 L9_cos_v_neg_g:6.7395e-02 L9_v_norm:5.9483e-01 L10_cos_v_neg_g:6.9913e-02 L10_v_norm:5.9494e-01 L11_cos_v_neg_g:7.8917e-02 L11_v_norm:5.9484e-01 L12_cos_v_neg_g:8.5144e-02 L12_v_norm:5.9896e-01 +step:1000 train loss:4.021703 +step:1001 train loss:3.989363 +step:1002 train loss:3.916237 +step:1003 train loss:3.962654 +step:1004 train loss:3.920994 +step:1005 train loss:4.021123 +step:1006 train loss:3.995099 +step:1007 train loss:4.046372 +step:1008 train loss:3.917477 +step:1009 train loss:4.036783 +step:1010 train loss:4.002403 +step:1011 train loss:3.965806 +step:1012 train loss:3.958068 +step:1013 train loss:3.974015 +step:1014 train loss:3.945999 +step:1015 train loss:3.912701 +step:1016 train loss:4.008874 +step:1017 train loss:4.090768 +step:1018 train loss:4.061246 +step:1019 train loss:3.909638 +step:1020 train loss:4.015245 +step:1021 train loss:3.942534 +step:1022 train loss:3.962161 +step:1023 train loss:3.937722 +step:1024 train loss:3.923211 +step:1025 train loss:3.953424 +step:1026 train loss:3.937197 +step:1027 train loss:3.920708 +step:1028 train loss:3.948052 +step:1029 train loss:3.886089 +step:1030 train loss:3.992720 +step:1031 train loss:3.972666 +step:1032 train loss:3.984678 +step:1033 train loss:3.974416 +step:1034 train loss:3.985719 +step:1035 train loss:4.092209 +step:1036 train loss:4.088765 +step:1037 train loss:3.929237 +step:1038 train loss:3.942857 +step:1039 train loss:3.962218 +step:1040 train loss:3.991882 +step:1041 train loss:4.024231 +step:1042 train loss:3.982345 +step:1043 train loss:3.941665 +step:1044 train loss:3.913799 +step:1045 train loss:3.930079 +step:1046 train loss:3.943474 +step:1047 train loss:3.966439 +step:1048 train loss:4.043380 +step:1049 train loss:4.030916 +step:1050 train loss:4.016534 +step:1051 train loss:4.117812 +step:1052 train loss:3.977043 +step:1053 train loss:4.057892 +step:1054 train loss:3.970869 +step:1055 train loss:3.921040 +step:1056 train loss:4.038753 +step:1057 train loss:3.947023 +step:1058 train loss:3.977385 +step:1059 train loss:3.976696 +step:1060 train loss:3.947655 +step:1061 train loss:3.943568 +step:1062 train loss:3.953509 +step:1063 train loss:3.963355 +step:1064 train loss:3.942894 +step:1065 train loss:3.959924 +step:1066 train loss:3.933980 +step:1067 train loss:3.988414 +step:1068 train loss:3.939008 +step:1069 train loss:3.937071 +step:1070 train loss:3.898194 +step:1071 train loss:3.985256 +step:1072 train loss:3.971173 +step:1073 train loss:3.987393 +step:1074 train 
loss:3.915405 +step:1075 train loss:3.976775 +step:1076 train loss:3.884488 +step:1077 train loss:3.971936 +step:1078 train loss:3.938645 +step:1079 train loss:3.940648 +step:1080 train loss:3.977679 +step:1081 train loss:3.948987 +step:1082 train loss:3.980662 +step:1083 train loss:3.964264 +step:1084 train loss:4.046206 +step:1085 train loss:3.945026 +step:1086 train loss:3.974039 +step:1087 train loss:3.942722 +step:1088 train loss:3.961462 +step:1089 train loss:3.887745 +step:1090 train loss:3.931640 +step:1091 train loss:3.898754 +step:1092 train loss:3.913740 +step:1093 train loss:3.951436 +step:1094 train loss:4.031929 +step:1095 train loss:3.883228 +step:1096 train loss:3.965021 +step:1097 train loss:3.936563 +step:1098 train loss:3.907467 +step:1099 train loss:3.915751 +step:1100 train loss:3.943927 +step:1101 train loss:3.875426 +step:1102 train loss:3.950786 +step:1103 train loss:3.959966 +step:1104 train loss:3.977454 +step:1105 train loss:3.942590 +step:1106 train loss:3.952970 +step:1107 train loss:3.882633 +step:1108 train loss:3.896504 +step:1109 train loss:3.937352 +step:1110 train loss:3.928073 +step:1111 train loss:3.884083 +step:1112 train loss:3.928511 +step:1113 train loss:3.895820 +step:1114 train loss:3.905240 +step:1115 train loss:3.917282 +step:1116 train loss:3.960925 +step:1117 train loss:3.901067 +step:1118 train loss:3.890623 +step:1119 train loss:3.920154 +step:1120 train loss:3.874755 +step:1121 train loss:3.931610 +step:1122 train loss:3.951665 +step:1123 train loss:3.922936 +step:1124 train loss:3.934715 +step:1125 train loss:3.914538 +step:1126 train loss:3.906749 +step:1127 train loss:3.874285 +step:1128 train loss:3.906052 +step:1129 train loss:3.851130 +step:1130 train loss:3.957535 +step:1131 train loss:3.904523 +step:1132 train loss:4.008268 +step:1133 train loss:3.978054 +step:1134 train loss:3.908361 +step:1135 train loss:3.912686 +step:1136 train loss:3.950147 +step:1137 train loss:3.877919 +step:1138 train loss:3.976588 +step:1139 train loss:3.904893 +step:1140 train loss:3.913727 +step:1141 train loss:3.888327 +step:1142 train loss:3.881800 +step:1143 train loss:3.881592 +step:1144 train loss:3.966771 +step:1145 train loss:3.938313 +step:1146 train loss:3.995801 +step:1147 train loss:3.922635 +step:1148 train loss:3.940880 +step:1149 train loss:3.920029 +step:1150 train loss:3.942004 +step:1151 train loss:3.865977 +step:1152 train loss:3.912738 +step:1153 train loss:3.869403 +step:1154 train loss:3.833734 +step:1155 train loss:3.886307 +step:1156 train loss:3.899545 +step:1157 train loss:3.798637 +step:1158 train loss:3.898276 +step:1159 train loss:3.861210 +step:1160 train loss:3.824837 +step:1161 train loss:3.925493 +step:1162 train loss:3.931134 +step:1163 train loss:3.899031 +step:1164 train loss:3.986743 +step:1165 train loss:3.871783 +step:1166 train loss:3.816842 +step:1167 train loss:3.885561 +step:1168 train loss:3.898555 +step:1169 train loss:3.937628 +step:1170 train loss:3.882366 +step:1171 train loss:3.898277 +step:1172 train loss:3.840793 +step:1173 train loss:3.852227 +step:1174 train loss:3.890272 +step:1175 train loss:3.953773 +step:1176 train loss:3.854235 +step:1177 train loss:3.869246 +step:1178 train loss:3.886173 +step:1179 train loss:3.879244 +step:1180 train loss:3.865499 +step:1181 train loss:3.878434 +step:1182 train loss:3.842490 +step:1183 train loss:3.838502 +step:1184 train loss:3.862018 +step:1185 train loss:3.863704 +step:1186 train loss:3.941167 +step:1187 train loss:3.841283 +step:1188 train loss:3.970578 
+step:1189 train loss:3.834824 +step:1190 train loss:3.869249 +step:1191 train loss:3.820386 +step:1192 train loss:3.838201 +step:1193 train loss:3.936875 +step:1194 train loss:3.909017 +step:1195 train loss:3.959964 +step:1196 train loss:3.851668 +step:1197 train loss:3.859658 +step:1198 train loss:3.819346 +step:1199 train loss:3.842216 +step:1200 train loss:3.860252 +step:1201 train loss:3.769607 +step:1202 train loss:3.938175 +step:1203 train loss:3.778680 +step:1204 train loss:3.839852 +step:1205 train loss:3.955287 +step:1206 train loss:3.871864 +step:1207 train loss:3.835895 +step:1208 train loss:3.856254 +step:1209 train loss:3.898940 +step:1210 train loss:3.838355 +step:1211 train loss:3.882010 +step:1212 train loss:3.844891 +step:1213 train loss:3.812047 +step:1214 train loss:3.868626 +step:1215 train loss:3.910086 +step:1216 train loss:3.850602 +step:1217 train loss:3.865091 +step:1218 train loss:3.889892 +step:1219 train loss:3.830344 +step:1220 train loss:3.975177 +step:1221 train loss:3.911283 +step:1222 train loss:3.848860 +step:1223 train loss:3.787231 +step:1224 train loss:3.853147 +step:1225 train loss:3.890268 +step:1226 train loss:3.821136 +step:1227 train loss:3.857376 +step:1228 train loss:3.890246 +step:1229 train loss:3.823320 +step:1230 train loss:3.802452 +step:1231 train loss:3.861108 +step:1232 train loss:3.819946 +step:1233 train loss:3.829432 +step:1234 train loss:3.835024 +step:1235 train loss:3.864632 +step:1236 train loss:3.814527 +step:1237 train loss:3.827530 +step:1238 train loss:3.827865 +step:1239 train loss:3.881396 +step:1240 train loss:3.826529 +step:1241 train loss:3.853686 +step:1242 train loss:3.828950 +step:1243 train loss:3.823207 +step:1244 train loss:3.852138 +step:1245 train loss:3.864925 +step:1246 train loss:3.832647 +step:1247 train loss:3.780670 +step:1248 train loss:3.816674 +step:1249 train loss:3.870758 +step:1250 validation loss:3.814690 +step:1250 train loss:3.805945 +step:1251 train loss:3.873004 +step:1252 train loss:3.867452 +step:1253 train loss:3.840669 +step:1254 train loss:3.863753 +step:1255 train loss:3.817963 +step:1256 train loss:3.839991 +step:1257 train loss:3.867362 +step:1258 train loss:3.784937 +step:1259 train loss:3.853414 +step:1260 train loss:3.816409 +step:1261 train loss:3.905412 +step:1262 train loss:3.799374 +step:1263 train loss:3.934300 +step:1264 train loss:3.862268 +step:1265 train loss:3.904467 +step:1266 train loss:3.866479 +step:1267 train loss:3.834619 +step:1268 train loss:3.869950 +step:1269 train loss:3.895439 +step:1270 train loss:3.727451 +step:1271 train loss:3.842857 +step:1272 train loss:3.831950 +step:1273 train loss:3.837514 +step:1274 train loss:3.862473 +step:1275 train loss:3.929031 +step:1276 train loss:3.781018 +step:1277 train loss:3.879709 +step:1278 train loss:3.820533 +step:1279 train loss:3.806144 +step:1280 train loss:3.893370 +step:1281 train loss:3.893002 +step:1282 train loss:3.841995 +step:1283 train loss:4.062478 +step:1284 train loss:3.950936 +step:1285 train loss:3.897947 +step:1286 train loss:3.828609 +step:1287 train loss:3.868905 +step:1288 train loss:3.874991 +step:1289 train loss:3.890880 +step:1290 train loss:3.869302 +step:1291 train loss:3.935125 +step:1292 train loss:3.828219 +step:1293 train loss:3.877692 +step:1294 train loss:3.858461 +step:1295 train loss:3.831107 +step:1296 train loss:3.824683 +step:1297 train loss:3.807395 +step:1298 train loss:3.869069 +step:1299 train loss:3.891365 +step:1300 train loss:3.876106 +step:1301 train loss:3.823712 +step:1302 
train loss:3.821025 +step:1303 train loss:3.830167 +step:1304 train loss:3.807135 +step:1305 train loss:3.828697 +step:1306 train loss:3.903426 +step:1307 train loss:3.767056 +step:1308 train loss:3.840054 +step:1309 train loss:3.847357 +step:1310 train loss:3.900259 +step:1311 train loss:3.784889 +step:1312 train loss:3.854870 +step:1313 train loss:3.840837 +step:1314 train loss:3.763280 +step:1315 train loss:3.809502 +step:1316 train loss:3.845226 +step:1317 train loss:3.826625 +step:1318 train loss:3.904652 +step:1319 train loss:3.917742 +step:1320 train loss:3.871321 +step:1321 train loss:3.844792 +step:1322 train loss:3.903639 +step:1323 train loss:3.786283 +step:1324 train loss:3.856198 +step:1325 train loss:3.858586 +step:1326 train loss:3.840213 +step:1327 train loss:3.789441 +step:1328 train loss:3.834180 +step:1329 train loss:3.861636 +step:1330 train loss:3.811738 +step:1331 train loss:3.784529 +step:1332 train loss:3.868357 +step:1333 train loss:3.832072 +step:1334 train loss:3.736307 +step:1335 train loss:3.791865 +step:1336 train loss:3.833448 +step:1337 train loss:3.902365 +step:1338 train loss:3.830308 +step:1339 train loss:3.809765 +step:1340 train loss:3.847353 +step:1341 train loss:3.822894 +step:1342 train loss:3.832649 +step:1343 train loss:3.801900 +step:1344 train loss:3.841720 +step:1345 train loss:3.841766 +step:1346 train loss:3.766851 +step:1347 train loss:3.865609 +step:1348 train loss:3.992064 +step:1349 train loss:3.783981 +step:1350 train loss:3.765267 +step:1351 train loss:3.896698 +step:1352 train loss:3.862642 +step:1353 train loss:3.858237 +step:1354 train loss:3.788526 +step:1355 train loss:3.845017 +step:1356 train loss:3.772288 +step:1357 train loss:3.840303 +step:1358 train loss:3.846685 +step:1359 train loss:3.797988 +step:1360 train loss:3.813814 +step:1361 train loss:3.859198 +step:1362 train loss:3.894123 +step:1363 train loss:3.844172 +step:1364 train loss:3.834138 +step:1365 train loss:3.759410 +step:1366 train loss:4.142302 +step:1367 train loss:3.880728 +step:1368 train loss:3.832476 +step:1369 train loss:3.860290 +step:1370 train loss:3.849071 +step:1371 train loss:3.848876 +step:1372 train loss:3.802432 +step:1373 train loss:3.776878 +step:1374 train loss:3.852706 +step:1375 train loss:3.843557 +step:1376 train loss:3.752270 +step:1377 train loss:3.882615 +step:1378 train loss:3.788450 +step:1379 train loss:3.828760 +step:1380 train loss:3.858811 +step:1381 train loss:3.786273 +step:1382 train loss:3.836526 +step:1383 train loss:3.777431 +step:1384 train loss:3.874687 +step:1385 train loss:3.863973 +step:1386 train loss:3.899505 +step:1387 train loss:3.773068 +step:1388 train loss:3.788810 +step:1389 train loss:3.865972 +step:1390 train loss:3.824645 +step:1391 train loss:3.798107 +step:1392 train loss:3.855805 +step:1393 train loss:3.900326 +step:1394 train loss:3.767226 +step:1395 train loss:3.857470 +step:1396 train loss:3.781123 +step:1397 train loss:3.824038 +step:1398 train loss:3.828940 +step:1399 train loss:3.803858 +step:1400 train loss:3.866556 +step:1401 train loss:3.769755 +step:1402 train loss:3.825961 +step:1403 train loss:3.795181 +step:1404 train loss:3.791443 +step:1405 train loss:3.851365 +step:1406 train loss:3.787169 +step:1407 train loss:3.771900 +step:1408 train loss:3.800326 +step:1409 train loss:3.784161 +step:1410 train loss:3.852394 +step:1411 train loss:3.830801 +step:1412 train loss:3.854336 +step:1413 train loss:3.831918 +step:1414 train loss:3.831692 +step:1415 train loss:3.800657 +step:1416 train loss:3.802717 
+step:1417 train loss:3.956099 +step:1418 train loss:3.796133 +step:1419 train loss:3.773763 +step:1420 train loss:3.799163 +step:1421 train loss:3.815940 +step:1422 train loss:3.820113 +step:1423 train loss:3.779164 +step:1424 train loss:3.826687 +step:1425 train loss:3.813695 +step:1426 train loss:3.798332 +step:1427 train loss:3.888079 +step:1428 train loss:3.838029 +step:1429 train loss:3.800356 +step:1430 train loss:3.859787 +step:1431 train loss:3.930435 +step:1432 train loss:3.794632 +step:1433 train loss:3.891017 +step:1434 train loss:3.778083 +step:1435 train loss:3.853966 +step:1436 train loss:3.813873 +step:1437 train loss:3.786793 +step:1438 train loss:3.948159 +step:1439 train loss:3.776744 +step:1440 train loss:3.793516 +step:1441 train loss:3.822322 +step:1442 train loss:3.722175 +step:1443 train loss:3.833505 +step:1444 train loss:3.789243 +step:1445 train loss:3.848968 +step:1446 train loss:3.778109 +step:1447 train loss:3.805656 +step:1448 train loss:3.817619 +step:1449 train loss:3.778848 +step:1450 train loss:3.851737 +step:1451 train loss:3.788134 +step:1452 train loss:3.847020 +step:1453 train loss:3.838301 +step:1454 train loss:3.751156 +step:1455 train loss:3.820108 +step:1456 train loss:3.729380 +step:1457 train loss:3.750406 +step:1458 train loss:3.796484 +step:1459 train loss:3.736774 +step:1460 train loss:3.783056 +step:1461 train loss:3.798783 +step:1462 train loss:3.752167 +step:1463 train loss:3.836684 +step:1464 train loss:3.794047 +step:1465 train loss:3.806697 +step:1466 train loss:3.806733 +step:1467 train loss:3.766087 +step:1468 train loss:3.816240 +step:1469 train loss:3.823775 +step:1470 train loss:3.765066 +step:1471 train loss:3.886783 +step:1472 train loss:3.774059 +step:1473 train loss:3.793925 +step:1474 train loss:3.789566 +step:1475 train loss:3.785625 +step:1476 train loss:3.789559 +step:1477 train loss:3.774413 +step:1478 train loss:3.821382 +step:1479 train loss:3.740500 +step:1480 train loss:3.802065 +step:1481 train loss:3.818226 +step:1482 train loss:3.771680 +step:1483 train loss:3.762818 +step:1484 train loss:3.822590 +step:1485 train loss:3.828735 +step:1486 train loss:3.816553 +step:1487 train loss:3.864266 +step:1488 train loss:3.817995 +step:1489 train loss:3.776848 +step:1490 train loss:3.811619 +step:1491 train loss:3.699927 +step:1492 train loss:3.816494 +step:1493 train loss:3.822976 +step:1494 train loss:3.706479 +step:1495 train loss:3.740683 +step:1496 train loss:3.820553 +step:1497 train loss:3.799672 +step:1498 train loss:3.779215 +step:1499 train loss:3.800947 +step:1500 validation loss:3.736207 total_sharp:7.7047e-03 L1_sharp:7.6679e-03 L2_sharp:1.1072e-03 L3_sharp:2.4026e-03 L4_sharp:2.2528e-03 L5_sharp:1.7060e-03 L6_sharp:1.7586e-03 L7_sharp:1.8746e-03 L8_sharp:1.3936e-03 L9_sharp:9.5385e-04 L10_sharp:5.8682e-04 L11_sharp:6.3575e-04 L12_sharp:5.8671e-04 total_fnorm:2.4077e+00 total_l1_linf:2.0594e+04 total_spectral:2.4077e+00 L1_fnorm:5.9621e-01 L2_fnorm:5.3735e-01 L3_fnorm:5.1977e-01 L4_fnorm:5.4479e-01 L5_fnorm:5.7946e-01 L6_fnorm:5.8472e-01 L7_fnorm:5.9260e-01 L8_fnorm:5.9594e-01 L9_fnorm:5.9865e-01 L10_fnorm:6.0022e-01 L11_fnorm:6.0041e-01 L12_fnorm:6.0270e-01 L1_l1linf:4.3087e-01 L2_l1linf:4.0985e-01 L3_l1linf:4.0660e-01 L4_l1linf:3.9186e-01 L5_l1linf:4.0489e-01 L6_l1linf:4.0978e-01 L7_l1linf:4.1070e-01 L8_l1linf:4.1171e-01 L9_l1linf:4.1213e-01 L10_l1linf:4.0896e-01 L11_l1linf:4.0413e-01 L12_l1linf:3.9767e-01 L1_spectral:1.2046e-02 L2_spectral:1.2045e-02 L3_spectral:1.2050e-02 L4_spectral:1.2049e-02 
L5_spectral:1.2048e-02 L6_spectral:1.2061e-02 L7_spectral:1.2048e-02 L8_spectral:1.2046e-02 L9_spectral:1.2046e-02 L10_spectral:1.2041e-02 L11_spectral:1.2045e-02 L12_spectral:1.2043e-02 v_norm:2.4077e+00 cos_v_-g_hvp:5.2536e-02 g_hvp_norm:5.5202e-01 cos_v_-g_t:5.9543e-02 g_t_norm:4.8901e-01 hv_norm:7.6016e-01 cos_v_hv:2.4404e-02 hg_norm:1.7083e+01 cos_g_hg:6.2579e-01 v_par:7.7022e-03 v_perp:2.4077e+00 L1_cos_v_neg_g:3.6811e-02 L1_v_norm:5.9621e-01 L2_cos_v_neg_g:4.2186e-02 L2_v_norm:5.3735e-01 L3_cos_v_neg_g:4.3434e-02 L3_v_norm:5.1977e-01 L4_cos_v_neg_g:4.8125e-02 L4_v_norm:5.4479e-01 L5_cos_v_neg_g:5.5601e-02 L5_v_norm:5.7946e-01 L6_cos_v_neg_g:5.7648e-02 L6_v_norm:5.8472e-01 L7_cos_v_neg_g:5.8199e-02 L7_v_norm:5.9260e-01 L8_cos_v_neg_g:5.9538e-02 L8_v_norm:5.9594e-01 L9_cos_v_neg_g:6.1512e-02 L9_v_norm:5.9865e-01 L10_cos_v_neg_g:6.7433e-02 L10_v_norm:6.0022e-01 L11_cos_v_neg_g:7.5942e-02 L11_v_norm:6.0041e-01 L12_cos_v_neg_g:9.1945e-02 L12_v_norm:6.0270e-01 +step:1500 train loss:3.771475 +step:1501 train loss:3.800450 +step:1502 train loss:3.759738 +step:1503 train loss:3.889473 +step:1504 train loss:3.811985 +step:1505 train loss:3.812264 +step:1506 train loss:3.791785 +step:1507 train loss:3.871920 +step:1508 train loss:3.811922 +step:1509 train loss:3.832445 +step:1510 train loss:3.811520 +step:1511 train loss:3.780662 +step:1512 train loss:3.747830 +step:1513 train loss:3.776507 +step:1514 train loss:3.838044 +step:1515 train loss:3.826273 +step:1516 train loss:3.749695 +step:1517 train loss:3.744349 +step:1518 train loss:3.765339 +step:1519 train loss:3.804131 +step:1520 train loss:3.794093 +step:1521 train loss:3.754174 +step:1522 train loss:3.804331 +step:1523 train loss:3.760104 +step:1524 train loss:3.741833 +step:1525 train loss:3.848359 +step:1526 train loss:3.819778 +step:1527 train loss:3.878581 +step:1528 train loss:3.875426 +step:1529 train loss:3.816689 +step:1530 train loss:3.777494 +step:1531 train loss:3.806563 +step:1532 train loss:3.858202 +step:1533 train loss:3.876167 +step:1534 train loss:3.815699 +step:1535 train loss:3.812826 +step:1536 train loss:3.749450 +step:1537 train loss:3.874563 +step:1538 train loss:3.744707 +step:1539 train loss:3.860457 +step:1540 train loss:3.828038 +step:1541 train loss:3.852723 +step:1542 train loss:3.767627 +step:1543 train loss:3.914989 +step:1544 train loss:3.923436 +step:1545 train loss:3.760223 +step:1546 train loss:3.829922 +step:1547 train loss:3.805828 +step:1548 train loss:3.769361 +step:1549 train loss:3.776788 +step:1550 train loss:3.745177 +step:1551 train loss:3.786227 +step:1552 train loss:3.819871 +step:1553 train loss:3.908381 +step:1554 train loss:3.780143 +step:1555 train loss:3.718892 +step:1556 train loss:3.768956 +step:1557 train loss:3.812366 +step:1558 train loss:3.805999 +step:1559 train loss:3.919782 +step:1560 train loss:3.849578 +step:1561 train loss:3.797922 +step:1562 train loss:3.839130 +step:1563 train loss:3.796000 +step:1564 train loss:3.758195 +step:1565 train loss:3.844894 +step:1566 train loss:3.764612 +step:1567 train loss:3.775113 +step:1568 train loss:4.067455 +step:1569 train loss:3.833366 +step:1570 train loss:4.029431 +step:1571 train loss:3.802198 +step:1572 train loss:3.790394 +step:1573 train loss:3.838931 +step:1574 train loss:3.760678 +step:1575 train loss:3.842839 +step:1576 train loss:3.765834 +step:1577 train loss:3.830705 +step:1578 train loss:3.905460 +step:1579 train loss:3.855502 +step:1580 train loss:3.829566 +step:1581 train loss:3.816749 +step:1582 train loss:3.771287 
+step:1583 train loss:3.886929 +step:1584 train loss:3.863863 +step:1585 train loss:3.761939 +step:1586 train loss:3.827426 +step:1587 train loss:3.840191 +step:1588 train loss:3.770201 +step:1589 train loss:3.798172 +step:1590 train loss:3.848909 +step:1591 train loss:3.797508 +step:1592 train loss:3.773748 +step:1593 train loss:3.864941 +step:1594 train loss:3.779202 +step:1595 train loss:3.764270 +step:1596 train loss:3.769887 +step:1597 train loss:3.772387 +step:1598 train loss:3.779800 +step:1599 train loss:3.725838 +step:1600 train loss:3.794707 +step:1601 train loss:3.802020 +step:1602 train loss:3.814080 +step:1603 train loss:3.737683 +step:1604 train loss:3.776580 +step:1605 train loss:3.809633 +step:1606 train loss:3.721995 +step:1607 train loss:3.725900 +step:1608 train loss:3.776173 +step:1609 train loss:3.800530 +step:1610 train loss:3.783615 +step:1611 train loss:3.763131 +step:1612 train loss:3.739817 +step:1613 train loss:3.810255 +step:1614 train loss:3.775970 +step:1615 train loss:3.778901 +step:1616 train loss:3.744931 +step:1617 train loss:3.803637 +step:1618 train loss:3.830274 +step:1619 train loss:3.783049 +step:1620 train loss:3.785288 +step:1621 train loss:3.790518 +step:1622 train loss:3.768245 +step:1623 train loss:3.802678 +step:1624 train loss:3.808791 +step:1625 train loss:3.866117 +step:1626 train loss:3.816241 +step:1627 train loss:3.859368 +step:1628 train loss:3.831538 +step:1629 train loss:3.747633 +step:1630 train loss:3.757469 +step:1631 train loss:3.879837 +step:1632 train loss:3.768525 +step:1633 train loss:3.840245 +step:1634 train loss:3.830968 +step:1635 train loss:3.732282 +step:1636 train loss:3.780254 +step:1637 train loss:3.740384 +step:1638 train loss:3.796351 +step:1639 train loss:3.885861 +step:1640 train loss:3.766625 +step:1641 train loss:3.850104 +step:1642 train loss:3.773758 +step:1643 train loss:3.843068 +step:1644 train loss:3.758658 +step:1645 train loss:3.828917 +step:1646 train loss:3.899740 +step:1647 train loss:3.776409 +step:1648 train loss:3.804936 +step:1649 train loss:3.778193 +step:1650 train loss:3.753775 +step:1651 train loss:3.753243 +step:1652 train loss:3.784735 +step:1653 train loss:3.775464 +step:1654 train loss:3.852838 +step:1655 train loss:3.810236 +step:1656 train loss:3.707901 +step:1657 train loss:3.785722 +step:1658 train loss:3.788172 +step:1659 train loss:3.751626 +step:1660 train loss:3.732736 +step:1661 train loss:3.781678 +step:1662 train loss:3.770746 +step:1663 train loss:3.763598 +step:1664 train loss:3.763140 +step:1665 train loss:3.786510 +step:1666 train loss:3.828269 +step:1667 train loss:3.785879 +step:1668 train loss:3.892372 +step:1669 train loss:3.765756 +step:1670 train loss:3.756505 +step:1671 train loss:3.728485 +step:1672 train loss:3.740937 +step:1673 train loss:3.676522 +step:1674 train loss:3.712399 +step:1675 train loss:3.741871 +step:1676 train loss:3.748979 +step:1677 train loss:3.822553 +step:1678 train loss:3.762406 +step:1679 train loss:3.775900 +step:1680 train loss:3.783034 +step:1681 train loss:3.791878 +step:1682 train loss:3.795855 +step:1683 train loss:3.732777 +step:1684 train loss:3.830116 +step:1685 train loss:3.811367 +step:1686 train loss:3.735686 +step:1687 train loss:3.716635 +step:1688 train loss:3.801335 +step:1689 train loss:3.795315 +step:1690 train loss:3.873795 +step:1691 train loss:3.778568 +step:1692 train loss:3.828403 +step:1693 train loss:3.744183 +step:1694 train loss:3.727743 +step:1695 train loss:3.770023 +step:1696 train loss:3.814966 +step:1697 train 
loss:3.907519 +step:1698 train loss:3.786565 +step:1699 train loss:3.705886 +step:1700 train loss:3.785583 +step:1701 train loss:3.738216 +step:1702 train loss:3.718250 +step:1703 train loss:3.772228 +step:1704 train loss:3.765617 +step:1705 train loss:3.726222 +step:1706 train loss:3.741513 +step:1707 train loss:3.808593 +step:1708 train loss:3.787045 +step:1709 train loss:3.759724 +step:1710 train loss:3.758259 +step:1711 train loss:3.779050 +step:1712 train loss:3.791485 +step:1713 train loss:3.731531 +step:1714 train loss:3.762512 +step:1715 train loss:3.726048 +step:1716 train loss:3.760055 +step:1717 train loss:3.694096 +step:1718 train loss:3.802276 +step:1719 train loss:3.752463 +step:1720 train loss:3.736050 +step:1721 train loss:3.743400 +step:1722 train loss:3.763119 +step:1723 train loss:3.747839 +step:1724 train loss:3.814950 +step:1725 train loss:3.764317 +step:1726 train loss:3.762865 +step:1727 train loss:3.814417 +step:1728 train loss:3.685713 +step:1729 train loss:3.710766 +step:1730 train loss:3.722324 +step:1731 train loss:3.759063 +step:1732 train loss:3.742957 +step:1733 train loss:3.826363 +step:1734 train loss:3.713630 +step:1735 train loss:3.813884 +step:1736 train loss:3.704760 +step:1737 train loss:3.783687 +step:1738 train loss:3.687744 +step:1739 train loss:3.844489 +step:1740 train loss:3.706757 +step:1741 train loss:3.715240 +step:1742 train loss:3.761163 +step:1743 train loss:3.850603 +step:1744 train loss:3.738038 +step:1745 train loss:3.718294 +step:1746 train loss:3.932524 +step:1747 train loss:3.729862 +step:1748 train loss:3.740381 +step:1749 train loss:3.709924 +step:1750 validation loss:3.701236 +step:1750 train loss:3.753100 +step:1751 train loss:3.692944 +step:1752 train loss:3.771334 +step:1753 train loss:3.767509 +step:1754 train loss:3.755114 +step:1755 train loss:3.792370 +step:1756 train loss:3.726852 +step:1757 train loss:3.743072 +step:1758 train loss:3.754986 +step:1759 train loss:3.739892 +step:1760 train loss:3.742311 +step:1761 train loss:3.717199 +step:1762 train loss:3.771749 +step:1763 train loss:3.696342 +step:1764 train loss:3.786718 +step:1765 train loss:3.718737 +step:1766 train loss:3.772421 +step:1767 train loss:3.717883 +step:1768 train loss:3.738352 +step:1769 train loss:3.701547 +step:1770 train loss:3.761019 +step:1771 train loss:3.741000 +step:1772 train loss:3.785459 +step:1773 train loss:3.732126 +step:1774 train loss:3.800797 +step:1775 train loss:3.747374 +step:1776 train loss:3.739760 +step:1777 train loss:3.717580 +step:1778 train loss:3.697330 +step:1779 train loss:3.771371 +step:1780 train loss:3.712956 +step:1781 train loss:3.764873 +step:1782 train loss:3.786853 +step:1783 train loss:3.702185 +step:1784 train loss:3.721326 +step:1785 train loss:3.799609 +step:1786 train loss:3.760199 +step:1787 train loss:3.733093 +step:1788 train loss:3.704258 +step:1789 train loss:3.715587 +step:1790 train loss:3.711591 +step:1791 train loss:3.772607 +step:1792 train loss:3.744326 +step:1793 train loss:3.730512 +step:1794 train loss:3.851233 +step:1795 train loss:3.671582 +step:1796 train loss:3.772488 +step:1797 train loss:3.743515 +step:1798 train loss:3.787831 +step:1799 train loss:3.692312 +step:1800 train loss:3.729659 +step:1801 train loss:3.721946 +step:1802 train loss:3.777836 +step:1803 train loss:3.713700 +step:1804 train loss:3.723180 +step:1805 train loss:3.775470 +step:1806 train loss:3.710682 +step:1807 train loss:3.713739 +step:1808 train loss:3.785352 +step:1809 train loss:3.763107 +step:1810 train loss:3.682717 
+step:1811 train loss:3.763314 +step:1812 train loss:3.708689 +step:1813 train loss:3.704571 +step:1814 train loss:3.783668 +step:1815 train loss:3.731284 +step:1816 train loss:3.712498 +step:1817 train loss:3.692958 +step:1818 train loss:3.724852 +step:1819 train loss:3.759038 +step:1820 train loss:3.690178 +step:1821 train loss:3.759727 +step:1822 train loss:3.699318 +step:1823 train loss:3.665606 +step:1824 train loss:3.810580 +step:1825 train loss:3.700411 +step:1826 train loss:3.708409 +step:1827 train loss:3.725478 +step:1828 train loss:3.677238 +step:1829 train loss:3.767400 +step:1830 train loss:3.734938 +step:1831 train loss:3.681521 +step:1832 train loss:3.706627 +step:1833 train loss:3.625680 +step:1834 train loss:3.680070 +step:1835 train loss:3.658431 +step:1836 train loss:3.701536 +step:1837 train loss:3.752429 +step:1838 train loss:3.726416 +step:1839 train loss:3.729750 +step:1840 train loss:3.714848 +step:1841 train loss:3.805290 +step:1842 train loss:3.724668 +step:1843 train loss:3.773668 +step:1844 train loss:3.722737 +step:1845 train loss:3.821756 +step:1846 train loss:3.730264 +step:1847 train loss:3.707882 +step:1848 train loss:3.757854 +step:1849 train loss:3.685669 +step:1850 train loss:3.833108 +step:1851 train loss:3.677667 +step:1852 train loss:3.704407 +step:1853 train loss:3.711031 +step:1854 train loss:3.751517 +step:1855 train loss:3.670569 +step:1856 train loss:3.735696 +step:1857 train loss:3.675449 +step:1858 train loss:3.697692 +step:1859 train loss:3.712342 +step:1860 train loss:3.863644 +step:1861 train loss:3.739750 +step:1862 train loss:3.736919 +step:1863 train loss:3.811733 +step:1864 train loss:3.698825 +step:1865 train loss:3.694539 +step:1866 train loss:3.674685 +step:1867 train loss:3.830633 +step:1868 train loss:3.666594 +step:1869 train loss:3.733728 +step:1870 train loss:3.760608 +step:1871 train loss:3.668891 +step:1872 train loss:3.662226 +step:1873 train loss:3.724823 +step:1874 train loss:3.676361 +step:1875 train loss:3.721139 +step:1876 train loss:3.690586 +step:1877 train loss:3.692387 +step:1878 train loss:3.725584 +step:1879 train loss:3.695358 +step:1880 train loss:3.697109 +step:1881 train loss:3.707147 +step:1882 train loss:3.639897 +step:1883 train loss:3.683717 +step:1884 train loss:3.761973 +step:1885 train loss:3.757453 +step:1886 train loss:3.719016 +step:1887 train loss:3.735965 +step:1888 train loss:3.652445 +step:1889 train loss:3.682900 +step:1890 train loss:3.659050 +step:1891 train loss:3.745210 +step:1892 train loss:3.729519 +step:1893 train loss:3.682896 +step:1894 train loss:3.706630 +step:1895 train loss:3.730266 +step:1896 train loss:3.730025 +step:1897 train loss:3.699772 +step:1898 train loss:3.744106 +step:1899 train loss:3.761117 +step:1900 train loss:3.706188 +step:1901 train loss:3.706923 +step:1902 train loss:3.765007 +step:1903 train loss:3.670549 +step:1904 train loss:3.673823 +step:1905 train loss:3.704191 +step:1906 train loss:3.721206 +step:1907 train loss:3.692239 +step:1908 train loss:3.690083 +step:1909 train loss:3.733304 +step:1910 train loss:3.674602 +step:1911 train loss:3.719646 +step:1912 train loss:3.779129 +step:1913 train loss:3.742407 +step:1914 train loss:3.726093 +step:1915 train loss:3.788330 +step:1916 train loss:3.734174 +step:1917 train loss:3.728616 +step:1918 train loss:3.713955 +step:1919 train loss:3.659596 +step:1920 train loss:3.662060 +step:1921 train loss:3.703836 +step:1922 train loss:3.776721 +step:1923 train loss:3.666372 +step:1924 train loss:3.735108 +step:1925 train 
loss:3.695551 +step:1926 train loss:3.723363 +step:1927 train loss:3.742333 +step:1928 train loss:3.717864 +step:1929 train loss:3.785446 +step:1930 train loss:3.688282 +step:1931 train loss:3.792837 +step:1932 train loss:3.763090 +step:1933 train loss:3.724851 +step:1934 train loss:3.633236 +step:1935 train loss:3.710577 +step:1936 train loss:3.734776 +step:1937 train loss:3.749909 +step:1938 train loss:3.670911 +step:1939 train loss:3.724346 +step:1940 train loss:3.729727 +step:1941 train loss:3.720882 +step:1942 train loss:3.684992 +step:1943 train loss:3.695418 +step:1944 train loss:3.742503 +step:1945 train loss:3.640238 +step:1946 train loss:3.731402 +step:1947 train loss:3.740303 +step:1948 train loss:3.719323 +step:1949 train loss:3.672485 +step:1950 train loss:3.684119 +step:1951 train loss:3.692671 +step:1952 train loss:3.682860 +step:1953 train loss:3.732251 +step:1954 train loss:3.703343 +step:1955 train loss:3.710993 +step:1956 train loss:3.720490 +step:1957 train loss:3.766142 +step:1958 train loss:3.749568 +step:1959 train loss:3.731242 +step:1960 train loss:3.724796 +step:1961 train loss:3.702398 +step:1962 train loss:3.764170 +step:1963 train loss:3.682281 +step:1964 train loss:3.831613 +step:1965 train loss:3.695939 +step:1966 train loss:3.699501 +step:1967 train loss:3.680725 +step:1968 train loss:3.732729 +step:1969 train loss:3.662218 +step:1970 train loss:3.775462 +step:1971 train loss:3.682698 +step:1972 train loss:3.743081 +step:1973 train loss:3.721041 +step:1974 train loss:3.656366 +step:1975 train loss:3.737059 +step:1976 train loss:3.663857 +step:1977 train loss:3.846945 +step:1978 train loss:3.735997 +step:1979 train loss:3.729012 +step:1980 train loss:3.707229 +step:1981 train loss:3.705124 +step:1982 train loss:3.653260 +step:1983 train loss:3.728103 +step:1984 train loss:3.716590 +step:1985 train loss:3.739020 +step:1986 train loss:3.766662 +step:1987 train loss:3.732494 +step:1988 train loss:3.655014 +step:1989 train loss:3.742635 +step:1990 train loss:3.709479 +step:1991 train loss:3.710609 +step:1992 train loss:3.714928 +step:1993 train loss:3.666713 +step:1994 train loss:3.685334 +step:1995 train loss:3.649750 +step:1996 train loss:3.664618 +step:1997 train loss:3.681841 +step:1998 train loss:3.680801 +step:1999 train loss:3.687502 +step:2000 validation loss:3.644914 total_sharp:5.3187e-03 L1_sharp:7.2840e-03 L2_sharp:1.1200e-03 L3_sharp:1.1173e-03 L4_sharp:1.0820e-03 L5_sharp:1.0415e-03 L6_sharp:1.1113e-03 L7_sharp:1.2377e-03 L8_sharp:1.1253e-03 L9_sharp:9.5194e-04 L10_sharp:5.4811e-04 L11_sharp:5.4348e-04 L12_sharp:4.8769e-04 total_fnorm:2.4004e+00 total_l1_linf:2.0522e+04 total_spectral:2.4004e+00 L1_fnorm:5.8823e-01 L2_fnorm:5.2061e-01 L3_fnorm:5.0952e-01 L4_fnorm:5.5055e-01 L5_fnorm:5.8348e-01 L6_fnorm:5.8825e-01 L7_fnorm:5.9662e-01 L8_fnorm:5.9789e-01 L9_fnorm:5.9811e-01 L10_fnorm:5.9886e-01 L11_fnorm:5.9996e-01 L12_fnorm:6.0356e-01 L1_l1linf:4.3703e-01 L2_l1linf:4.0449e-01 L3_l1linf:3.9359e-01 L4_l1linf:4.0023e-01 L5_l1linf:4.1081e-01 L6_l1linf:4.1492e-01 L7_l1linf:4.1043e-01 L8_l1linf:4.1745e-01 L9_l1linf:4.1638e-01 L10_l1linf:4.1085e-01 L11_l1linf:4.0677e-01 L12_l1linf:3.9851e-01 L1_spectral:1.2052e-02 L2_spectral:1.2042e-02 L3_spectral:1.4899e-02 L4_spectral:1.2045e-02 L5_spectral:1.2050e-02 L6_spectral:1.2066e-02 L7_spectral:1.2048e-02 L8_spectral:1.2047e-02 L9_spectral:1.2044e-02 L10_spectral:1.2044e-02 L11_spectral:1.2043e-02 L12_spectral:1.2044e-02 v_norm:2.4004e+00 cos_v_-g_hvp:3.9862e-02 g_hvp_norm:5.4344e-01 cos_v_-g_t:4.6213e-02 
g_t_norm:4.7255e-01 hv_norm:6.6508e-01 cos_v_hv:1.9196e-02 hg_norm:2.2984e+01 cos_g_hg:5.8562e-01 v_par:6.9108e-03 v_perp:2.4004e+00 L1_cos_v_neg_g:2.4095e-02 L1_v_norm:5.8823e-01 L2_cos_v_neg_g:2.9050e-02 L2_v_norm:5.2061e-01 L3_cos_v_neg_g:2.7565e-02 L3_v_norm:5.0952e-01 L4_cos_v_neg_g:3.4795e-02 L4_v_norm:5.5055e-01 L5_cos_v_neg_g:4.0778e-02 L5_v_norm:5.8348e-01 L6_cos_v_neg_g:4.2095e-02 L6_v_norm:5.8825e-01 L7_cos_v_neg_g:4.4500e-02 L7_v_norm:5.9662e-01 L8_cos_v_neg_g:4.4971e-02 L8_v_norm:5.9789e-01 L9_cos_v_neg_g:4.5982e-02 L9_v_norm:5.9811e-01 L10_cos_v_neg_g:5.0070e-02 L10_v_norm:5.9886e-01 L11_cos_v_neg_g:5.9519e-02 L11_v_norm:5.9996e-01 L12_cos_v_neg_g:8.5641e-02 L12_v_norm:6.0356e-01 +step:2000 train loss:3.694217 +step:2001 train loss:3.710161 +step:2002 train loss:3.683551 +step:2003 train loss:3.675006 +step:2004 train loss:3.723266 +step:2005 train loss:3.736580 +step:2006 train loss:3.698985 +step:2007 train loss:3.703822 +step:2008 train loss:3.713699 +step:2009 train loss:3.676180 +step:2010 train loss:3.660903 +step:2011 train loss:3.696278 +step:2012 train loss:3.828389 +step:2013 train loss:3.687340 +step:2014 train loss:3.706032 +step:2015 train loss:3.716387 +step:2016 train loss:3.696840 +step:2017 train loss:3.753679 +step:2018 train loss:3.687470 +step:2019 train loss:3.735005 +step:2020 train loss:3.726067 +step:2021 train loss:3.711280 +step:2022 train loss:3.746498 +step:2023 train loss:3.664419 +step:2024 train loss:3.727074 +step:2025 train loss:3.729718 +step:2026 train loss:3.793467 +step:2027 train loss:3.660120 +step:2028 train loss:3.697184 +step:2029 train loss:3.669993 +step:2030 train loss:3.717501 +step:2031 train loss:3.733175 +step:2032 train loss:3.710354 +step:2033 train loss:3.739017 +step:2034 train loss:3.693427 +step:2035 train loss:3.661067 +step:2036 train loss:3.727160 +step:2037 train loss:3.678539 +step:2038 train loss:3.686014 +step:2039 train loss:3.686630 +step:2040 train loss:3.681267 +step:2041 train loss:3.684252 +step:2042 train loss:3.707575 +step:2043 train loss:3.647719 +step:2044 train loss:3.683446 +step:2045 train loss:3.678447 +step:2046 train loss:3.678794 +step:2047 train loss:3.792315 +step:2048 train loss:3.636585 +step:2049 train loss:3.730611 +step:2050 train loss:3.717840 +step:2051 train loss:3.689065 +step:2052 train loss:3.625364 +step:2053 train loss:3.653351 +step:2054 train loss:3.686728 +step:2055 train loss:3.678695 +step:2056 train loss:3.666777 +step:2057 train loss:3.685630 +step:2058 train loss:3.589514 +step:2059 train loss:3.665536 +step:2060 train loss:3.623564 +step:2061 train loss:3.722557 +step:2062 train loss:3.701397 +step:2063 train loss:3.699263 +step:2064 train loss:3.704940 +step:2065 train loss:3.669043 +step:2066 train loss:3.686902 +step:2067 train loss:3.640157 +step:2068 train loss:3.690535 +step:2069 train loss:3.711371 +step:2070 train loss:3.698267 +step:2071 train loss:3.674921 +step:2072 train loss:3.665442 +step:2073 train loss:3.743538 +step:2074 train loss:3.699666 +step:2075 train loss:3.730110 +step:2076 train loss:3.638710 +step:2077 train loss:3.661440 +step:2078 train loss:3.721023 +step:2079 train loss:3.671040 +step:2080 train loss:3.688722 +step:2081 train loss:3.663480 +step:2082 train loss:3.705049 +step:2083 train loss:3.711170 +step:2084 train loss:3.663058 +step:2085 train loss:3.683411 +step:2086 train loss:3.644728 +step:2087 train loss:3.658767 +step:2088 train loss:3.657131 +step:2089 train loss:3.739402 +step:2090 train loss:3.664856 +step:2091 train loss:3.673369 
+step:2092 train loss:3.693821 +step:2093 train loss:3.685712 +step:2094 train loss:3.792374 +step:2095 train loss:3.799711 +step:2096 train loss:3.709026 +step:2097 train loss:3.687127 +step:2098 train loss:3.708413 +step:2099 train loss:3.663442 +step:2100 train loss:3.663424 +step:2101 train loss:3.667180 +step:2102 train loss:3.665857 +step:2103 train loss:3.695038 +step:2104 train loss:3.630165 +step:2105 train loss:3.829742 +step:2106 train loss:3.760686 +step:2107 train loss:3.728772 +step:2108 train loss:3.751405 +step:2109 train loss:3.667368 +step:2110 train loss:3.719998 +step:2111 train loss:3.712751 +step:2112 train loss:3.688511 +step:2113 train loss:3.683472 +step:2114 train loss:3.728944 +step:2115 train loss:3.690217 +step:2116 train loss:3.703201 +step:2117 train loss:3.714653 +step:2118 train loss:3.718428 +step:2119 train loss:3.621861 +step:2120 train loss:3.708844 +step:2121 train loss:3.670551 +step:2122 train loss:3.652670 +step:2123 train loss:3.754022 +step:2124 train loss:3.686521 +step:2125 train loss:3.660043 +step:2126 train loss:3.785594 +step:2127 train loss:3.633618 +step:2128 train loss:3.771256 +step:2129 train loss:3.679728 +step:2130 train loss:3.734362 +step:2131 train loss:3.640543 +step:2132 train loss:3.629457 +step:2133 train loss:3.638740 +step:2134 train loss:3.642829 +step:2135 train loss:3.677776 +step:2136 train loss:3.644297 +step:2137 train loss:3.605782 +step:2138 train loss:3.680485 +step:2139 train loss:3.653134 +step:2140 train loss:3.564654 +step:2141 train loss:3.720222 +step:2142 train loss:3.697808 +step:2143 train loss:3.765714 +step:2144 train loss:3.714191 +step:2145 train loss:3.682192 +step:2146 train loss:3.930381 +step:2147 train loss:3.671448 +step:2148 train loss:3.710846 +step:2149 train loss:3.650768 +step:2150 train loss:3.647921 +step:2151 train loss:3.752537 +step:2152 train loss:3.667091 +step:2153 train loss:3.669199 +step:2154 train loss:3.656941 +step:2155 train loss:3.730403 +step:2156 train loss:3.698089 +step:2157 train loss:3.677521 +step:2158 train loss:3.663961 +step:2159 train loss:3.629157 +step:2160 train loss:3.737785 +step:2161 train loss:3.697885 +step:2162 train loss:3.648910 +step:2163 train loss:3.699332 +step:2164 train loss:3.630286 +step:2165 train loss:3.696443 +step:2166 train loss:3.636548 +step:2167 train loss:3.689895 +step:2168 train loss:3.707740 +step:2169 train loss:3.676342 +step:2170 train loss:3.689994 +step:2171 train loss:3.689010 +step:2172 train loss:3.667922 +step:2173 train loss:3.665613 +step:2174 train loss:3.856034 +step:2175 train loss:3.706609 +step:2176 train loss:3.676125 +step:2177 train loss:3.686278 +step:2178 train loss:3.703572 +step:2179 train loss:3.621459 +step:2180 train loss:3.643324 +step:2181 train loss:3.624008 +step:2182 train loss:3.640189 +step:2183 train loss:3.672338 +step:2184 train loss:3.682012 +step:2185 train loss:3.689035 +step:2186 train loss:3.716487 +step:2187 train loss:3.716454 +step:2188 train loss:3.687055 +step:2189 train loss:3.676092 +step:2190 train loss:3.693825 +step:2191 train loss:3.663893 +step:2192 train loss:3.662597 +step:2193 train loss:3.668175 +step:2194 train loss:3.689478 +step:2195 train loss:3.695622 +step:2196 train loss:3.682572 +step:2197 train loss:3.698589 +step:2198 train loss:3.683595 +step:2199 train loss:3.701051 +step:2200 train loss:3.680593 +step:2201 train loss:3.670535 +step:2202 train loss:3.678575 +step:2203 train loss:3.645533 +step:2204 train loss:3.596347 +step:2205 train loss:3.702589 +step:2206 train 
loss:3.651269 +step:2207 train loss:3.651834 +step:2208 train loss:3.664077 +step:2209 train loss:3.632661 +step:2210 train loss:3.655192 +step:2211 train loss:3.613560 +step:2212 train loss:3.717118 +step:2213 train loss:3.691927 +step:2214 train loss:3.642034 +step:2215 train loss:3.702778 +step:2216 train loss:3.679058 +step:2217 train loss:3.671069 +step:2218 train loss:3.634366 +step:2219 train loss:3.628602 +step:2220 train loss:3.670604 +step:2221 train loss:3.668530 +step:2222 train loss:3.662368 +step:2223 train loss:3.629734 +step:2224 train loss:3.721192 +step:2225 train loss:3.643047 +step:2226 train loss:3.657270 +step:2227 train loss:3.681435 +step:2228 train loss:3.716692 +step:2229 train loss:3.616839 +step:2230 train loss:3.725961 +step:2231 train loss:3.677482 +step:2232 train loss:3.690398 +step:2233 train loss:3.659646 +step:2234 train loss:3.712705 +step:2235 train loss:3.662554 +step:2236 train loss:3.609643 +step:2237 train loss:3.692914 +step:2238 train loss:3.622061 +step:2239 train loss:3.709768 +step:2240 train loss:3.697201 +step:2241 train loss:3.679936 +step:2242 train loss:3.741666 +step:2243 train loss:3.664270 +step:2244 train loss:3.643952 +step:2245 train loss:3.809660 +step:2246 train loss:3.673587 +step:2247 train loss:3.644115 +step:2248 train loss:3.635606 +step:2249 train loss:3.734834 +step:2250 validation loss:3.625390 +step:2250 train loss:3.654504 +step:2251 train loss:3.762657 +step:2252 train loss:3.717587 +step:2253 train loss:3.670379 +step:2254 train loss:3.661808 +step:2255 train loss:3.647036 +step:2256 train loss:3.647163 +step:2257 train loss:3.622022 +step:2258 train loss:3.615993 +step:2259 train loss:3.713009 +step:2260 train loss:3.588230 +step:2261 train loss:3.685530 +step:2262 train loss:3.692277 +step:2263 train loss:3.668044 +step:2264 train loss:3.696315 +step:2265 train loss:3.626732 +step:2266 train loss:3.832706 +step:2267 train loss:3.690005 +step:2268 train loss:3.696024 +step:2269 train loss:3.658575 +step:2270 train loss:3.563525 +step:2271 train loss:3.630318 +step:2272 train loss:3.656077 +step:2273 train loss:3.696211 +step:2274 train loss:3.717671 +step:2275 train loss:3.653169 +step:2276 train loss:3.709382 +step:2277 train loss:3.618521 +step:2278 train loss:3.697965 +step:2279 train loss:3.608396 +step:2280 train loss:3.617757 +step:2281 train loss:3.650173 +step:2282 train loss:3.632524 +step:2283 train loss:3.657939 +step:2284 train loss:3.633117 +step:2285 train loss:3.665919 +step:2286 train loss:3.708138 +step:2287 train loss:3.666423 +step:2288 train loss:3.639478 +step:2289 train loss:3.717683 +step:2290 train loss:3.718213 +step:2291 train loss:3.802906 +step:2292 train loss:3.637231 +step:2293 train loss:3.681937 +step:2294 train loss:3.705810 +step:2295 train loss:3.637852 +step:2296 train loss:3.634785 +step:2297 train loss:3.675416 +step:2298 train loss:3.682698 +step:2299 train loss:3.702306 +step:2300 train loss:3.637179 +step:2301 train loss:3.620623 +step:2302 train loss:3.744158 +step:2303 train loss:3.691848 +step:2304 train loss:3.634684 +step:2305 train loss:3.646588 +step:2306 train loss:3.662165 +step:2307 train loss:3.640528 +step:2308 train loss:3.688126 +step:2309 train loss:3.702724 +step:2310 train loss:3.669582 +step:2311 train loss:3.650449 +step:2312 train loss:3.673656 +step:2313 train loss:3.685637 +step:2314 train loss:3.721402 +step:2315 train loss:3.704646 +step:2316 train loss:3.683660 +step:2317 train loss:3.605170 +step:2318 train loss:3.656673 +step:2319 train loss:3.677558 
+step:2320 train loss:3.670919 +step:2321 train loss:3.652163 +step:2322 train loss:3.679201 +step:2323 train loss:3.649153 +step:2324 train loss:3.714053 +step:2325 train loss:3.699483 +step:2326 train loss:3.629463 +step:2327 train loss:3.652987 +step:2328 train loss:3.656756 +step:2329 train loss:3.658109 +step:2330 train loss:3.653963 +step:2331 train loss:3.698814 +step:2332 train loss:3.673550 +step:2333 train loss:3.604708 +step:2334 train loss:3.707282 +step:2335 train loss:3.739008 +step:2336 train loss:3.675856 +step:2337 train loss:3.679725 +step:2338 train loss:3.660664 +step:2339 train loss:3.659564 +step:2340 train loss:3.641539 +step:2341 train loss:3.694901 +step:2342 train loss:3.657362 +step:2343 train loss:3.667321 +step:2344 train loss:3.654880 +step:2345 train loss:3.620644 +step:2346 train loss:3.631357 +step:2347 train loss:3.728158 +step:2348 train loss:3.706521 +step:2349 train loss:3.648829 +step:2350 train loss:3.677603 +step:2351 train loss:3.691446 +step:2352 train loss:3.662978 +step:2353 train loss:3.583929 +step:2354 train loss:3.637420 +step:2355 train loss:3.683391 +step:2356 train loss:3.602270 +step:2357 train loss:3.703873 +step:2358 train loss:3.696068 +step:2359 train loss:3.667000 +step:2360 train loss:3.670503 +step:2361 train loss:3.698631 +step:2362 train loss:3.625343 +step:2363 train loss:3.637009 +step:2364 train loss:3.672180 +step:2365 train loss:3.690571 +step:2366 train loss:3.611079 +step:2367 train loss:3.693527 +step:2368 train loss:3.624465 +step:2369 train loss:3.600801 +step:2370 train loss:3.667806 +step:2371 train loss:3.655285 +step:2372 train loss:3.650878 +step:2373 train loss:3.730863 +step:2374 train loss:3.675066 +step:2375 train loss:3.650774 +step:2376 train loss:3.707502 +step:2377 train loss:3.687816 +step:2378 train loss:3.820450 +step:2379 train loss:3.735123 +step:2380 train loss:3.623546 +step:2381 train loss:3.663372 +step:2382 train loss:3.674054 +step:2383 train loss:3.675241 +step:2384 train loss:3.693467 +step:2385 train loss:3.694202 +step:2386 train loss:3.748388 +step:2387 train loss:3.665489 +step:2388 train loss:3.676453 +step:2389 train loss:3.648048 +step:2390 train loss:3.665126 +step:2391 train loss:3.677324 +step:2392 train loss:3.661381 +step:2393 train loss:3.645181 +step:2394 train loss:3.645709 +step:2395 train loss:3.741674 +step:2396 train loss:3.599399 +step:2397 train loss:3.657400 +step:2398 train loss:3.586731 +step:2399 train loss:3.709428 +step:2400 train loss:3.632651 +step:2401 train loss:3.657858 +step:2402 train loss:3.711115 +step:2403 train loss:3.718459 +step:2404 train loss:3.671082 +step:2405 train loss:3.645488 +step:2406 train loss:3.653241 +step:2407 train loss:3.661546 +step:2408 train loss:3.689525 +step:2409 train loss:3.598927 +step:2410 train loss:3.645408 +step:2411 train loss:3.634978 +step:2412 train loss:3.589252 +step:2413 train loss:3.689315 +step:2414 train loss:3.691479 +step:2415 train loss:3.654674 +step:2416 train loss:3.621267 +step:2417 train loss:3.739835 +step:2418 train loss:3.618972 +step:2419 train loss:3.631012 +step:2420 train loss:3.629193 +step:2421 train loss:3.645894 +step:2422 train loss:3.680328 +step:2423 train loss:3.659588 +step:2424 train loss:3.644525 +step:2425 train loss:3.686331 +step:2426 train loss:3.629466 +step:2427 train loss:3.708118 +step:2428 train loss:3.687887 +step:2429 train loss:3.639935 +step:2430 train loss:3.597373 +step:2431 train loss:3.612072 +step:2432 train loss:3.624392 +step:2433 train loss:3.661436 +step:2434 train 
loss:3.654852 +step:2435 train loss:3.805962 +step:2436 train loss:3.646813 +step:2437 train loss:3.635810 +step:2438 train loss:3.650795 +step:2439 train loss:3.627992 +step:2440 train loss:3.604303 +step:2441 train loss:3.615336 +step:2442 train loss:3.671199 +step:2443 train loss:3.665215 +step:2444 train loss:3.732448 +step:2445 train loss:3.666675 +step:2446 train loss:3.646549 +step:2447 train loss:3.631247 +step:2448 train loss:3.639151 +step:2449 train loss:3.670640 +step:2450 train loss:3.639855 +step:2451 train loss:3.636565 +step:2452 train loss:3.672070 +step:2453 train loss:3.644625 +step:2454 train loss:3.640528 +step:2455 train loss:3.691650 +step:2456 train loss:3.646880 +step:2457 train loss:3.664142 +step:2458 train loss:3.682393 +step:2459 train loss:3.641871 +step:2460 train loss:3.647383 +step:2461 train loss:3.662337 +step:2462 train loss:3.662436 +step:2463 train loss:3.644169 +step:2464 train loss:3.733648 +step:2465 train loss:3.781316 +step:2466 train loss:3.727696 +step:2467 train loss:3.668025 +step:2468 train loss:3.664955 +step:2469 train loss:3.668293 +step:2470 train loss:3.647930 +step:2471 train loss:3.658620 +step:2472 train loss:3.676081 +step:2473 train loss:3.641549 +step:2474 train loss:3.673894 +step:2475 train loss:3.701667 +step:2476 train loss:3.751721 +step:2477 train loss:3.682266 +step:2478 train loss:3.663506 +step:2479 train loss:3.624503 +step:2480 train loss:3.657049 +step:2481 train loss:3.633591 +step:2482 train loss:3.624737 +step:2483 train loss:3.673712 +step:2484 train loss:3.709368 +step:2485 train loss:3.742641 +step:2486 train loss:3.634652 +step:2487 train loss:3.651758 +step:2488 train loss:3.673584 +step:2489 train loss:3.602582 +step:2490 train loss:3.650740 +step:2491 train loss:3.613844 +step:2492 train loss:3.609292 +step:2493 train loss:3.654220 +step:2494 train loss:3.658473 +step:2495 train loss:3.636185 +step:2496 train loss:3.678294 +step:2497 train loss:3.685237 +step:2498 train loss:3.718006 +step:2499 train loss:3.657976 +step:2500 validation loss:3.592027 total_sharp:6.4864e-03 L1_sharp:1.1636e-02 L2_sharp:5.9055e-03 L3_sharp:2.4647e-03 L4_sharp:1.0333e-03 L5_sharp:9.8872e-04 L6_sharp:1.0635e-03 L7_sharp:1.1440e-03 L8_sharp:1.0111e-03 L9_sharp:7.5412e-04 L10_sharp:4.6028e-04 L11_sharp:4.8431e-04 L12_sharp:5.7448e-04 total_fnorm:2.3996e+00 total_l1_linf:2.0521e+04 total_spectral:2.3996e+00 L1_fnorm:5.8356e-01 L2_fnorm:4.8824e-01 L3_fnorm:4.8510e-01 L4_fnorm:5.5144e-01 L5_fnorm:5.8549e-01 L6_fnorm:5.8957e-01 L7_fnorm:5.9897e-01 L8_fnorm:5.9900e-01 L9_fnorm:5.9961e-01 L10_fnorm:5.9897e-01 L11_fnorm:5.9771e-01 L12_fnorm:6.0274e-01 L1_l1linf:4.3937e-01 L2_l1linf:4.1581e-01 L3_l1linf:3.9504e-01 L4_l1linf:4.0130e-01 L5_l1linf:4.1188e-01 L6_l1linf:4.1674e-01 L7_l1linf:4.1265e-01 L8_l1linf:4.1438e-01 L9_l1linf:4.1351e-01 L10_l1linf:4.1691e-01 L11_l1linf:4.1083e-01 L12_l1linf:3.9631e-01 L1_spectral:1.2054e-02 L2_spectral:1.2063e-02 L3_spectral:1.4400e-02 L4_spectral:1.2041e-02 L5_spectral:1.2047e-02 L6_spectral:1.2062e-02 L7_spectral:1.2060e-02 L8_spectral:1.2046e-02 L9_spectral:1.2042e-02 L10_spectral:1.2043e-02 L11_spectral:1.2047e-02 L12_spectral:1.2050e-02 v_norm:2.3996e+00 cos_v_-g_hvp:3.7141e-02 g_hvp_norm:6.2738e-01 cos_v_-g_t:4.1407e-02 g_t_norm:5.6860e-01 hv_norm:7.6870e-01 cos_v_hv:2.0249e-02 hg_norm:2.8202e+01 cos_g_hg:5.8574e-01 v_par:6.3241e-03 v_perp:2.3996e+00 L1_cos_v_neg_g:2.9224e-02 L1_v_norm:5.8356e-01 L2_cos_v_neg_g:4.2471e-02 L2_v_norm:4.8824e-01 L3_cos_v_neg_g:3.1940e-02 L3_v_norm:4.8510e-01 
L4_cos_v_neg_g:3.3909e-02 L4_v_norm:5.5144e-01 L5_cos_v_neg_g:3.6873e-02 L5_v_norm:5.8549e-01 L6_cos_v_neg_g:3.8670e-02 L6_v_norm:5.8957e-01 L7_cos_v_neg_g:3.6799e-02 L7_v_norm:5.9897e-01 L8_cos_v_neg_g:3.9301e-02 L8_v_norm:5.9900e-01 L9_cos_v_neg_g:4.0065e-02 L9_v_norm:5.9961e-01 L10_cos_v_neg_g:4.5979e-02 L10_v_norm:5.9897e-01 L11_cos_v_neg_g:5.4271e-02 L11_v_norm:5.9771e-01 L12_cos_v_neg_g:7.4581e-02 L12_v_norm:6.0274e-01 +step:2500 train loss:3.654018 +step:2501 train loss:3.569746 +step:2502 train loss:3.745718 +step:2503 train loss:3.657632 +step:2504 train loss:3.677469 +step:2505 train loss:3.643487 +step:2506 train loss:3.675056 +step:2507 train loss:3.599835 +step:2508 train loss:3.660620 +step:2509 train loss:3.610045 +step:2510 train loss:3.638874 +step:2511 train loss:3.603430 +step:2512 train loss:3.699428 +step:2513 train loss:3.629147 +step:2514 train loss:3.660911 +step:2515 train loss:3.664683 +step:2516 train loss:3.629630 +step:2517 train loss:3.580432 +step:2518 train loss:3.623443 +step:2519 train loss:3.688386 +step:2520 train loss:3.610868 +step:2521 train loss:3.660953 +step:2522 train loss:3.664521 +step:2523 train loss:3.650119 +step:2524 train loss:3.684181 +step:2525 train loss:3.606596 +step:2526 train loss:3.640898 +step:2527 train loss:3.604485 +step:2528 train loss:3.725130 +step:2529 train loss:3.612953 +step:2530 train loss:3.642348 +step:2531 train loss:3.648811 +step:2532 train loss:3.639922 +step:2533 train loss:3.717835 +step:2534 train loss:3.733035 +step:2535 train loss:3.604615 +step:2536 train loss:3.649684 +step:2537 train loss:3.594376 +step:2538 train loss:3.674170 +step:2539 train loss:3.597617 +step:2540 train loss:3.646991 +step:2541 train loss:3.625994 +step:2542 train loss:3.631074 +step:2543 train loss:3.645664 +step:2544 train loss:3.639266 +step:2545 train loss:3.602322 +step:2546 train loss:3.643858 +step:2547 train loss:3.665681 +step:2548 train loss:3.649890 +step:2549 train loss:3.565895 +step:2550 train loss:3.657168 +step:2551 train loss:3.614892 +step:2552 train loss:3.670110 +step:2553 train loss:3.604067 +step:2554 train loss:3.719875 +step:2555 train loss:3.595423 +step:2556 train loss:3.640439 +step:2557 train loss:3.620380 +step:2558 train loss:3.655371 +step:2559 train loss:3.646395 +step:2560 train loss:3.656406 +step:2561 train loss:3.607659 +step:2562 train loss:3.586792 +step:2563 train loss:3.690296 +step:2564 train loss:3.657472 +step:2565 train loss:3.692322 +step:2566 train loss:3.611262 +step:2567 train loss:3.641759 +step:2568 train loss:3.562538 +step:2569 train loss:3.654261 +step:2570 train loss:3.594536 +step:2571 train loss:3.643031 +step:2572 train loss:3.559168 +step:2573 train loss:3.587285 +step:2574 train loss:3.549193 +step:2575 train loss:3.614896 +step:2576 train loss:3.622036 +step:2577 train loss:3.662047 +step:2578 train loss:3.602813 +step:2579 train loss:3.661715 +step:2580 train loss:3.643695 +step:2581 train loss:3.655098 +step:2582 train loss:3.659102 +step:2583 train loss:3.658763 +step:2584 train loss:3.635127 +step:2585 train loss:3.569642 +step:2586 train loss:3.638373 +step:2587 train loss:3.549745 +step:2588 train loss:3.582994 +step:2589 train loss:3.612973 +step:2590 train loss:3.601954 +step:2591 train loss:3.608877 +step:2592 train loss:3.651949 +step:2593 train loss:3.657217 +step:2594 train loss:3.659003 +step:2595 train loss:3.635765 +step:2596 train loss:3.659878 +step:2597 train loss:3.617984 +step:2598 train loss:3.664354 +step:2599 train loss:3.610536 +step:2600 train 
loss:3.624135 +step:2601 train loss:3.634762 +step:2602 train loss:3.682888 +step:2603 train loss:3.634355 +step:2604 train loss:3.619933 +step:2605 train loss:3.597272 +step:2606 train loss:3.716607 +step:2607 train loss:3.622188 +step:2608 train loss:3.669838 +step:2609 train loss:3.674807 +step:2610 train loss:3.632901 +step:2611 train loss:3.585909 +step:2612 train loss:3.624325 +step:2613 train loss:3.607651 +step:2614 train loss:3.665603 +step:2615 train loss:3.735907 +step:2616 train loss:3.670166 +step:2617 train loss:3.635071 +step:2618 train loss:3.645282 +step:2619 train loss:3.655561 +step:2620 train loss:3.698319 +step:2621 train loss:3.634182 +step:2622 train loss:3.668765 +step:2623 train loss:3.606611 +step:2624 train loss:3.611192 +step:2625 train loss:3.652688 +step:2626 train loss:3.626615 +step:2627 train loss:3.620950 +step:2628 train loss:3.735226 +step:2629 train loss:3.663082 +step:2630 train loss:3.603815 +step:2631 train loss:3.671256 +step:2632 train loss:3.625525 +step:2633 train loss:3.652626 +step:2634 train loss:3.617758 +step:2635 train loss:3.623032 +step:2636 train loss:3.570129 +step:2637 train loss:3.575550 +step:2638 train loss:3.565488 +step:2639 train loss:3.648369 +step:2640 train loss:3.656406 +step:2641 train loss:3.566405 +step:2642 train loss:3.598046 +step:2643 train loss:3.670568 +step:2644 train loss:3.752739 +step:2645 train loss:3.637668 +step:2646 train loss:3.606143 +step:2647 train loss:3.586926 +step:2648 train loss:3.707472 +step:2649 train loss:3.693583 +step:2650 train loss:3.652608 +step:2651 train loss:3.709918 +step:2652 train loss:3.722425 +step:2653 train loss:3.618839 +step:2654 train loss:3.675680 +step:2655 train loss:3.670353 +step:2656 train loss:3.591825 +step:2657 train loss:3.586927 +step:2658 train loss:3.566525 +step:2659 train loss:3.590185 +step:2660 train loss:3.649698 +step:2661 train loss:3.605130 +step:2662 train loss:3.589370 +step:2663 train loss:3.716182 +step:2664 train loss:3.611781 +step:2665 train loss:3.655751 +step:2666 train loss:3.657583 +step:2667 train loss:3.713751 +step:2668 train loss:3.626231 +step:2669 train loss:3.598799 +step:2670 train loss:3.593527 +step:2671 train loss:3.647136 +step:2672 train loss:3.615103 +step:2673 train loss:3.638746 +step:2674 train loss:3.708404 +step:2675 train loss:3.694739 +step:2676 train loss:3.569163 +step:2677 train loss:3.686509 +step:2678 train loss:3.637881 +step:2679 train loss:3.684271 +step:2680 train loss:3.638217 +step:2681 train loss:3.678253 +step:2682 train loss:3.611749 +step:2683 train loss:3.562460 +step:2684 train loss:3.654088 +step:2685 train loss:3.655190 +step:2686 train loss:3.623077 +step:2687 train loss:3.657688 +step:2688 train loss:3.589509 +step:2689 train loss:3.656919 +step:2690 train loss:3.664518 +step:2691 train loss:3.566584 +step:2692 train loss:3.672424 +step:2693 train loss:3.606017 +step:2694 train loss:3.590123 +step:2695 train loss:3.664554 +step:2696 train loss:3.655982 +step:2697 train loss:3.589789 +step:2698 train loss:3.673129 +step:2699 train loss:3.629423 +step:2700 train loss:3.569853 +step:2701 train loss:3.588426 +step:2702 train loss:3.579934 +step:2703 train loss:3.675174 +step:2704 train loss:3.576626 +step:2705 train loss:3.709559 +step:2706 train loss:3.546670 +step:2707 train loss:3.590758 +step:2708 train loss:3.608956 +step:2709 train loss:3.657216 +step:2710 train loss:3.689768 +step:2711 train loss:3.608924 +step:2712 train loss:3.554128 +step:2713 train loss:3.591489 +step:2714 train loss:3.635164 
+step:2715 train loss:3.575905 +step:2716 train loss:3.620273 +step:2717 train loss:3.612439 +step:2718 train loss:3.624911 +step:2719 train loss:3.585975 +step:2720 train loss:3.638047 +step:2721 train loss:3.656664 +step:2722 train loss:3.581803 +step:2723 train loss:3.626102 +step:2724 train loss:3.585612 +step:2725 train loss:3.584955 +step:2726 train loss:3.611487 +step:2727 train loss:3.549615 +step:2728 train loss:3.609025 +step:2729 train loss:3.575275 +step:2730 train loss:3.621101 +step:2731 train loss:3.596826 +step:2732 train loss:3.624132 +step:2733 train loss:3.637005 +step:2734 train loss:3.577340 +step:2735 train loss:3.568660 +step:2736 train loss:3.630656 +step:2737 train loss:3.552290 +step:2738 train loss:3.560925 +step:2739 train loss:3.617728 +step:2740 train loss:3.596510 +step:2741 train loss:3.544996 +step:2742 train loss:3.617164 +step:2743 train loss:3.648010 +step:2744 train loss:3.616296 +step:2745 train loss:3.583352 +step:2746 train loss:3.618872 +step:2747 train loss:3.597656 +step:2748 train loss:3.594919 +step:2749 train loss:3.567471 +step:2750 validation loss:3.564966 +step:2750 train loss:3.633256 +step:2751 train loss:3.611506 +step:2752 train loss:3.606274 +step:2753 train loss:3.632072 +step:2754 train loss:3.638088 +step:2755 train loss:3.583778 +step:2756 train loss:3.607418 +step:2757 train loss:3.629157 +step:2758 train loss:3.606412 +step:2759 train loss:3.608317 +step:2760 train loss:3.631607 +step:2761 train loss:3.586736 +step:2762 train loss:3.593937 +step:2763 train loss:3.615000 +step:2764 train loss:3.657867 +step:2765 train loss:3.613168 +step:2766 train loss:3.625269 +step:2767 train loss:3.639178 +step:2768 train loss:3.595937 +step:2769 train loss:3.574514 +step:2770 train loss:3.602437 +step:2771 train loss:3.649460 +step:2772 train loss:3.720750 +step:2773 train loss:3.690775 +step:2774 train loss:3.547658 +step:2775 train loss:3.612072 +step:2776 train loss:3.610200 +step:2777 train loss:3.647130 +step:2778 train loss:3.668129 +step:2779 train loss:3.618483 +step:2780 train loss:3.620768 +step:2781 train loss:3.591626 +step:2782 train loss:3.613412 +step:2783 train loss:3.597448 +step:2784 train loss:3.688090 +step:2785 train loss:3.596775 +step:2786 train loss:3.567499 +step:2787 train loss:3.673359 +step:2788 train loss:3.602232 +step:2789 train loss:3.623051 +step:2790 train loss:3.595514 +step:2791 train loss:3.612689 +step:2792 train loss:3.600224 +step:2793 train loss:3.609655 +step:2794 train loss:3.572003 +step:2795 train loss:3.580924 +step:2796 train loss:3.623694 +step:2797 train loss:3.603669 +step:2798 train loss:3.599836 +step:2799 train loss:3.579453 +step:2800 train loss:3.629601 +step:2801 train loss:3.590463 +step:2802 train loss:3.638356 +step:2803 train loss:3.649539 +step:2804 train loss:3.581503 +step:2805 train loss:3.686824 +step:2806 train loss:3.644265 +step:2807 train loss:3.570454 +step:2808 train loss:3.594445 +step:2809 train loss:3.603441 +step:2810 train loss:3.623394 +step:2811 train loss:3.555282 +step:2812 train loss:3.638301 +step:2813 train loss:3.662483 +step:2814 train loss:3.574001 +step:2815 train loss:3.607499 +step:2816 train loss:3.555539 +step:2817 train loss:3.622731 +step:2818 train loss:3.597207 +step:2819 train loss:3.544641 +step:2820 train loss:3.608370 +step:2821 train loss:3.706042 +step:2822 train loss:3.642381 +step:2823 train loss:3.602255 +step:2824 train loss:3.638060 +step:2825 train loss:3.567681 +step:2826 train loss:3.586823 +step:2827 train loss:3.579318 +step:2828 
train loss:3.553521 +step:2829 train loss:3.612124 +step:2830 train loss:3.545743 +step:2831 train loss:3.646976 +step:2832 train loss:3.628697 +step:2833 train loss:3.613353 +step:2834 train loss:3.619286 +step:2835 train loss:3.588271 +step:2836 train loss:3.610859 +step:2837 train loss:3.552434 +step:2838 train loss:3.576277 +step:2839 train loss:3.548133 +step:2840 train loss:3.659915 +step:2841 train loss:3.603584 +step:2842 train loss:3.599688 +step:2843 train loss:3.637401 +step:2844 train loss:3.559022 +step:2845 train loss:3.618145 +step:2846 train loss:3.576011 +step:2847 train loss:3.623079 +step:2848 train loss:3.612083 +step:2849 train loss:3.636382 +step:2850 train loss:3.704157 +step:2851 train loss:3.626117 +step:2852 train loss:3.657567 +step:2853 train loss:3.619582 +step:2854 train loss:3.598721 +step:2855 train loss:3.617686 +step:2856 train loss:3.731720 +step:2857 train loss:3.594338 +step:2858 train loss:3.613816 +step:2859 train loss:3.580207 +step:2860 train loss:3.611648 +step:2861 train loss:3.724713 +step:2862 train loss:3.623140 +step:2863 train loss:3.642209 +step:2864 train loss:3.646545 +step:2865 train loss:3.621679 +step:2866 train loss:3.634978 +step:2867 train loss:3.662307 +step:2868 train loss:3.592476 +step:2869 train loss:3.651184 +step:2870 train loss:3.689222 +step:2871 train loss:3.601149 +step:2872 train loss:3.619244 +step:2873 train loss:3.615883 +step:2874 train loss:3.619054 +step:2875 train loss:3.628444 +step:2876 train loss:3.629389 +step:2877 train loss:3.650308 +step:2878 train loss:3.619676 +step:2879 train loss:3.595868 +step:2880 train loss:3.677647 +step:2881 train loss:3.627689 +step:2882 train loss:3.619967 +step:2883 train loss:3.666551 +step:2884 train loss:3.667024 +step:2885 train loss:3.620868 +step:2886 train loss:3.613966 +step:2887 train loss:3.652907 +step:2888 train loss:3.670984 +step:2889 train loss:3.641406 +step:2890 train loss:3.650172 +step:2891 train loss:3.632882 +step:2892 train loss:3.621032 +step:2893 train loss:3.609681 +step:2894 train loss:3.659990 +step:2895 train loss:3.606076 +step:2896 train loss:3.632608 +step:2897 train loss:3.646129 +step:2898 train loss:3.684767 +step:2899 train loss:3.615603 +step:2900 train loss:3.607014 +step:2901 train loss:3.672254 +step:2902 train loss:3.549400 +step:2903 train loss:3.689159 +step:2904 train loss:3.662437 +step:2905 train loss:3.639783 +step:2906 train loss:3.611032 +step:2907 train loss:3.671421 +step:2908 train loss:3.608637 +step:2909 train loss:3.649416 +step:2910 train loss:3.684817 +step:2911 train loss:3.571047 +step:2912 train loss:3.604103 +step:2913 train loss:3.642245 +step:2914 train loss:3.597551 +step:2915 train loss:3.622056 +step:2916 train loss:3.576245 +step:2917 train loss:3.601165 +step:2918 train loss:3.666193 +step:2919 train loss:3.658285 +step:2920 train loss:3.628252 +step:2921 train loss:3.616638 +step:2922 train loss:3.583763 +step:2923 train loss:3.612343 +step:2924 train loss:3.607564 +step:2925 train loss:3.652011 +step:2926 train loss:3.633047 +step:2927 train loss:3.574469 +step:2928 train loss:3.637927 +step:2929 train loss:3.591042 +step:2930 train loss:3.580383 +step:2931 train loss:3.608228 +step:2932 train loss:3.643231 +step:2933 train loss:3.716479 +step:2934 train loss:3.622244 +step:2935 train loss:3.593455 +step:2936 train loss:3.611859 +step:2937 train loss:3.635214 +step:2938 train loss:3.634897 +step:2939 train loss:3.747887 +step:2940 train loss:3.653609 +step:2941 train loss:3.679639 +step:2942 train loss:3.612078 
+step:2943 train loss:3.639322 +step:2944 train loss:3.628378 +step:2945 train loss:3.633591 +step:2946 train loss:3.612768 +step:2947 train loss:3.589839 +step:2948 train loss:3.583485 +step:2949 train loss:3.598692 +step:2950 train loss:3.679031 +step:2951 train loss:3.631655 +step:2952 train loss:3.631139 +step:2953 train loss:3.621558 +step:2954 train loss:3.630887 +step:2955 train loss:3.706831 +step:2956 train loss:3.612895 +step:2957 train loss:3.624081 +step:2958 train loss:3.655074 +step:2959 train loss:3.607159 +step:2960 train loss:3.633426 +step:2961 train loss:3.604820 +step:2962 train loss:3.619244 +step:2963 train loss:3.593531 +step:2964 train loss:3.640835 +step:2965 train loss:3.681827 +step:2966 train loss:3.561514 +step:2967 train loss:3.630857 +step:2968 train loss:3.548882 +step:2969 train loss:3.643813 +step:2970 train loss:3.597226 +step:2971 train loss:3.569833 +step:2972 train loss:3.543660 +step:2973 train loss:3.633598 +step:2974 train loss:3.552522 +step:2975 train loss:3.564117 +step:2976 train loss:3.591848 +step:2977 train loss:3.583738 +step:2978 train loss:3.602533 +step:2979 train loss:3.554824 +step:2980 train loss:3.591251 +step:2981 train loss:3.630939 +step:2982 train loss:3.551275 +step:2983 train loss:3.587742 +step:2984 train loss:3.602318 +step:2985 train loss:3.592230 +step:2986 train loss:3.620023 +step:2987 train loss:3.581270 +step:2988 train loss:3.606977 +step:2989 train loss:3.624404 +step:2990 train loss:3.564485 +step:2991 train loss:3.627052 +step:2992 train loss:3.557365 +step:2993 train loss:3.540809 +step:2994 train loss:3.635895 +step:2995 train loss:3.573862 +step:2996 train loss:3.568614 +step:2997 train loss:3.570681 +step:2998 train loss:3.576491 +step:2999 train loss:3.555672 +step:3000 validation loss:3.548201 total_sharp:5.9028e-03 L1_sharp:8.5163e-03 L2_sharp:2.1649e-03 L3_sharp:1.9195e-03 L4_sharp:1.4812e-03 L5_sharp:1.0501e-03 L6_sharp:1.3360e-03 L7_sharp:1.4991e-03 L8_sharp:1.4874e-03 L9_sharp:9.0066e-04 L10_sharp:5.1731e-04 L11_sharp:5.9347e-04 L12_sharp:9.7288e-04 total_fnorm:2.3930e+00 total_l1_linf:2.0475e+04 total_spectral:2.3930e+00 L1_fnorm:5.8482e-01 L2_fnorm:5.0145e-01 L3_fnorm:4.5423e-01 L4_fnorm:5.4971e-01 L5_fnorm:5.8518e-01 L6_fnorm:5.8812e-01 L7_fnorm:5.9696e-01 L8_fnorm:5.9706e-01 L9_fnorm:5.9577e-01 L10_fnorm:5.9653e-01 L11_fnorm:5.9255e-01 L12_fnorm:6.0144e-01 L1_l1linf:4.3931e-01 L2_l1linf:4.2121e-01 L3_l1linf:4.5328e-01 L4_l1linf:4.0157e-01 L5_l1linf:4.0725e-01 L6_l1linf:4.1198e-01 L7_l1linf:4.0819e-01 L8_l1linf:4.1058e-01 L9_l1linf:4.1060e-01 L10_l1linf:4.1133e-01 L11_l1linf:4.0719e-01 L12_l1linf:4.0275e-01 L1_spectral:1.2046e-02 L2_spectral:1.2062e-02 L3_spectral:1.7664e-02 L4_spectral:1.2042e-02 L5_spectral:1.2051e-02 L6_spectral:1.2048e-02 L7_spectral:1.2051e-02 L8_spectral:1.2050e-02 L9_spectral:1.2055e-02 L10_spectral:1.2046e-02 L11_spectral:1.2045e-02 L12_spectral:1.2043e-02 v_norm:2.3930e+00 cos_v_-g_hvp:3.5457e-02 g_hvp_norm:6.1492e-01 cos_v_-g_t:3.9867e-02 g_t_norm:5.5189e-01 hv_norm:8.2997e-01 cos_v_hv:1.7019e-02 hg_norm:8.5612e+01 cos_g_hg:2.0171e-01 v_par:6.3011e-03 v_perp:2.3930e+00 L1_cos_v_neg_g:2.5542e-02 L1_v_norm:5.8482e-01 L2_cos_v_neg_g:2.6440e-02 L2_v_norm:5.0145e-01 L3_cos_v_neg_g:2.9126e-02 L3_v_norm:4.5423e-01 L4_cos_v_neg_g:3.2625e-02 L4_v_norm:5.4971e-01 L5_cos_v_neg_g:3.4199e-02 L5_v_norm:5.8518e-01 L6_cos_v_neg_g:3.5728e-02 L6_v_norm:5.8812e-01 L7_cos_v_neg_g:3.5582e-02 L7_v_norm:5.9696e-01 L8_cos_v_neg_g:3.7048e-02 L8_v_norm:5.9706e-01 L9_cos_v_neg_g:3.6682e-02 
L9_v_norm:5.9577e-01 L10_cos_v_neg_g:3.9500e-02 L10_v_norm:5.9653e-01 L11_cos_v_neg_g:4.9258e-02 L11_v_norm:5.9255e-01 L12_cos_v_neg_g:7.2687e-02 L12_v_norm:6.0144e-01 +step:3000 train loss:3.529516 +step:3001 train loss:3.598372 +step:3002 train loss:3.639766 +step:3003 train loss:3.606676 +step:3004 train loss:3.620482 +step:3005 train loss:3.608593 +step:3006 train loss:3.624619 +step:3007 train loss:3.658924 +step:3008 train loss:3.631883 +step:3009 train loss:3.521714 +step:3010 train loss:3.605637 +step:3011 train loss:3.590659 +step:3012 train loss:3.556413 +step:3013 train loss:3.581062 +step:3014 train loss:3.542829 +step:3015 train loss:3.591568 +step:3016 train loss:3.588283 +step:3017 train loss:3.663419 +step:3018 train loss:3.618166 +step:3019 train loss:3.541294 +step:3020 train loss:3.603934 +step:3021 train loss:3.594727 +step:3022 train loss:3.563187 +step:3023 train loss:3.560835 +step:3024 train loss:3.582343 +step:3025 train loss:3.614769 +step:3026 train loss:3.616732 +step:3027 train loss:3.550331 +step:3028 train loss:3.626913 +step:3029 train loss:3.564336 +step:3030 train loss:3.629567 +step:3031 train loss:3.558028 +step:3032 train loss:3.574271 +step:3033 train loss:3.671963 +step:3034 train loss:3.534469 +step:3035 train loss:3.636531 +step:3036 train loss:3.580249 +step:3037 train loss:3.543967 +step:3038 train loss:3.602539 +step:3039 train loss:3.538265 +step:3040 train loss:3.598456 +step:3041 train loss:3.607010 +step:3042 train loss:3.562636 +step:3043 train loss:3.585320 +step:3044 train loss:3.511576 +step:3045 train loss:3.604353 +step:3046 train loss:3.680089 +step:3047 train loss:3.652654 +step:3048 train loss:3.609156 +step:3049 train loss:3.627860 +step:3050 train loss:3.585052 +step:3051 train loss:3.590386 +step:3052 train loss:3.600185 +step:3053 train loss:3.597140 +step:3054 train loss:3.554079 +step:3055 train loss:3.522980 +step:3056 train loss:3.599796 +step:3057 train loss:3.648299 +step:3058 train loss:3.633796 +step:3059 train loss:3.627688 +step:3060 train loss:3.616270 +step:3061 train loss:3.587371 +step:3062 train loss:3.588972 +step:3063 train loss:3.533657 +step:3064 train loss:3.613173 +step:3065 train loss:3.534244 +step:3066 train loss:3.578050 +step:3067 train loss:3.563521 +step:3068 train loss:3.482643 +step:3069 train loss:3.625167 +step:3070 train loss:3.576275 +step:3071 train loss:3.619131 +step:3072 train loss:3.592841 +step:3073 train loss:3.831976 +step:3074 train loss:3.604136 +step:3075 train loss:3.540377 +step:3076 train loss:3.619521 +step:3077 train loss:3.525148 +step:3078 train loss:3.591222 +step:3079 train loss:3.640885 +step:3080 train loss:3.532518 +step:3081 train loss:3.651552 +step:3082 train loss:3.545571 +step:3083 train loss:3.600894 +step:3084 train loss:3.578354 +step:3085 train loss:3.571348 +step:3086 train loss:3.672035 +step:3087 train loss:3.579849 +step:3088 train loss:3.575150 +step:3089 train loss:3.634166 +step:3090 train loss:3.525489 +step:3091 train loss:3.601875 +step:3092 train loss:3.518623 +step:3093 train loss:3.591517 +step:3094 train loss:3.581597 +step:3095 train loss:3.564405 +step:3096 train loss:3.564395 +step:3097 train loss:3.503569 +step:3098 train loss:3.637166 +step:3099 train loss:3.614110 +step:3100 train loss:3.557193 +step:3101 train loss:3.541439 +step:3102 train loss:3.636347 +step:3103 train loss:3.609269 +step:3104 train loss:3.602229 +step:3105 train loss:3.574740 +step:3106 train loss:3.582323 +step:3107 train loss:3.554817 +step:3108 train loss:3.610179 
+step:3109 train loss:3.561384 +step:3110 train loss:3.604332 +step:3111 train loss:3.638901 +step:3112 train loss:3.556524 +step:3113 train loss:3.613171 +step:3114 train loss:3.558169 +step:3115 train loss:3.558533 +step:3116 train loss:3.609456 +step:3117 train loss:3.604993 +step:3118 train loss:3.574834 +step:3119 train loss:3.504907 +step:3120 train loss:3.601797 +step:3121 train loss:3.576386 +step:3122 train loss:3.603553 +step:3123 train loss:3.567941 +step:3124 train loss:3.602629 +step:3125 train loss:3.564736 +step:3126 train loss:3.518312 +step:3127 train loss:3.575511 +step:3128 train loss:3.571652 +step:3129 train loss:3.553377 +step:3130 train loss:3.554020 +step:3131 train loss:3.566044 +step:3132 train loss:3.631622 +step:3133 train loss:3.550238 +step:3134 train loss:3.610543 +step:3135 train loss:3.563535 +step:3136 train loss:3.565386 +step:3137 train loss:3.607732 +step:3138 train loss:3.532584 +step:3139 train loss:3.635601 +step:3140 train loss:3.518052 +step:3141 train loss:3.638247 +step:3142 train loss:3.554908 +step:3143 train loss:3.573087 +step:3144 train loss:3.558997 +step:3145 train loss:3.568067 +step:3146 train loss:3.549034 +step:3147 train loss:3.522392 +step:3148 train loss:3.616327 +step:3149 train loss:3.547346 +step:3150 train loss:3.592515 +step:3151 train loss:3.597277 +step:3152 train loss:3.556089 +step:3153 train loss:3.568701 +step:3154 train loss:3.559229 +step:3155 train loss:3.564155 +step:3156 train loss:3.634431 +step:3157 train loss:3.641348 +step:3158 train loss:3.613356 +step:3159 train loss:3.590065 +step:3160 train loss:3.595564 +step:3161 train loss:3.674769 +step:3162 train loss:3.650514 +step:3163 train loss:3.659962 +step:3164 train loss:3.650858 +step:3165 train loss:3.591335 +step:3166 train loss:3.578650 +step:3167 train loss:3.565169 +step:3168 train loss:3.672569 +step:3169 train loss:3.572879 +step:3170 train loss:3.630552 +step:3171 train loss:3.649172 +step:3172 train loss:3.621658 +step:3173 train loss:3.645476 +step:3174 train loss:3.639259 +step:3175 train loss:3.597256 +step:3176 train loss:3.571105 +step:3177 train loss:3.531548 +step:3178 train loss:3.635108 +step:3179 train loss:3.588769 +step:3180 train loss:3.572632 +step:3181 train loss:3.616175 +step:3182 train loss:3.619926 +step:3183 train loss:3.620622 +step:3184 train loss:3.622252 +step:3185 train loss:3.563284 +step:3186 train loss:3.642669 +step:3187 train loss:3.546906 +step:3188 train loss:3.598890 +step:3189 train loss:3.745737 +step:3190 train loss:3.582770 +step:3191 train loss:3.580797 +step:3192 train loss:3.575920 +step:3193 train loss:3.600342 +step:3194 train loss:3.582274 +step:3195 train loss:3.679143 +step:3196 train loss:3.606927 +step:3197 train loss:3.545093 +step:3198 train loss:3.612845 +step:3199 train loss:3.599266 +step:3200 train loss:3.569456 +step:3201 train loss:3.623496 +step:3202 train loss:3.532911 +step:3203 train loss:3.644231 +step:3204 train loss:3.591135 +step:3205 train loss:3.614599 +step:3206 train loss:3.632682 +step:3207 train loss:3.723845 +step:3208 train loss:3.683793 +step:3209 train loss:3.584188 +step:3210 train loss:3.625491 +step:3211 train loss:3.593045 +step:3212 train loss:3.615272 +step:3213 train loss:3.641209 +step:3214 train loss:3.643244 +step:3215 train loss:3.573655 +step:3216 train loss:3.585327 +step:3217 train loss:3.614532 +step:3218 train loss:3.616777 +step:3219 train loss:3.574891 +step:3220 train loss:3.623372 +step:3221 train loss:3.616577 +step:3222 train loss:3.556478 +step:3223 train 
loss:3.660181 +step:3224 train loss:3.574343 +step:3225 train loss:3.605457 +step:3226 train loss:3.568465 +step:3227 train loss:3.627667 +step:3228 train loss:3.590883 +step:3229 train loss:3.559216 +step:3230 train loss:3.567865 +step:3231 train loss:3.592681 +step:3232 train loss:3.593019 +step:3233 train loss:3.563652 +step:3234 train loss:3.569727 +step:3235 train loss:3.683475 +step:3236 train loss:3.617720 +step:3237 train loss:3.588083 +step:3238 train loss:3.627095 +step:3239 train loss:3.584605 +step:3240 train loss:3.595664 +step:3241 train loss:3.546257 +step:3242 train loss:3.597486 +step:3243 train loss:3.627179 +step:3244 train loss:3.576769 +step:3245 train loss:3.571542 +step:3246 train loss:3.587571 +step:3247 train loss:3.650640 +step:3248 train loss:3.585570 +step:3249 train loss:3.574607 +step:3250 validation loss:3.538795 +step:3250 train loss:3.615924 +step:3251 train loss:3.615529 +step:3252 train loss:3.575769 +step:3253 train loss:3.559345 +step:3254 train loss:3.583204 +step:3255 train loss:3.596334 +step:3256 train loss:3.563091 +step:3257 train loss:3.672533 +step:3258 train loss:3.679253 +step:3259 train loss:3.615463 +step:3260 train loss:3.588681 +step:3261 train loss:3.582308 +step:3262 train loss:3.553870 +step:3263 train loss:3.593950 +step:3264 train loss:3.628078 +step:3265 train loss:3.571992 +step:3266 train loss:3.533043 +step:3267 train loss:3.584936 +step:3268 train loss:3.572826 +step:3269 train loss:3.611145 +step:3270 train loss:3.638324 +step:3271 train loss:3.599084 +step:3272 train loss:3.580471 +step:3273 train loss:3.624873 +step:3274 train loss:3.727642 +step:3275 train loss:3.613090 +step:3276 train loss:3.564002 +step:3277 train loss:3.604179 +step:3278 train loss:3.603818 +step:3279 train loss:3.601061 +step:3280 train loss:3.582253 +step:3281 train loss:3.605298 +step:3282 train loss:3.587929 +step:3283 train loss:3.516123 +step:3284 train loss:3.546759 +step:3285 train loss:3.582960 +step:3286 train loss:3.614504 +step:3287 train loss:3.597857 +step:3288 train loss:3.605338 +step:3289 train loss:3.604846 +step:3290 train loss:3.634472 +step:3291 train loss:3.551574 +step:3292 train loss:3.622621 +step:3293 train loss:3.575856 +step:3294 train loss:3.636208 +step:3295 train loss:3.560007 +step:3296 train loss:3.657539 +step:3297 train loss:3.599832 +step:3298 train loss:3.622303 +step:3299 train loss:3.549973 +step:3300 train loss:3.581596 +step:3301 train loss:3.594997 +step:3302 train loss:3.684217 +step:3303 train loss:3.606945 +step:3304 train loss:3.588063 +step:3305 train loss:3.572765 +step:3306 train loss:3.572288 +step:3307 train loss:3.577454 +step:3308 train loss:3.672213 +step:3309 train loss:3.606933 +step:3310 train loss:3.583575 +step:3311 train loss:3.639100 +step:3312 train loss:3.667935 +step:3313 train loss:3.557927 +step:3314 train loss:3.629320 +step:3315 train loss:3.572913 +step:3316 train loss:3.629428 +step:3317 train loss:3.627440 +step:3318 train loss:3.621724 +step:3319 train loss:3.621747 +step:3320 train loss:3.576676 +step:3321 train loss:3.556649 +step:3322 train loss:3.593334 +step:3323 train loss:3.551377 +step:3324 train loss:3.565553 +step:3325 train loss:3.665349 +step:3326 train loss:3.526894 +step:3327 train loss:3.594646 +step:3328 train loss:3.590626 +step:3329 train loss:3.546133 +step:3330 train loss:3.524406 +step:3331 train loss:3.664043 +step:3332 train loss:3.631304 +step:3333 train loss:3.587745 +step:3334 train loss:3.603618 +step:3335 train loss:3.657160 +step:3336 train loss:3.619232 
+step:3337 train loss:3.717621 +step:3338 train loss:3.567204 +step:3339 train loss:3.654000 +step:3340 train loss:3.656941 +step:3341 train loss:3.541319 +step:3342 train loss:3.577890 +step:3343 train loss:3.587549 +step:3344 train loss:3.511258 +step:3345 train loss:3.621633 +step:3346 train loss:3.623367 +step:3347 train loss:3.604366 +step:3348 train loss:3.588284 +step:3349 train loss:3.566170 +step:3350 train loss:3.622758 +step:3351 train loss:3.594265 +step:3352 train loss:3.609430 +step:3353 train loss:3.561337 +step:3354 train loss:3.612033 +step:3355 train loss:3.553837 +step:3356 train loss:3.574470 +step:3357 train loss:3.647910 +step:3358 train loss:3.562344 +step:3359 train loss:3.527063 +step:3360 train loss:3.617756 +step:3361 train loss:3.564826 +step:3362 train loss:3.621621 +step:3363 train loss:3.584102 +step:3364 train loss:3.572264 +step:3365 train loss:3.591070 +step:3366 train loss:3.586601 +step:3367 train loss:3.553466 +step:3368 train loss:3.556951 +step:3369 train loss:3.518096 +step:3370 train loss:3.590995 +step:3371 train loss:3.585278 +step:3372 train loss:3.589395 +step:3373 train loss:3.629225 +step:3374 train loss:3.591215 +step:3375 train loss:3.606078 +step:3376 train loss:3.537640 +step:3377 train loss:3.550770 +step:3378 train loss:3.524410 +step:3379 train loss:3.569736 +step:3380 train loss:3.606195 +step:3381 train loss:3.600315 +step:3382 train loss:3.527063 +step:3383 train loss:3.619087 +step:3384 train loss:3.576778 +step:3385 train loss:3.560802 +step:3386 train loss:3.607834 +step:3387 train loss:3.583735 +step:3388 train loss:3.611590 +step:3389 train loss:3.520710 +step:3390 train loss:3.580000 +step:3391 train loss:3.622246 +step:3392 train loss:3.568610 +step:3393 train loss:3.530233 +step:3394 train loss:3.578364 +step:3395 train loss:3.582540 +step:3396 train loss:3.609135 +step:3397 train loss:3.719714 +step:3398 train loss:3.512788 +step:3399 train loss:3.569047 +step:3400 train loss:3.542827 +step:3401 train loss:3.588818 +step:3402 train loss:3.579570 +step:3403 train loss:3.661104 +step:3404 train loss:3.569921 +step:3405 train loss:3.614614 +step:3406 train loss:3.560274 +step:3407 train loss:3.606478 +step:3408 train loss:3.625950 +step:3409 train loss:3.642337 +step:3410 train loss:3.666170 +step:3411 train loss:3.603206 +step:3412 train loss:3.574375 +step:3413 train loss:3.531776 +step:3414 train loss:3.553216 +step:3415 train loss:3.579797 +step:3416 train loss:3.655598 +step:3417 train loss:3.592566 +step:3418 train loss:3.581583 +step:3419 train loss:3.563770 +step:3420 train loss:3.654842 +step:3421 train loss:3.635346 +step:3422 train loss:3.598987 +step:3423 train loss:3.557343 +step:3424 train loss:3.621098 +step:3425 train loss:3.604123 +step:3426 train loss:3.637041 +step:3427 train loss:3.607364 +step:3428 train loss:3.547330 +step:3429 train loss:3.609782 +step:3430 train loss:3.632199 +step:3431 train loss:3.564352 +step:3432 train loss:3.580146 +step:3433 train loss:3.609861 +step:3434 train loss:3.567176 +step:3435 train loss:3.523590 +step:3436 train loss:3.577173 +step:3437 train loss:3.599948 +step:3438 train loss:3.585697 +step:3439 train loss:3.545946 +step:3440 train loss:3.593273 +step:3441 train loss:3.615453 +step:3442 train loss:3.534045 +step:3443 train loss:3.520715 +step:3444 train loss:3.535215 +step:3445 train loss:3.546849 +step:3446 train loss:3.573195 +step:3447 train loss:3.546872 +step:3448 train loss:3.564741 +step:3449 train loss:3.590289 +step:3450 train loss:3.611732 +step:3451 train 
loss:3.578176 +step:3452 train loss:3.519303 +step:3453 train loss:3.558402 +step:3454 train loss:3.602358 +step:3455 train loss:3.566029 +step:3456 train loss:3.556073 +step:3457 train loss:3.557123 +step:3458 train loss:3.579307 +step:3459 train loss:3.813020 +step:3460 train loss:3.579326 +step:3461 train loss:3.587587 +step:3462 train loss:3.568110 +step:3463 train loss:3.641885 +step:3464 train loss:3.602557 +step:3465 train loss:3.594408 +step:3466 train loss:3.523497 +step:3467 train loss:3.568826 +step:3468 train loss:3.548826 +step:3469 train loss:3.586241 +step:3470 train loss:3.532525 +step:3471 train loss:3.595556 +step:3472 train loss:3.590297 +step:3473 train loss:3.625551 +step:3474 train loss:3.574344 +step:3475 train loss:3.543962 +step:3476 train loss:3.631030 +step:3477 train loss:3.569391 +step:3478 train loss:3.638588 +step:3479 train loss:3.528088 +step:3480 train loss:3.673071 +step:3481 train loss:3.582835 +step:3482 train loss:3.603059 +step:3483 train loss:3.564825 +step:3484 train loss:3.577857 +step:3485 train loss:3.599618 +step:3486 train loss:3.564403 +step:3487 train loss:3.658133 +step:3488 train loss:3.523391 +step:3489 train loss:3.557489 +step:3490 train loss:3.572997 +step:3491 train loss:3.530210 +step:3492 train loss:3.571694 +step:3493 train loss:3.723542 +step:3494 train loss:3.566236 +step:3495 train loss:3.566796 +step:3496 train loss:3.508825 +step:3497 train loss:3.528243 +step:3498 train loss:3.593715 +step:3499 train loss:3.564897 +step:3500 validation loss:3.521209 total_sharp:5.4580e-03 L1_sharp:5.0831e-03 L2_sharp:4.8967e-03 L3_sharp:3.0891e-03 L4_sharp:1.2783e-03 L5_sharp:1.1738e-03 L6_sharp:1.4158e-03 L7_sharp:1.7084e-03 L8_sharp:1.2445e-03 L9_sharp:8.1932e-04 L10_sharp:4.4859e-04 L11_sharp:4.8573e-04 L12_sharp:3.7857e-04 total_fnorm:2.3758e+00 total_l1_linf:2.0293e+04 total_spectral:2.3758e+00 L1_fnorm:5.7518e-01 L2_fnorm:4.7603e-01 L3_fnorm:4.1633e-01 L4_fnorm:5.5008e-01 L5_fnorm:5.8512e-01 L6_fnorm:5.9149e-01 L7_fnorm:5.9670e-01 L8_fnorm:5.9914e-01 L9_fnorm:5.9891e-01 L10_fnorm:5.9688e-01 L11_fnorm:5.9093e-01 L12_fnorm:6.0159e-01 L1_l1linf:4.4025e-01 L2_l1linf:4.3691e-01 L3_l1linf:5.4702e-01 L4_l1linf:4.0238e-01 L5_l1linf:4.0759e-01 L6_l1linf:4.1221e-01 L7_l1linf:4.0763e-01 L8_l1linf:4.1114e-01 L9_l1linf:4.1159e-01 L10_l1linf:4.1555e-01 L11_l1linf:4.0702e-01 L12_l1linf:3.9542e-01 L1_spectral:1.2051e-02 L2_spectral:1.2237e-02 L3_spectral:2.1186e-02 L4_spectral:1.2047e-02 L5_spectral:1.2056e-02 L6_spectral:1.2052e-02 L7_spectral:1.2054e-02 L8_spectral:1.2056e-02 L9_spectral:1.2060e-02 L10_spectral:1.2045e-02 L11_spectral:1.2046e-02 L12_spectral:1.2047e-02 v_norm:2.3758e+00 cos_v_-g_hvp:3.1244e-02 g_hvp_norm:6.6285e-01 cos_v_-g_t:3.9031e-02 g_t_norm:5.5886e-01 hv_norm:7.0819e-01 cos_v_hv:1.8310e-02 hg_norm:4.7970e+01 cos_g_hg:4.3068e-01 v_par:5.2873e-03 v_perp:2.3758e+00 L1_cos_v_neg_g:2.1939e-02 L1_v_norm:5.7518e-01 L2_cos_v_neg_g:2.8880e-02 L2_v_norm:4.7603e-01 L3_cos_v_neg_g:3.0718e-02 L3_v_norm:4.1633e-01 L4_cos_v_neg_g:3.0902e-02 L4_v_norm:5.5008e-01 L5_cos_v_neg_g:3.3337e-02 L5_v_norm:5.8512e-01 L6_cos_v_neg_g:3.3850e-02 L6_v_norm:5.9149e-01 L7_cos_v_neg_g:3.3147e-02 L7_v_norm:5.9670e-01 L8_cos_v_neg_g:3.4327e-02 L8_v_norm:5.9914e-01 L9_cos_v_neg_g:3.4303e-02 L9_v_norm:5.9891e-01 L10_cos_v_neg_g:3.7042e-02 L10_v_norm:5.9688e-01 L11_cos_v_neg_g:4.6644e-02 L11_v_norm:5.9093e-01 L12_cos_v_neg_g:7.4501e-02 L12_v_norm:6.0159e-01 +step:3500 train loss:3.559973 +step:3501 train loss:3.574153 +step:3502 train loss:3.533967 +step:3503 train 
loss:3.555995 +step:3504 train loss:3.570786 +step:3505 train loss:3.554377 +step:3506 train loss:3.585466 +step:3507 train loss:3.551180 +step:3508 train loss:3.552133 +step:3509 train loss:3.603854 +step:3510 train loss:3.525984 +step:3511 train loss:3.609479 +step:3512 train loss:3.644238 +step:3513 train loss:3.619505 +step:3514 train loss:3.682944 +step:3515 train loss:3.534984 +step:3516 train loss:3.571086 +step:3517 train loss:3.555385 +step:3518 train loss:3.578961 +step:3519 train loss:3.566417 +step:3520 train loss:3.539122 +step:3521 train loss:3.597148 +step:3522 train loss:3.561213 +step:3523 train loss:3.526839 +step:3524 train loss:3.558118 +step:3525 train loss:3.526577 +step:3526 train loss:3.554697 +step:3527 train loss:3.605640 +step:3528 train loss:3.581118 +step:3529 train loss:3.535799 +step:3530 train loss:3.502039 +step:3531 train loss:3.596910 +step:3532 train loss:3.543778 +step:3533 train loss:3.532610 +step:3534 train loss:3.560861 +step:3535 train loss:3.574764 +step:3536 train loss:3.565720 +step:3537 train loss:3.593501 +step:3538 train loss:3.542928 +step:3539 train loss:3.567113 +step:3540 train loss:3.562184 +step:3541 train loss:3.586855 +step:3542 train loss:3.569414 +step:3543 train loss:3.586830 +step:3544 train loss:3.500898 +step:3545 train loss:3.555649 +step:3546 train loss:3.502845 +step:3547 train loss:3.512123 +step:3548 train loss:3.547703 +step:3549 train loss:3.549146 +step:3550 train loss:3.527056 +step:3551 train loss:3.602545 +step:3552 train loss:3.588432 +step:3553 train loss:3.570139 +step:3554 train loss:3.645652 +step:3555 train loss:3.540087 +step:3556 train loss:3.527755 +step:3557 train loss:3.559001 +step:3558 train loss:3.545008 +step:3559 train loss:3.595481 +step:3560 train loss:3.655485 +step:3561 train loss:3.571568 +step:3562 train loss:3.573416 +step:3563 train loss:3.668313 +step:3564 train loss:3.507002 +step:3565 train loss:3.532796 +step:3566 train loss:3.568964 +step:3567 train loss:3.620458 +step:3568 train loss:3.556183 +step:3569 train loss:3.552448 +step:3570 train loss:3.571183 +step:3571 train loss:3.549511 +step:3572 train loss:3.622558 +step:3573 train loss:3.567489 +step:3574 train loss:3.575666 +step:3575 train loss:3.526269 +step:3576 train loss:3.543813 +step:3577 train loss:3.553868 +step:3578 train loss:3.576705 +step:3579 train loss:3.489093 +step:3580 train loss:3.542765 +step:3581 train loss:3.531692 +step:3582 train loss:3.496704 +step:3583 train loss:3.552530 +step:3584 train loss:3.538538 +step:3585 train loss:3.570269 +step:3586 train loss:3.531898 +step:3587 train loss:3.541945 +step:3588 train loss:3.528506 +step:3589 train loss:3.551528 +step:3590 train loss:3.587517 +step:3591 train loss:3.570138 +step:3592 train loss:3.613872 +step:3593 train loss:3.576147 +step:3594 train loss:3.519848 +step:3595 train loss:3.626543 +step:3596 train loss:3.578645 +step:3597 train loss:3.506197 +step:3598 train loss:3.597718 +step:3599 train loss:3.547040 +step:3600 train loss:3.535536 +step:3601 train loss:3.548922 +step:3602 train loss:3.534374 +step:3603 train loss:3.493714 +step:3604 train loss:3.625148 +step:3605 train loss:3.538224 +step:3606 train loss:3.611321 +step:3607 train loss:3.629089 +step:3608 train loss:3.570673 +step:3609 train loss:3.709743 +step:3610 train loss:3.595732 +step:3611 train loss:3.562504 +step:3612 train loss:3.544733 +step:3613 train loss:3.512287 +step:3614 train loss:3.532881 +step:3615 train loss:3.557387 +step:3616 train loss:3.550724 +step:3617 train loss:3.498866 
+step:3618 train loss:3.548792 +step:3619 train loss:3.538362 +step:3620 train loss:3.564630 +step:3621 train loss:3.650323 +step:3622 train loss:3.513038 +step:3623 train loss:3.468319 +step:3624 train loss:3.600377 +step:3625 train loss:3.564389 +step:3626 train loss:3.473587 +step:3627 train loss:3.571180 +step:3628 train loss:3.569983 +step:3629 train loss:3.544676 +step:3630 train loss:3.538139 +step:3631 train loss:3.541576 +step:3632 train loss:3.524133 +step:3633 train loss:3.566240 +step:3634 train loss:3.537142 +step:3635 train loss:3.592609 +step:3636 train loss:3.591602 +step:3637 train loss:3.673484 +step:3638 train loss:3.501558 +step:3639 train loss:3.571864 +step:3640 train loss:3.564225 +step:3641 train loss:3.552738 +step:3642 train loss:3.547396 +step:3643 train loss:3.547017 +step:3644 train loss:3.556330 +step:3645 train loss:3.559387 +step:3646 train loss:3.552268 +step:3647 train loss:3.538054 +step:3648 train loss:3.591158 +step:3649 train loss:3.610816 +step:3650 train loss:3.523517 +step:3651 train loss:3.596230 +step:3652 train loss:3.536534 +step:3653 train loss:3.543494 +step:3654 train loss:3.532013 +step:3655 train loss:3.504606 +step:3656 train loss:3.553724 +step:3657 train loss:3.525239 +step:3658 train loss:3.621279 +step:3659 train loss:3.554112 +step:3660 train loss:3.509511 +step:3661 train loss:3.534374 +step:3662 train loss:3.565299 +step:3663 train loss:3.586216 +step:3664 train loss:3.552820 +step:3665 train loss:3.538416 +step:3666 train loss:3.533530 +step:3667 train loss:3.543195 +step:3668 train loss:3.537935 +step:3669 train loss:3.595254 +step:3670 train loss:3.565692 +step:3671 train loss:3.579892 +step:3672 train loss:3.584350 +step:3673 train loss:3.530303 +step:3674 train loss:3.541518 +step:3675 train loss:3.563761 +step:3676 train loss:3.512448 +step:3677 train loss:3.503134 +step:3678 train loss:3.606565 +step:3679 train loss:3.570410 +step:3680 train loss:3.567297 +step:3681 train loss:3.606516 +step:3682 train loss:3.502099 +step:3683 train loss:3.510498 +step:3684 train loss:3.552411 +step:3685 train loss:3.575404 +step:3686 train loss:3.553765 +step:3687 train loss:3.945961 +step:3688 train loss:3.594166 +step:3689 train loss:3.542001 +step:3690 train loss:3.539960 +step:3691 train loss:3.579706 +step:3692 train loss:3.572615 +step:3693 train loss:3.531563 +step:3694 train loss:3.555110 +step:3695 train loss:3.498526 +step:3696 train loss:3.500348 +step:3697 train loss:3.558093 +step:3698 train loss:3.530927 +step:3699 train loss:3.579626 +step:3700 train loss:3.611610 +step:3701 train loss:3.532351 +step:3702 train loss:3.528835 +step:3703 train loss:3.551000 +step:3704 train loss:3.549093 +step:3705 train loss:3.497467 +step:3706 train loss:3.521548 +step:3707 train loss:3.547244 +step:3708 train loss:3.537271 +step:3709 train loss:3.473972 +step:3710 train loss:3.533079 +step:3711 train loss:3.524081 +step:3712 train loss:3.534194 +step:3713 train loss:3.525074 +step:3714 train loss:3.492293 +step:3715 train loss:3.510553 +step:3716 train loss:3.562958 +step:3717 train loss:3.541282 +step:3718 train loss:3.585607 +step:3719 train loss:3.489911 +step:3720 train loss:3.611557 +step:3721 train loss:3.625809 +step:3722 train loss:3.523572 +step:3723 train loss:3.508221 +step:3724 train loss:3.685060 +step:3725 train loss:3.571437 +step:3726 train loss:3.591638 +step:3727 train loss:3.549616 +step:3728 train loss:3.557616 +step:3729 train loss:3.666522 +step:3730 train loss:3.863863 +step:3731 train loss:3.550662 +step:3732 train 
loss:3.559422 +step:3733 train loss:3.649273 +step:3734 train loss:3.602468 +step:3735 train loss:3.595063 +step:3736 train loss:3.592591 +step:3737 train loss:3.564339 +step:3738 train loss:3.572704 +step:3739 train loss:3.544698 +step:3740 train loss:3.560297 +step:3741 train loss:3.636897 +step:3742 train loss:3.553866 +step:3743 train loss:3.590701 +step:3744 train loss:3.489597 +step:3745 train loss:3.542423 +step:3746 train loss:3.574162 +step:3747 train loss:3.589371 +step:3748 train loss:3.650823 +step:3749 train loss:3.547601 +step:3750 validation loss:3.521504 +step:3750 train loss:3.536656 +step:3751 train loss:3.592860 +step:3752 train loss:3.576802 +step:3753 train loss:3.515140 +step:3754 train loss:3.565802 +step:3755 train loss:3.531209 +step:3756 train loss:3.545924 +step:3757 train loss:3.508277 +step:3758 train loss:3.490613 +step:3759 train loss:3.593115 +step:3760 train loss:3.606241 +step:3761 train loss:3.524533 +step:3762 train loss:3.582628 +step:3763 train loss:3.520827 +step:3764 train loss:3.542557 +step:3765 train loss:3.575335 +step:3766 train loss:3.535117 +step:3767 train loss:3.551123 +step:3768 train loss:3.560170 +step:3769 train loss:3.604189 +step:3770 train loss:3.589859 +step:3771 train loss:3.478111 +step:3772 train loss:3.598480 +step:3773 train loss:3.535700 +step:3774 train loss:3.601496 +step:3775 train loss:3.598409 +step:3776 train loss:3.564504 +step:3777 train loss:3.653684 +step:3778 train loss:3.555652 +step:3779 train loss:3.604871 +step:3780 train loss:3.569969 +step:3781 train loss:3.487148 +step:3782 train loss:3.600468 +step:3783 train loss:3.563030 +step:3784 train loss:3.515570 +step:3785 train loss:3.602371 +step:3786 train loss:3.560070 +step:3787 train loss:3.578770 +step:3788 train loss:3.557156 +step:3789 train loss:3.578117 +step:3790 train loss:3.564432 +step:3791 train loss:3.512189 +step:3792 train loss:3.606477 +step:3793 train loss:3.525869 +step:3794 train loss:3.551618 +step:3795 train loss:3.550719 +step:3796 train loss:3.572215 +step:3797 train loss:3.566393 +step:3798 train loss:3.522787 +step:3799 train loss:3.541183 +step:3800 train loss:3.570256 +step:3801 train loss:3.558914 +step:3802 train loss:3.558816 +step:3803 train loss:3.508294 +step:3804 train loss:3.586665 +step:3805 train loss:3.477770 +step:3806 train loss:3.564687 +step:3807 train loss:3.552011 +step:3808 train loss:3.527156 +step:3809 train loss:3.594581 +step:3810 train loss:3.600112 +step:3811 train loss:3.555345 +step:3812 train loss:3.565079 +step:3813 train loss:3.564820 +step:3814 train loss:3.529976 +step:3815 train loss:3.561266 +step:3816 train loss:3.537511 +step:3817 train loss:3.543096 +step:3818 train loss:3.503930 +step:3819 train loss:3.477789 +step:3820 train loss:3.553741 +step:3821 train loss:3.639747 +step:3822 train loss:3.596482 +step:3823 train loss:3.520780 +step:3824 train loss:3.563201 +step:3825 train loss:3.563536 +step:3826 train loss:3.523249 +step:3827 train loss:3.521066 +step:3828 train loss:3.543703 +step:3829 train loss:3.517134 +step:3830 train loss:3.588298 +step:3831 train loss:3.520980 +step:3832 train loss:3.520671 +step:3833 train loss:3.532891 +step:3834 train loss:3.509588 +step:3835 train loss:3.471710 +step:3836 train loss:3.577046 +step:3837 train loss:3.513499 +step:3838 train loss:3.511904 +step:3839 train loss:3.529654 +step:3840 train loss:3.560782 +step:3841 train loss:3.511141 +step:3842 train loss:3.590381 +step:3843 train loss:3.575548 +step:3844 train loss:3.519835 +step:3845 train loss:3.490667 
+step:3846 train loss:3.497144 +step:3847 train loss:3.590482 +step:3848 train loss:3.611281 +step:3849 train loss:3.520884 +step:3850 train loss:3.527821 +step:3851 train loss:3.511758 +step:3852 train loss:3.567766 +step:3853 train loss:3.536962 +step:3854 train loss:3.538187 +step:3855 train loss:3.515304 +step:3856 train loss:3.561588 +step:3857 train loss:3.598559 +step:3858 train loss:3.506359 +step:3859 train loss:3.572644 +step:3860 train loss:3.534077 +step:3861 train loss:3.548922 +step:3862 train loss:3.513247 +step:3863 train loss:3.577455 +step:3864 train loss:3.529854 +step:3865 train loss:3.528711 +step:3866 train loss:3.530770 +step:3867 train loss:3.557398 +step:3868 train loss:3.636420 +step:3869 train loss:3.529783 +step:3870 train loss:3.562365 +step:3871 train loss:3.506850 +step:3872 train loss:3.590189 +step:3873 train loss:3.515952 +step:3874 train loss:3.498708 +step:3875 train loss:3.582688 +step:3876 train loss:3.515325 +step:3877 train loss:3.537716 +step:3878 train loss:3.545356 +step:3879 train loss:3.555736 +step:3880 train loss:3.536369 +step:3881 train loss:3.568573 +step:3882 train loss:3.518654 +step:3883 train loss:3.528196 +step:3884 train loss:3.511788 +step:3885 train loss:3.617997 +step:3886 train loss:3.613017 +step:3887 train loss:3.534299 +step:3888 train loss:3.480943 +step:3889 train loss:3.537606 +step:3890 train loss:3.463912 +step:3891 train loss:3.517624 +step:3892 train loss:3.558040 +step:3893 train loss:3.526849 +step:3894 train loss:3.540038 +step:3895 train loss:3.522413 +step:3896 train loss:3.507779 +step:3897 train loss:3.559664 +step:3898 train loss:3.617606 +step:3899 train loss:3.565866 +step:3900 train loss:3.554540 +step:3901 train loss:3.579932 +step:3902 train loss:3.510295 +step:3903 train loss:3.519530 +step:3904 train loss:3.522120 +step:3905 train loss:3.558435 +step:3906 train loss:3.526136 +step:3907 train loss:3.555804 +step:3908 train loss:3.520510 +step:3909 train loss:3.587060 +step:3910 train loss:3.580272 +step:3911 train loss:3.573961 +step:3912 train loss:3.606459 +step:3913 train loss:3.642072 +step:3914 train loss:3.484009 +step:3915 train loss:3.578728 +step:3916 train loss:3.544752 +step:3917 train loss:3.528726 +step:3918 train loss:3.552001 +step:3919 train loss:3.541971 +step:3920 train loss:3.636960 +step:3921 train loss:3.549669 +step:3922 train loss:3.602089 +step:3923 train loss:3.494300 +step:3924 train loss:3.616059 +step:3925 train loss:3.567390 +step:3926 train loss:3.564754 +step:3927 train loss:3.541242 +step:3928 train loss:3.491393 +step:3929 train loss:3.600245 +step:3930 train loss:3.647032 +step:3931 train loss:3.568725 +step:3932 train loss:3.607034 +step:3933 train loss:3.582813 +step:3934 train loss:3.607240 +step:3935 train loss:3.539484 +step:3936 train loss:3.477784 +step:3937 train loss:3.458191 +step:3938 train loss:3.586829 +step:3939 train loss:3.563712 +step:3940 train loss:3.549098 +step:3941 train loss:3.522341 +step:3942 train loss:3.586152 +step:3943 train loss:3.601398 +step:3944 train loss:3.557060 +step:3945 train loss:3.573898 +step:3946 train loss:3.539730 +step:3947 train loss:3.561650 +step:3948 train loss:3.549801 +step:3949 train loss:3.560992 +step:3950 train loss:3.564612 +step:3951 train loss:3.541625 +step:3952 train loss:3.657940 +step:3953 train loss:3.559948 +step:3954 train loss:3.591354 +step:3955 train loss:3.550246 +step:3956 train loss:3.587433 +step:3957 train loss:3.526938 +step:3958 train loss:3.566965 +step:3959 train loss:3.509566 +step:3960 train 
loss:3.579449 +step:3961 train loss:3.532743 +step:3962 train loss:3.542072 +step:3963 train loss:3.534110 +step:3964 train loss:3.518129 +step:3965 train loss:3.523606 +step:3966 train loss:3.586293 +step:3967 train loss:3.510285 +step:3968 train loss:3.557405 +step:3969 train loss:3.519959 +step:3970 train loss:3.555865 +step:3971 train loss:3.557989 +step:3972 train loss:3.585249 +step:3973 train loss:3.513572 +step:3974 train loss:3.577954 +step:3975 train loss:3.503465 +step:3976 train loss:3.580859 +step:3977 train loss:3.597107 +step:3978 train loss:3.549321 +step:3979 train loss:3.506088 +step:3980 train loss:3.551883 +step:3981 train loss:3.536237 +step:3982 train loss:3.536261 +step:3983 train loss:3.606217 +step:3984 train loss:3.531002 +step:3985 train loss:3.563030 +step:3986 train loss:3.561064 +step:3987 train loss:3.519600 +step:3988 train loss:3.542044 +step:3989 train loss:3.526320 +step:3990 train loss:3.549962 +step:3991 train loss:3.549597 +step:3992 train loss:3.562206 +step:3993 train loss:3.646663 +step:3994 train loss:3.484982 +step:3995 train loss:3.555407 +step:3996 train loss:3.604924 +step:3997 train loss:3.546542 +step:3998 train loss:3.641224 +step:3999 train loss:3.577208 +step:4000 validation loss:3.500810 total_sharp:6.2609e-03 L1_sharp:1.3169e-02 L2_sharp:6.2011e-03 L3_sharp:4.8004e-03 L4_sharp:1.7017e-03 L5_sharp:1.0141e-03 L6_sharp:1.1517e-03 L7_sharp:1.4563e-03 L8_sharp:1.1037e-03 L9_sharp:7.1419e-04 L10_sharp:4.4966e-04 L11_sharp:5.9255e-04 L12_sharp:6.7925e-04 total_fnorm:2.3704e+00 total_l1_linf:2.0238e+04 total_spectral:2.3704e+00 L1_fnorm:5.6817e-01 L2_fnorm:4.4249e-01 L3_fnorm:4.1228e-01 L4_fnorm:5.4015e-01 L5_fnorm:5.8518e-01 L6_fnorm:5.9109e-01 L7_fnorm:6.0057e-01 L8_fnorm:5.9993e-01 L9_fnorm:5.9895e-01 L10_fnorm:5.9813e-01 L11_fnorm:5.8782e-01 L12_fnorm:6.0046e-01 L1_l1linf:4.3617e-01 L2_l1linf:4.5595e-01 L3_l1linf:5.6036e-01 L4_l1linf:4.3640e-01 L5_l1linf:4.0730e-01 L6_l1linf:4.1052e-01 L7_l1linf:4.0508e-01 L8_l1linf:4.1259e-01 L9_l1linf:4.1280e-01 L10_l1linf:4.1344e-01 L11_l1linf:4.1599e-01 L12_l1linf:4.1480e-01 L1_spectral:1.2045e-02 L2_spectral:1.4509e-02 L3_spectral:2.1645e-02 L4_spectral:1.2040e-02 L5_spectral:1.2049e-02 L6_spectral:1.2050e-02 L7_spectral:1.2046e-02 L8_spectral:1.2046e-02 L9_spectral:1.2053e-02 L10_spectral:1.2046e-02 L11_spectral:1.2043e-02 L12_spectral:1.2049e-02 v_norm:2.3704e+00 cos_v_-g_hvp:2.6916e-02 g_hvp_norm:7.6042e-01 cos_v_-g_t:2.8442e-02 g_t_norm:7.8620e-01 hv_norm:1.0035e+00 cos_v_hv:1.4788e-02 hg_norm:7.4539e+01 cos_g_hg:6.3355e-01 v_par:6.3471e-03 v_perp:2.3704e+00 L1_cos_v_neg_g:1.9512e-02 L1_v_norm:5.6817e-01 L2_cos_v_neg_g:2.7521e-02 L2_v_norm:4.4249e-01 L3_cos_v_neg_g:2.9605e-02 L3_v_norm:4.1228e-01 L4_cos_v_neg_g:2.8334e-02 L4_v_norm:5.4015e-01 L5_cos_v_neg_g:3.0712e-02 L5_v_norm:5.8518e-01 L6_cos_v_neg_g:3.1684e-02 L6_v_norm:5.9109e-01 L7_cos_v_neg_g:3.2679e-02 L7_v_norm:6.0057e-01 L8_cos_v_neg_g:3.3335e-02 L8_v_norm:5.9993e-01 L9_cos_v_neg_g:3.3312e-02 L9_v_norm:5.9895e-01 L10_cos_v_neg_g:3.5776e-02 L10_v_norm:5.9813e-01 L11_cos_v_neg_g:4.5055e-02 L11_v_norm:5.8782e-01 L12_cos_v_neg_g:6.3559e-02 L12_v_norm:6.0046e-01 +step:4000 train loss:3.572426 +step:4001 train loss:3.700848 +step:4002 train loss:3.563863 +step:4003 train loss:3.580180 +step:4004 train loss:3.592199 +step:4005 train loss:3.556542 +step:4006 train loss:3.560386 +step:4007 train loss:3.576000 +step:4008 train loss:3.547380 +step:4009 train loss:3.595251 +step:4010 train loss:3.624587 +step:4011 train loss:3.580432 +step:4012 train 
loss:3.519217 +step:4013 train loss:3.535264 +step:4014 train loss:3.557156 +step:4015 train loss:3.538785 +step:4016 train loss:3.542105 +step:4017 train loss:3.559749 +step:4018 train loss:3.606501 +step:4019 train loss:3.524348 +step:4020 train loss:3.563990 +step:4021 train loss:3.523163 +step:4022 train loss:3.583267 +step:4023 train loss:3.496241 +step:4024 train loss:3.538573 +step:4025 train loss:3.546281 +step:4026 train loss:3.572288 +step:4027 train loss:3.519986 +step:4028 train loss:3.567381 +step:4029 train loss:3.593609 +step:4030 train loss:3.592843 +step:4031 train loss:3.668820 +step:4032 train loss:3.522168 +step:4033 train loss:3.614784 +step:4034 train loss:3.565813 +step:4035 train loss:3.595474 +step:4036 train loss:3.522083 +step:4037 train loss:3.542646 +step:4038 train loss:3.532913 +step:4039 train loss:3.583848 +step:4040 train loss:3.536136 +step:4041 train loss:3.523483 +step:4042 train loss:3.525863 +step:4043 train loss:3.538131 +step:4044 train loss:3.580125 +step:4045 train loss:3.578622 +step:4046 train loss:3.603080 +step:4047 train loss:3.553928 +step:4048 train loss:3.623988 +step:4049 train loss:3.601302 +step:4050 train loss:3.554200 +step:4051 train loss:3.581807 +step:4052 train loss:3.626279 +step:4053 train loss:3.554774 +step:4054 train loss:3.561416 +step:4055 train loss:3.553859 +step:4056 train loss:3.527297 +step:4057 train loss:3.579009 +step:4058 train loss:3.601826 +step:4059 train loss:3.558762 +step:4060 train loss:3.582370 +step:4061 train loss:3.567146 +step:4062 train loss:3.544271 +step:4063 train loss:3.576974 +step:4064 train loss:3.561022 +step:4065 train loss:3.599561 +step:4066 train loss:3.544016 +step:4067 train loss:3.750946 +step:4068 train loss:3.465013 +step:4069 train loss:3.553629 +step:4070 train loss:3.538309 +step:4071 train loss:3.556153 +step:4072 train loss:3.535075 +step:4073 train loss:3.579910 +step:4074 train loss:3.509050 +step:4075 train loss:3.579787 +step:4076 train loss:3.551045 +step:4077 train loss:3.556435 +step:4078 train loss:3.520932 +step:4079 train loss:3.565114 +step:4080 train loss:3.706739 +step:4081 train loss:3.665934 +step:4082 train loss:3.666663 +step:4083 train loss:3.551454 +step:4084 train loss:3.571804 +step:4085 train loss:3.549804 +step:4086 train loss:3.519259 +step:4087 train loss:3.495350 +step:4088 train loss:3.533732 +step:4089 train loss:3.545243 +step:4090 train loss:3.572389 +step:4091 train loss:3.489780 +step:4092 train loss:3.548460 +step:4093 train loss:3.516347 +step:4094 train loss:3.535270 +step:4095 train loss:3.609722 +step:4096 train loss:3.616616 +step:4097 train loss:3.548149 +step:4098 train loss:3.551842 +step:4099 train loss:3.578908 +step:4100 train loss:3.593870 +step:4101 train loss:3.596298 +step:4102 train loss:3.487516 +step:4103 train loss:3.533614 +step:4104 train loss:3.488661 +step:4105 train loss:3.576026 +step:4106 train loss:3.506541 +step:4107 train loss:3.554375 +step:4108 train loss:3.492138 +step:4109 train loss:3.631617 +step:4110 train loss:3.518728 +step:4111 train loss:3.534555 +step:4112 train loss:3.664263 +step:4113 train loss:3.460512 +step:4114 train loss:3.563820 +step:4115 train loss:3.502244 +step:4116 train loss:3.608862 +step:4117 train loss:3.549845 +step:4118 train loss:3.514295 +step:4119 train loss:3.590571 +step:4120 train loss:3.520525 +step:4121 train loss:3.498703 +step:4122 train loss:3.500971 +step:4123 train loss:3.541730 +step:4124 train loss:3.493070 +step:4125 train loss:3.503175 +step:4126 train loss:3.625852 
+step:4127 train loss:3.496503 +step:4128 train loss:3.518939 +step:4129 train loss:3.515817 +step:4130 train loss:3.552335 +step:4131 train loss:3.548331 +step:4132 train loss:3.554402 +step:4133 train loss:3.526008 +step:4134 train loss:3.521227 +step:4135 train loss:3.585285 +step:4136 train loss:3.509246 +step:4137 train loss:3.505836 +step:4138 train loss:3.539062 +step:4139 train loss:3.480743 +step:4140 train loss:3.520148 +step:4141 train loss:3.561345 +step:4142 train loss:3.471617 +step:4143 train loss:3.582954 +step:4144 train loss:3.495569 +step:4145 train loss:3.531666 +step:4146 train loss:3.569914 +step:4147 train loss:3.521519 +step:4148 train loss:3.553765 +step:4149 train loss:3.509635 +step:4150 train loss:3.568459 +step:4151 train loss:3.534736 +step:4152 train loss:3.508341 +step:4153 train loss:3.514572 +step:4154 train loss:3.574332 +step:4155 train loss:3.680294 +step:4156 train loss:3.563861 +step:4157 train loss:3.515883 +step:4158 train loss:3.498556 +step:4159 train loss:3.513058 +step:4160 train loss:3.547708 +step:4161 train loss:3.569278 +step:4162 train loss:3.547111 +step:4163 train loss:3.530250 +step:4164 train loss:3.553797 +step:4165 train loss:3.509811 +step:4166 train loss:3.607849 +step:4167 train loss:3.577914 +step:4168 train loss:3.553846 +step:4169 train loss:3.516934 +step:4170 train loss:3.496587 +step:4171 train loss:3.492431 +step:4172 train loss:3.497809 +step:4173 train loss:3.533144 +step:4174 train loss:3.507340 +step:4175 train loss:3.497273 +step:4176 train loss:3.601805 +step:4177 train loss:3.494422 +step:4178 train loss:3.552348 +step:4179 train loss:3.527784 +step:4180 train loss:3.499194 +step:4181 train loss:3.542050 +step:4182 train loss:3.461036 +step:4183 train loss:3.482772 +step:4184 train loss:3.510085 +step:4185 train loss:3.546189 +step:4186 train loss:3.565961 +step:4187 train loss:3.509284 +step:4188 train loss:3.518971 +step:4189 train loss:3.596694 +step:4190 train loss:3.570974 +step:4191 train loss:3.488052 +step:4192 train loss:3.503852 +step:4193 train loss:3.503114 +step:4194 train loss:3.451393 +step:4195 train loss:3.554320 +step:4196 train loss:3.584368 +step:4197 train loss:3.463624 +step:4198 train loss:3.536721 +step:4199 train loss:3.448622 +step:4200 train loss:3.550923 +step:4201 train loss:3.520585 +step:4202 train loss:3.533174 +step:4203 train loss:3.547109 +step:4204 train loss:3.509733 +step:4205 train loss:3.550524 +step:4206 train loss:3.518847 +step:4207 train loss:3.535819 +step:4208 train loss:3.525618 +step:4209 train loss:3.520585 +step:4210 train loss:3.565436 +step:4211 train loss:3.613203 +step:4212 train loss:3.626523 +step:4213 train loss:3.482221 +step:4214 train loss:3.531009 +step:4215 train loss:3.494838 +step:4216 train loss:3.485043 +step:4217 train loss:3.470560 +step:4218 train loss:3.495321 +step:4219 train loss:3.469691 +step:4220 train loss:3.526282 +step:4221 train loss:3.528120 +step:4222 train loss:3.534562 +step:4223 train loss:3.501979 +step:4224 train loss:3.515628 +step:4225 train loss:3.490183 +step:4226 train loss:3.525025 +step:4227 train loss:3.549057 +step:4228 train loss:3.496528 +step:4229 train loss:3.496827 +step:4230 train loss:3.458138 +step:4231 train loss:3.506805 +step:4232 train loss:3.478536 +step:4233 train loss:3.537469 +step:4234 train loss:3.497254 +step:4235 train loss:3.527887 +step:4236 train loss:3.572004 +step:4237 train loss:3.534959 +step:4238 train loss:3.503685 +step:4239 train loss:3.573562 +step:4240 train loss:3.483272 +step:4241 train 
loss:3.577958 +step:4242 train loss:3.537731 +step:4243 train loss:3.501423 +step:4244 train loss:3.506824 +step:4245 train loss:3.518276 +step:4246 train loss:3.537138 +step:4247 train loss:3.546382 +step:4248 train loss:3.584740 +step:4249 train loss:3.508670 +step:4250 validation loss:3.497857 +step:4250 train loss:3.507886 +step:4251 train loss:3.510164 +step:4252 train loss:3.530034 +step:4253 train loss:3.529655 +step:4254 train loss:3.582767 +step:4255 train loss:3.536355 +step:4256 train loss:3.531350 +step:4257 train loss:3.516028 +step:4258 train loss:3.566959 +step:4259 train loss:3.562264 +step:4260 train loss:3.521300 +step:4261 train loss:3.538470 +step:4262 train loss:3.509114 +step:4263 train loss:3.521370 +step:4264 train loss:3.505698 +step:4265 train loss:3.494585 +step:4266 train loss:3.525612 +step:4267 train loss:3.463077 +step:4268 train loss:3.520462 +step:4269 train loss:3.460621 +step:4270 train loss:3.543808 +step:4271 train loss:3.575599 +step:4272 train loss:3.524946 +step:4273 train loss:3.511058 +step:4274 train loss:3.456841 +step:4275 train loss:3.553807 +step:4276 train loss:3.522006 +step:4277 train loss:3.588759 +step:4278 train loss:3.494159 +step:4279 train loss:3.551456 +step:4280 train loss:3.628647 +step:4281 train loss:3.651246 +step:4282 train loss:3.503933 +step:4283 train loss:3.524938 +step:4284 train loss:3.558425 +step:4285 train loss:3.563654 +step:4286 train loss:3.493025 +step:4287 train loss:3.531793 +step:4288 train loss:3.517872 +step:4289 train loss:3.614875 +step:4290 train loss:3.487231 +step:4291 train loss:3.505623 +step:4292 train loss:3.492552 +step:4293 train loss:3.513432 +step:4294 train loss:3.522539 +step:4295 train loss:3.519952 +step:4296 train loss:3.473767 +step:4297 train loss:3.526460 +step:4298 train loss:3.538233 +step:4299 train loss:3.508838 +step:4300 train loss:3.579417 +step:4301 train loss:3.611027 +step:4302 train loss:3.598721 +step:4303 train loss:3.537263 +step:4304 train loss:3.517990 +step:4305 train loss:3.594316 +step:4306 train loss:3.528461 +step:4307 train loss:3.560171 +step:4308 train loss:3.639086 +step:4309 train loss:3.563792 +step:4310 train loss:3.534190 +step:4311 train loss:3.561352 +step:4312 train loss:3.565277 +step:4313 train loss:3.564577 +step:4314 train loss:3.597603 +step:4315 train loss:3.652927 +step:4316 train loss:3.617644 +step:4317 train loss:3.577183 +step:4318 train loss:3.614798 +step:4319 train loss:3.609307 +step:4320 train loss:3.591516 +step:4321 train loss:3.691238 +step:4322 train loss:3.541764 +step:4323 train loss:3.613453 +step:4324 train loss:3.650200 +step:4325 train loss:3.580195 +step:4326 train loss:3.582654 +step:4327 train loss:3.531226 +step:4328 train loss:3.519627 +step:4329 train loss:3.529777 +step:4330 train loss:3.576433 +step:4331 train loss:3.523168 +step:4332 train loss:3.453788 +step:4333 train loss:3.539252 +step:4334 train loss:3.570744 +step:4335 train loss:3.525362 +step:4336 train loss:3.585915 +step:4337 train loss:3.537704 +step:4338 train loss:3.568432 +step:4339 train loss:3.517005 +step:4340 train loss:3.542110 +step:4341 train loss:3.539706 +step:4342 train loss:3.520155 +step:4343 train loss:3.572167 +step:4344 train loss:3.617490 +step:4345 train loss:3.686052 +step:4346 train loss:3.559866 +step:4347 train loss:3.533829 +step:4348 train loss:3.577414 +step:4349 train loss:3.547824 +step:4350 train loss:3.573170 +step:4351 train loss:3.536046 +step:4352 train loss:3.596015 +step:4353 train loss:3.599658 +step:4354 train loss:3.530006 
+step:4355 train loss:3.524440 +step:4356 train loss:3.556417 +step:4357 train loss:3.504318 +step:4358 train loss:3.559655 +step:4359 train loss:3.591660 +step:4360 train loss:3.571968 +step:4361 train loss:3.539461 +step:4362 train loss:3.528954 +step:4363 train loss:3.547855 +step:4364 train loss:3.588780 +step:4365 train loss:3.588644 +step:4366 train loss:3.515635 +step:4367 train loss:3.535541 +step:4368 train loss:3.548661 +step:4369 train loss:3.474481 +step:4370 train loss:3.645823 +step:4371 train loss:3.708197 +step:4372 train loss:3.530788 +step:4373 train loss:3.551492 +step:4374 train loss:3.588049 +step:4375 train loss:3.566485 +step:4376 train loss:3.460387 +step:4377 train loss:3.536537 +step:4378 train loss:3.500609 +step:4379 train loss:3.546602 +step:4380 train loss:3.584763 +step:4381 train loss:3.544092 +step:4382 train loss:3.563208 +step:4383 train loss:3.547705 +step:4384 train loss:3.547615 +step:4385 train loss:3.560519 +step:4386 train loss:3.547650 +step:4387 train loss:3.515646 +step:4388 train loss:3.532865 +step:4389 train loss:3.573994 +step:4390 train loss:3.569803 +step:4391 train loss:3.536540 +step:4392 train loss:3.555821 +step:4393 train loss:3.661809 +step:4394 train loss:3.541572 +step:4395 train loss:3.569216 +step:4396 train loss:3.550924 +step:4397 train loss:3.523526 +step:4398 train loss:3.558952 +step:4399 train loss:3.630956 +step:4400 train loss:3.602616 +step:4401 train loss:3.517220 +step:4402 train loss:3.540360 +step:4403 train loss:3.557220 +step:4404 train loss:3.625785 +step:4405 train loss:3.583647 +step:4406 train loss:3.619351 +step:4407 train loss:3.547744 +step:4408 train loss:3.485396 +step:4409 train loss:3.583764 +step:4410 train loss:3.647062 +step:4411 train loss:3.549754 +step:4412 train loss:3.559175 +step:4413 train loss:3.515564 +step:4414 train loss:3.552817 +step:4415 train loss:3.555455 +step:4416 train loss:3.671652 +step:4417 train loss:3.539924 +step:4418 train loss:3.451069 +step:4419 train loss:3.635241 +step:4420 train loss:3.550889 +step:4421 train loss:3.572234 +step:4422 train loss:3.565108 +step:4423 train loss:3.522484 +step:4424 train loss:3.529325 +step:4425 train loss:3.536872 +step:4426 train loss:3.579942 +step:4427 train loss:3.560923 +step:4428 train loss:3.592432 +step:4429 train loss:3.676156 +step:4430 train loss:3.576530 +step:4431 train loss:3.526665 +step:4432 train loss:3.523375 +step:4433 train loss:3.620789 +step:4434 train loss:3.664819 +step:4435 train loss:3.537523 +step:4436 train loss:3.545492 +step:4437 train loss:3.505504 +step:4438 train loss:3.500723 +step:4439 train loss:3.537633 +step:4440 train loss:3.513201 +step:4441 train loss:3.552173 +step:4442 train loss:3.510268 +step:4443 train loss:3.529061 +step:4444 train loss:3.547787 +step:4445 train loss:3.550714 +step:4446 train loss:3.523157 +step:4447 train loss:3.803429 +step:4448 train loss:3.594814 +step:4449 train loss:3.562469 +step:4450 train loss:3.559823 +step:4451 train loss:3.587483 +step:4452 train loss:3.523893 +step:4453 train loss:3.525684 +step:4454 train loss:3.510808 +step:4455 train loss:3.539207 +step:4456 train loss:3.531368 +step:4457 train loss:3.497936 +step:4458 train loss:3.559555 +step:4459 train loss:3.566896 +step:4460 train loss:3.491626 +step:4461 train loss:3.544533 +step:4462 train loss:3.556527 +step:4463 train loss:3.564057 +step:4464 train loss:3.549816 +step:4465 train loss:3.543661 +step:4466 train loss:3.528767 +step:4467 train loss:3.473742 +step:4468 train loss:3.533893 +step:4469 train 
loss:3.591300 +step:4470 train loss:3.541669 +step:4471 train loss:3.550329 +step:4472 train loss:3.561678 +step:4473 train loss:3.556523 +step:4474 train loss:3.529175 +step:4475 train loss:3.540776 +step:4476 train loss:3.563221 +step:4477 train loss:3.508430 +step:4478 train loss:3.522804 +step:4479 train loss:3.525199 +step:4480 train loss:3.527419 +step:4481 train loss:3.567043 +step:4482 train loss:3.557798 +step:4483 train loss:3.595142 +step:4484 train loss:3.576683 +step:4485 train loss:3.599022 +step:4486 train loss:3.546599 +step:4487 train loss:3.572648 +step:4488 train loss:3.564207 +step:4489 train loss:3.542143 +step:4490 train loss:3.553314 +step:4491 train loss:3.545987 +step:4492 train loss:3.554692 +step:4493 train loss:3.606886 +step:4494 train loss:3.539398 +step:4495 train loss:3.571655 +step:4496 train loss:3.584335 +step:4497 train loss:3.569286 +step:4498 train loss:3.503871 +step:4499 train loss:3.558526 +step:4500 validation loss:3.486636 total_sharp:7.2278e-03 L1_sharp:6.2274e-03 L2_sharp:9.5793e-03 L3_sharp:7.9187e-03 L4_sharp:1.8979e-03 L5_sharp:1.2342e-03 L6_sharp:1.4745e-03 L7_sharp:1.4834e-03 L8_sharp:1.4554e-03 L9_sharp:8.7948e-04 L10_sharp:4.8583e-04 L11_sharp:6.0389e-04 L12_sharp:9.4290e-04 total_fnorm:2.3597e+00 total_l1_linf:2.0135e+04 total_spectral:2.3597e+00 L1_fnorm:5.5335e-01 L2_fnorm:4.3527e-01 L3_fnorm:4.1723e-01 L4_fnorm:5.2423e-01 L5_fnorm:5.8453e-01 L6_fnorm:5.9193e-01 L7_fnorm:5.9952e-01 L8_fnorm:6.0125e-01 L9_fnorm:5.9900e-01 L10_fnorm:5.9826e-01 L11_fnorm:5.8677e-01 L12_fnorm:5.9995e-01 L1_l1linf:4.3356e-01 L2_l1linf:4.0684e-01 L3_l1linf:5.4350e-01 L4_l1linf:4.1788e-01 L5_l1linf:4.0720e-01 L6_l1linf:4.1087e-01 L7_l1linf:4.0832e-01 L8_l1linf:4.1061e-01 L9_l1linf:4.1007e-01 L10_l1linf:4.1372e-01 L11_l1linf:4.1362e-01 L12_l1linf:4.0786e-01 L1_spectral:1.2046e-02 L2_spectral:1.2039e-02 L3_spectral:2.1305e-02 L4_spectral:1.2537e-02 L5_spectral:1.2049e-02 L6_spectral:1.2055e-02 L7_spectral:1.2053e-02 L8_spectral:1.2053e-02 L9_spectral:1.2050e-02 L10_spectral:1.2057e-02 L11_spectral:1.2049e-02 L12_spectral:1.2044e-02 v_norm:2.3597e+00 cos_v_-g_hvp:2.8142e-02 g_hvp_norm:7.7941e-01 cos_v_-g_t:3.2942e-02 g_t_norm:7.1238e-01 hv_norm:1.8802e+00 cos_v_hv:9.0713e-03 hg_norm:4.2844e+02 cos_g_hg:1.4762e-01 v_par:5.9644e-03 v_perp:2.3597e+00 L1_cos_v_neg_g:1.8537e-02 L1_v_norm:5.5335e-01 L2_cos_v_neg_g:3.4702e-02 L2_v_norm:4.3527e-01 L3_cos_v_neg_g:3.4802e-02 L3_v_norm:4.1723e-01 L4_cos_v_neg_g:3.1537e-02 L4_v_norm:5.2423e-01 L5_cos_v_neg_g:3.2806e-02 L5_v_norm:5.8453e-01 L6_cos_v_neg_g:3.2535e-02 L6_v_norm:5.9193e-01 L7_cos_v_neg_g:3.2298e-02 L7_v_norm:5.9952e-01 L8_cos_v_neg_g:3.1525e-02 L8_v_norm:6.0125e-01 L9_cos_v_neg_g:3.2129e-02 L9_v_norm:5.9900e-01 L10_cos_v_neg_g:3.6772e-02 L10_v_norm:5.9826e-01 L11_cos_v_neg_g:4.6830e-02 L11_v_norm:5.8677e-01 L12_cos_v_neg_g:8.0942e-02 L12_v_norm:5.9995e-01 +step:4500 train loss:3.598722 +step:4501 train loss:3.561990 +step:4502 train loss:3.499243 +step:4503 train loss:3.544070 +step:4504 train loss:3.569972 +step:4505 train loss:3.581534 +step:4506 train loss:3.558031 +step:4507 train loss:3.593351 +step:4508 train loss:3.522566 +step:4509 train loss:3.544879 +step:4510 train loss:3.558155 +step:4511 train loss:3.556804 +step:4512 train loss:3.618658 +step:4513 train loss:3.578494 +step:4514 train loss:3.512636 +step:4515 train loss:3.577605 +step:4516 train loss:3.526381 +step:4517 train loss:3.526806 +step:4518 train loss:3.524434 +step:4519 train loss:3.588057 +step:4520 train loss:3.555376 +step:4521 train 
loss:3.564863 +step:4522 train loss:3.567005 +step:4523 train loss:3.555534 +step:4524 train loss:3.533900 +step:4525 train loss:3.604533 +step:4526 train loss:3.622980 +step:4527 train loss:3.609070 +step:4528 train loss:3.581264 +step:4529 train loss:3.616123 +step:4530 train loss:3.528997 +step:4531 train loss:3.504151 +step:4532 train loss:3.545861 +step:4533 train loss:3.563961 +step:4534 train loss:3.548819 +step:4535 train loss:3.550316 +step:4536 train loss:3.555263 +step:4537 train loss:3.524999 +step:4538 train loss:3.514785 +step:4539 train loss:3.616232 +step:4540 train loss:3.540369 +step:4541 train loss:3.556108 +step:4542 train loss:3.605971 +step:4543 train loss:3.518086 +step:4544 train loss:3.546523 +step:4545 train loss:3.654088 +step:4546 train loss:3.575206 +step:4547 train loss:3.618354 +step:4548 train loss:3.534506 +step:4549 train loss:3.540224 +step:4550 train loss:3.614609 +step:4551 train loss:3.615010 +step:4552 train loss:3.545267 +step:4553 train loss:3.571028 +step:4554 train loss:3.508745 +step:4555 train loss:3.551586 +step:4556 train loss:3.549856 +step:4557 train loss:3.532128 +step:4558 train loss:3.593979 +step:4559 train loss:3.561439 +step:4560 train loss:3.585335 +step:4561 train loss:3.536039 +step:4562 train loss:3.549937 +step:4563 train loss:3.553349 +step:4564 train loss:3.612647 +step:4565 train loss:3.541548 +step:4566 train loss:3.620650 +step:4567 train loss:3.556107 +step:4568 train loss:3.570363 +step:4569 train loss:3.649141 +step:4570 train loss:3.552343 +step:4571 train loss:3.512446 +step:4572 train loss:3.544381 +step:4573 train loss:3.574881 +step:4574 train loss:3.584387 +step:4575 train loss:3.603025 +step:4576 train loss:3.542511 +step:4577 train loss:3.547751 +step:4578 train loss:3.558638 +step:4579 train loss:3.563354 +step:4580 train loss:3.604593 +step:4581 train loss:3.643320 +step:4582 train loss:3.582987 +step:4583 train loss:3.557515 +step:4584 train loss:3.527447 +step:4585 train loss:3.664821 +step:4586 train loss:3.529183 +step:4587 train loss:3.523496 +step:4588 train loss:3.506173 +step:4589 train loss:3.603036 +step:4590 train loss:3.527921 +step:4591 train loss:3.505941 +step:4592 train loss:3.495445 +step:4593 train loss:3.477923 +step:4594 train loss:3.512648 +step:4595 train loss:3.554802 +step:4596 train loss:3.545972 +step:4597 train loss:3.540804 +step:4598 train loss:3.488152 +step:4599 train loss:3.542730 +step:4600 train loss:3.583993 +step:4601 train loss:3.513619 +step:4602 train loss:3.582429 +step:4603 train loss:3.556471 +step:4604 train loss:3.564774 +step:4605 train loss:3.529733 +step:4606 train loss:3.583252 +step:4607 train loss:3.523641 +step:4608 train loss:3.514943 +step:4609 train loss:3.560559 +step:4610 train loss:3.609530 +step:4611 train loss:3.556372 +step:4612 train loss:3.537443 +step:4613 train loss:3.480358 +step:4614 train loss:3.553626 +step:4615 train loss:3.511826 +step:4616 train loss:3.562486 +step:4617 train loss:3.513638 +step:4618 train loss:3.571099 +step:4619 train loss:3.584316 +step:4620 train loss:3.514516 +step:4621 train loss:3.525645 +step:4622 train loss:3.508205 +step:4623 train loss:3.510606 +step:4624 train loss:3.540063 +step:4625 train loss:3.574451 +step:4626 train loss:3.540371 +step:4627 train loss:3.524605 +step:4628 train loss:3.575851 +step:4629 train loss:3.532050 +step:4630 train loss:3.526474 +step:4631 train loss:3.568487 +step:4632 train loss:3.491416 +step:4633 train loss:3.592997 +step:4634 train loss:3.489707 +step:4635 train loss:3.537288 
+step:4636 train loss:3.593477 +step:4637 train loss:3.583217 +step:4638 train loss:3.550148 +step:4639 train loss:3.524301 +step:4640 train loss:3.505083 +step:4641 train loss:3.546709 +step:4642 train loss:3.524489 +step:4643 train loss:3.530992 +step:4644 train loss:3.569863 +step:4645 train loss:3.563462 +step:4646 train loss:3.470589 +step:4647 train loss:3.536155 +step:4648 train loss:3.463790 +step:4649 train loss:3.463645 +step:4650 train loss:3.542024 +step:4651 train loss:3.538295 +step:4652 train loss:3.504761 +step:4653 train loss:3.526921 +step:4654 train loss:3.510123 +step:4655 train loss:3.529685 +step:4656 train loss:3.576844 +step:4657 train loss:3.512435 +step:4658 train loss:3.537929 +step:4659 train loss:3.492539 +step:4660 train loss:3.571979 +step:4661 train loss:3.606093 +step:4662 train loss:3.563884 +step:4663 train loss:3.504566 +step:4664 train loss:3.518827 +step:4665 train loss:3.494356 +step:4666 train loss:3.517807 +step:4667 train loss:3.575453 +step:4668 train loss:3.566168 +step:4669 train loss:3.540539 +step:4670 train loss:3.479974 +step:4671 train loss:3.570872 +step:4672 train loss:3.591493 +step:4673 train loss:3.534742 +step:4674 train loss:3.533329 +step:4675 train loss:3.532936 +step:4676 train loss:3.535244 +step:4677 train loss:3.511186 +step:4678 train loss:3.552890 +step:4679 train loss:3.548326 +step:4680 train loss:3.548275 +step:4681 train loss:3.506518 +step:4682 train loss:3.558907 +step:4683 train loss:3.538028 +step:4684 train loss:3.578869 +step:4685 train loss:3.545312 +step:4686 train loss:3.553686 +step:4687 train loss:3.556313 +step:4688 train loss:3.520645 +step:4689 train loss:3.573805 +step:4690 train loss:3.558738 +step:4691 train loss:3.592685 +step:4692 train loss:3.554740 +step:4693 train loss:3.535222 +step:4694 train loss:3.557477 +step:4695 train loss:3.557701 +step:4696 train loss:3.540852 +step:4697 train loss:3.562408 +step:4698 train loss:3.522219 +step:4699 train loss:3.506161 +step:4700 train loss:3.519303 +step:4701 train loss:3.554558 +step:4702 train loss:3.540187 +step:4703 train loss:3.585671 +step:4704 train loss:3.629372 +step:4705 train loss:3.641708 +step:4706 train loss:3.601103 +step:4707 train loss:3.593191 +step:4708 train loss:3.547040 +step:4709 train loss:3.564736 +step:4710 train loss:3.495268 +step:4711 train loss:3.539069 +step:4712 train loss:3.557980 +step:4713 train loss:3.548452 +step:4714 train loss:3.536932 +step:4715 train loss:3.518588 +step:4716 train loss:3.562935 +step:4717 train loss:3.496373 +step:4718 train loss:3.570132 +step:4719 train loss:3.550471 +step:4720 train loss:3.532704 +step:4721 train loss:3.589648 +step:4722 train loss:3.523686 +step:4723 train loss:3.555325 +step:4724 train loss:3.494511 +step:4725 train loss:3.527827 +step:4726 train loss:3.566051 +step:4727 train loss:3.560745 +step:4728 train loss:3.517464 +step:4729 train loss:3.552629 +step:4730 train loss:3.592809 +step:4731 train loss:3.542970 +step:4732 train loss:3.573479 +step:4733 train loss:3.660360 +step:4734 train loss:3.537349 +step:4735 train loss:3.495413 +step:4736 train loss:3.550777 +step:4737 train loss:3.618234 +step:4738 train loss:3.562383 +step:4739 train loss:3.538307 +step:4740 train loss:3.525408 +step:4741 train loss:3.581947 +step:4742 train loss:3.587891 +step:4743 train loss:3.591128 +step:4744 train loss:3.562815 +step:4745 train loss:3.519202 +step:4746 train loss:3.571432 +step:4747 train loss:3.586227 +step:4748 train loss:3.575261 +step:4749 train loss:3.529683 +step:4750 
validation loss:3.481567 +step:4750 train loss:3.551832 +step:4751 train loss:3.615850 +step:4752 train loss:3.587918 +step:4753 train loss:3.572520 +step:4754 train loss:3.573931 +step:4755 train loss:3.527319 +step:4756 train loss:3.496880 +step:4757 train loss:3.537629 +step:4758 train loss:3.533348 +step:4759 train loss:3.516827 +step:4760 train loss:3.558385 +step:4761 train loss:3.564306 +step:4762 train loss:3.651818 +step:4763 train loss:3.485402 +step:4764 train loss:3.572904 +step:4765 train loss:3.656887 +step:4766 train loss:3.637339 +step:4767 train loss:3.534508 +step:4768 train loss:3.539500 +step:4769 train loss:3.523142 +step:4770 train loss:3.541706 +step:4771 train loss:3.515383 +step:4772 train loss:3.503091 +step:4773 train loss:3.552915 +step:4774 train loss:3.520603 +step:4775 train loss:3.541075 +step:4776 train loss:3.524526 +step:4777 train loss:3.534509 +step:4778 train loss:3.518371 +step:4779 train loss:3.551317 +step:4780 train loss:3.542165 +step:4781 train loss:3.560427 +step:4782 train loss:3.647441 +step:4783 train loss:3.531245 +step:4784 train loss:3.529127 +step:4785 train loss:3.523135 +step:4786 train loss:3.574593 +step:4787 train loss:3.514973 +step:4788 train loss:3.516618 +step:4789 train loss:3.520016 +step:4790 train loss:3.544375 +step:4791 train loss:3.580388 +step:4792 train loss:3.526965 +step:4793 train loss:3.506346 +step:4794 train loss:3.460804 +step:4795 train loss:3.513356 +step:4796 train loss:3.536211 +step:4797 train loss:3.564535 +step:4798 train loss:3.572605 +step:4799 train loss:3.505512 +step:4800 train loss:3.543885 +step:4801 train loss:3.548944 +step:4802 train loss:3.501749 +step:4803 train loss:3.526429 +step:4804 train loss:3.599773 +step:4805 train loss:3.563584 +step:4806 train loss:3.528226 +step:4807 train loss:3.581639 +step:4808 train loss:3.517920 +step:4809 train loss:3.546341 +step:4810 train loss:3.526515 +step:4811 train loss:3.563187 +step:4812 train loss:3.638200 +step:4813 train loss:3.694216 +step:4814 train loss:3.560719 +step:4815 train loss:3.564039 +step:4816 train loss:3.539735 +step:4817 train loss:3.490463 +step:4818 train loss:3.493819 +step:4819 train loss:3.597628 +step:4820 train loss:3.577277 +step:4821 train loss:3.576374 +step:4822 train loss:3.612988 +step:4823 train loss:3.564715 +step:4824 train loss:3.511179 +step:4825 train loss:3.508649 +step:4826 train loss:3.544171 +step:4827 train loss:3.521834 +step:4828 train loss:3.515311 +step:4829 train loss:3.576429 +step:4830 train loss:3.577204 +step:4831 train loss:3.524058 +step:4832 train loss:3.576933 +step:4833 train loss:3.539547 +step:4834 train loss:3.514253 +step:4835 train loss:3.523882 +step:4836 train loss:3.547428 +step:4837 train loss:3.601777 +step:4838 train loss:3.692037 +step:4839 train loss:3.541714 +step:4840 train loss:3.515561 +step:4841 train loss:3.564083 +step:4842 train loss:3.511328 +step:4843 train loss:3.496224 +step:4844 train loss:3.538033 +step:4845 train loss:3.532998 +step:4846 train loss:3.517697 +step:4847 train loss:3.626966 +step:4848 train loss:3.550957 +step:4849 train loss:3.533144 +step:4850 train loss:3.524591 +step:4851 train loss:3.530212 +step:4852 train loss:3.504613 +step:4853 train loss:3.528754 +step:4854 train loss:3.554705 +step:4855 train loss:3.579457 +step:4856 train loss:3.631056 +step:4857 train loss:3.586863 +step:4858 train loss:3.554083 +step:4859 train loss:3.503067 +step:4860 train loss:3.567867 +step:4861 train loss:3.469307 +step:4862 train loss:3.573368 +step:4863 train 
loss:3.561043 +step:4864 train loss:3.543637 +step:4865 train loss:3.518153 +step:4866 train loss:3.500081 +step:4867 train loss:3.559654 +step:4868 train loss:3.535965 +step:4869 train loss:3.490803 +step:4870 train loss:3.544774 +step:4871 train loss:3.499340 +step:4872 train loss:3.526937 +step:4873 train loss:3.582581 +step:4874 train loss:3.531755 +step:4875 train loss:3.569778 +step:4876 train loss:3.569430 +step:4877 train loss:3.564517 +step:4878 train loss:3.541898 +step:4879 train loss:3.552180 +step:4880 train loss:3.558029 +step:4881 train loss:3.489875 +step:4882 train loss:3.504957 +step:4883 train loss:3.556010 +step:4884 train loss:3.510536 +step:4885 train loss:3.554489 +step:4886 train loss:3.530852 +step:4887 train loss:3.623433 +step:4888 train loss:3.542755 +step:4889 train loss:3.571221 +step:4890 train loss:3.495757 +step:4891 train loss:3.532174 +step:4892 train loss:3.565337 +step:4893 train loss:3.552408 +step:4894 train loss:3.527640 +step:4895 train loss:3.593898 +step:4896 train loss:3.466923 +step:4897 train loss:3.514532 +step:4898 train loss:3.615544 +step:4899 train loss:3.605293 +step:4900 train loss:3.562628 +step:4901 train loss:3.478494 +step:4902 train loss:3.536861 +step:4903 train loss:3.586066 +step:4904 train loss:3.525112 +step:4905 train loss:3.525518 +step:4906 train loss:3.541372 +step:4907 train loss:3.530247 +step:4908 train loss:3.507227 +step:4909 train loss:3.552875 +step:4910 train loss:3.519973 +step:4911 train loss:3.504451 +step:4912 train loss:3.535269 +step:4913 train loss:3.522974 +step:4914 train loss:3.504065 +step:4915 train loss:3.549451 +step:4916 train loss:3.577802 +step:4917 train loss:3.518187 +step:4918 train loss:3.572728 +step:4919 train loss:3.535489 +step:4920 train loss:3.567650 +step:4921 train loss:3.536070 +step:4922 train loss:3.563947 +step:4923 train loss:3.473275 +step:4924 train loss:3.492637 +step:4925 train loss:3.476400 +step:4926 train loss:3.554405 +step:4927 train loss:3.567021 +step:4928 train loss:3.501034 +step:4929 train loss:3.566025 +step:4930 train loss:3.511844 +step:4931 train loss:3.528239 +step:4932 train loss:3.544487 +step:4933 train loss:3.505578 +step:4934 train loss:3.594356 +step:4935 train loss:3.521453 +step:4936 train loss:3.480791 +step:4937 train loss:3.530264 +step:4938 train loss:3.517553 +step:4939 train loss:3.526782 +step:4940 train loss:3.513970 +step:4941 train loss:3.504924 +step:4942 train loss:3.597052 +step:4943 train loss:3.514558 +step:4944 train loss:3.529690 +step:4945 train loss:3.518534 +step:4946 train loss:3.552062 +step:4947 train loss:3.563373 +step:4948 train loss:3.520614 +step:4949 train loss:3.511382 +step:4950 train loss:3.653993 +step:4951 train loss:3.555125 +step:4952 train loss:3.564648 +step:4953 train loss:3.537419 +step:4954 train loss:3.553968 +step:4955 train loss:3.537725 +step:4956 train loss:3.546118 +step:4957 train loss:3.555470 +step:4958 train loss:3.519012 +step:4959 train loss:3.480973 +step:4960 train loss:3.535865 +step:4961 train loss:3.526626 +step:4962 train loss:3.492493 +step:4963 train loss:3.570569 +step:4964 train loss:3.534398 +step:4965 train loss:3.498800 +step:4966 train loss:3.596919 +step:4967 train loss:3.614530 +step:4968 train loss:3.506470 +step:4969 train loss:3.517627 +step:4970 train loss:3.550215 +step:4971 train loss:3.494093 +step:4972 train loss:3.572934 +step:4973 train loss:3.571294 +step:4974 train loss:3.529398 +step:4975 train loss:3.559131 +step:4976 train loss:3.523623 +step:4977 train loss:3.485206 
+step:4978 train loss:3.535440 +step:4979 train loss:3.539423 +step:4980 train loss:3.485432 +step:4981 train loss:3.461444 +step:4982 train loss:3.730903 +step:4983 train loss:3.571821 +step:4984 train loss:3.537517 +step:4985 train loss:3.499507 +step:4986 train loss:3.484334 +step:4987 train loss:3.557464 +step:4988 train loss:3.535542 +step:4989 train loss:3.544093 +step:4990 train loss:3.517739 +step:4991 train loss:3.490870 +step:4992 train loss:3.531203 +step:4993 train loss:3.496602 +step:4994 train loss:3.523747 +step:4995 train loss:3.537459 +step:4996 train loss:3.542824 +step:4997 train loss:3.575583 +step:4998 train loss:3.506622 +step:4999 train loss:3.507538 +step:5000 validation loss:3.478960 total_sharp:2.0752e-02 L1_sharp:1.2010e-01 L2_sharp:4.8440e-03 L3_sharp:4.7446e-03 L4_sharp:1.3879e-03 L5_sharp:9.9015e-04 L6_sharp:1.2273e-03 L7_sharp:1.0552e-03 L8_sharp:9.5278e-04 L9_sharp:7.7979e-04 L10_sharp:4.5197e-04 L11_sharp:5.7706e-04 L12_sharp:1.5513e-03 total_fnorm:2.3626e+00 total_l1_linf:2.0147e+04 total_spectral:2.3626e+00 L1_fnorm:5.6120e-01 L2_fnorm:4.2836e-01 L3_fnorm:4.1051e-01 L4_fnorm:5.3060e-01 L5_fnorm:5.8490e-01 L6_fnorm:5.9071e-01 L7_fnorm:5.9917e-01 L8_fnorm:6.0039e-01 L9_fnorm:5.9721e-01 L10_fnorm:5.9697e-01 L11_fnorm:5.8348e-01 L12_fnorm:5.9946e-01 L1_l1linf:4.3341e-01 L2_l1linf:3.8180e-01 L3_l1linf:4.4705e-01 L4_l1linf:3.9770e-01 L5_l1linf:4.0753e-01 L6_l1linf:4.0595e-01 L7_l1linf:4.0490e-01 L8_l1linf:4.0865e-01 L9_l1linf:4.0643e-01 L10_l1linf:4.1145e-01 L11_l1linf:4.2598e-01 L12_l1linf:4.2403e-01 L1_spectral:1.2043e-02 L2_spectral:1.3862e-02 L3_spectral:1.8366e-02 L4_spectral:1.3395e-02 L5_spectral:1.2045e-02 L6_spectral:1.2049e-02 L7_spectral:1.2056e-02 L8_spectral:1.2057e-02 L9_spectral:1.2048e-02 L10_spectral:1.2060e-02 L11_spectral:1.2044e-02 L12_spectral:1.2043e-02 v_norm:2.3626e+00 cos_v_-g_hvp:2.6644e-02 g_hvp_norm:7.2908e-01 cos_v_-g_t:3.0406e-02 g_t_norm:7.2583e-01 hv_norm:1.4148e+01 cos_v_hv:3.4654e-03 hg_norm:1.0345e+04 cos_g_hg:1.6396e-01 v_par:6.2835e-03 v_perp:2.3626e+00 L1_cos_v_neg_g:1.5258e-02 L1_v_norm:5.6120e-01 L2_cos_v_neg_g:2.7997e-02 L2_v_norm:4.2836e-01 L3_cos_v_neg_g:2.9345e-02 L3_v_norm:4.1051e-01 L4_cos_v_neg_g:2.6560e-02 L4_v_norm:5.3060e-01 L5_cos_v_neg_g:2.8530e-02 L5_v_norm:5.8490e-01 L6_cos_v_neg_g:2.7397e-02 L6_v_norm:5.9071e-01 L7_cos_v_neg_g:2.7251e-02 L7_v_norm:5.9917e-01 L8_cos_v_neg_g:2.6740e-02 L8_v_norm:6.0039e-01 L9_cos_v_neg_g:2.8871e-02 L9_v_norm:5.9721e-01 L10_cos_v_neg_g:3.2560e-02 L10_v_norm:5.9697e-01 L11_cos_v_neg_g:4.2403e-02 L11_v_norm:5.8348e-01 L12_cos_v_neg_g:6.6280e-02 L12_v_norm:5.9946e-01 +step:5000 train loss:3.565170 +step:5001 train loss:3.617849 +step:5002 train loss:3.508012 +step:5003 train loss:3.462935 +step:5004 train loss:3.532132 +step:5005 train loss:3.520102 +step:5006 train loss:3.487030 +step:5007 train loss:3.501133 +step:5008 train loss:3.490177 +step:5009 train loss:3.582158 +step:5010 train loss:3.565918 +step:5011 train loss:3.534835 +step:5012 train loss:3.555777 +step:5013 train loss:3.554291 +step:5014 train loss:3.633513 +step:5015 train loss:3.503890 +step:5016 train loss:3.463843 +step:5017 train loss:3.486759 +step:5018 train loss:3.465084 +step:5019 train loss:3.563417 +step:5020 train loss:3.530973 +step:5021 train loss:3.502800 +step:5022 train loss:3.593838 +step:5023 train loss:3.521643 +step:5024 train loss:3.486043 +step:5025 train loss:3.514879 +step:5026 train loss:3.508871 +step:5027 train loss:3.595578 +step:5028 train loss:3.530961 +step:5029 train loss:3.554741 
+step:5030 train loss:3.564033 +step:5031 train loss:3.560527 +step:5032 train loss:3.536489 +step:5033 train loss:3.479289 +step:5034 train loss:3.535892 +step:5035 train loss:3.509515 +step:5036 train loss:3.515764 +step:5037 train loss:3.550291 +step:5038 train loss:3.594590 +step:5039 train loss:3.468254 +step:5040 train loss:3.522140 +step:5041 train loss:3.592882 +step:5042 train loss:3.541901 +step:5043 train loss:3.576116 +step:5044 train loss:3.526510 +step:5045 train loss:3.539401 +step:5046 train loss:3.577497 +step:5047 train loss:3.646629 +step:5048 train loss:3.532336 +step:5049 train loss:3.553894 +step:5050 train loss:3.518553 +step:5051 train loss:3.586342 +step:5052 train loss:3.549929 +step:5053 train loss:3.575619 +step:5054 train loss:3.529538 +step:5055 train loss:3.533981 +step:5056 train loss:3.524899 +step:5057 train loss:3.540792 +step:5058 train loss:3.512532 +step:5059 train loss:3.494893 +step:5060 train loss:3.628403 +step:5061 train loss:3.532065 +step:5062 train loss:3.652175 +step:5063 train loss:3.525700 +step:5064 train loss:3.565784 +step:5065 train loss:3.617344 +step:5066 train loss:3.584969 +step:5067 train loss:3.545973 +step:5068 train loss:3.607735 +step:5069 train loss:3.543657 +step:5070 train loss:3.567287 +step:5071 train loss:3.591200 +step:5072 train loss:3.551611 +step:5073 train loss:3.509587 +step:5074 train loss:3.610441 +step:5075 train loss:3.638924 +step:5076 train loss:3.557369 +step:5077 train loss:3.517577 +step:5078 train loss:3.471858 +step:5079 train loss:3.506349 +step:5080 train loss:3.541447 +step:5081 train loss:3.540501 +step:5082 train loss:3.578384 +step:5083 train loss:3.557351 +step:5084 train loss:3.558661 +step:5085 train loss:3.530688 +step:5086 train loss:3.560279 +step:5087 train loss:3.530563 +step:5088 train loss:3.480242 +step:5089 train loss:3.609938 +step:5090 train loss:3.678670 +step:5091 train loss:3.556337 +step:5092 train loss:3.505952 +step:5093 train loss:3.515647 +step:5094 train loss:3.511168 +step:5095 train loss:3.539283 +step:5096 train loss:3.471605 +step:5097 train loss:3.511004 +step:5098 train loss:3.535125 +step:5099 train loss:3.555951 +step:5100 train loss:3.566092 +step:5101 train loss:3.578015 +step:5102 train loss:3.561541 +step:5103 train loss:3.629426 +step:5104 train loss:3.523035 +step:5105 train loss:3.577775 +step:5106 train loss:3.565675 +step:5107 train loss:3.530682 +step:5108 train loss:3.553227 +step:5109 train loss:3.560182 +step:5110 train loss:3.561980 +step:5111 train loss:3.539457 +step:5112 train loss:3.503272 +step:5113 train loss:3.517572 +step:5114 train loss:3.538280 +step:5115 train loss:3.563714 +step:5116 train loss:3.509391 +step:5117 train loss:3.540502 +step:5118 train loss:3.515949 +step:5119 train loss:3.553624 +step:5120 train loss:3.545380 +step:5121 train loss:3.547104 +step:5122 train loss:3.584685 +step:5123 train loss:3.583719 +step:5124 train loss:3.554732 +step:5125 train loss:3.537963 +step:5126 train loss:3.558789 +step:5127 train loss:3.614485 +step:5128 train loss:3.518396 +step:5129 train loss:3.562837 +step:5130 train loss:3.503536 +step:5131 train loss:3.529147 +step:5132 train loss:3.535227 +step:5133 train loss:3.508731 +step:5134 train loss:3.490685 +step:5135 train loss:3.554788 +step:5136 train loss:3.581352 +step:5137 train loss:3.502863 +step:5138 train loss:3.532934 +step:5139 train loss:3.569441 +step:5140 train loss:3.548579 +step:5141 train loss:3.585873 +step:5142 train loss:3.547924 +step:5143 train loss:3.541883 +step:5144 train 
loss:3.570509 +step:5145 train loss:3.545374 +step:5146 train loss:3.550770 +step:5147 train loss:3.518833 +step:5148 train loss:3.544828 +step:5149 train loss:3.520677 +step:5150 train loss:3.561079 +step:5151 train loss:3.571233 +step:5152 train loss:3.594896 +step:5153 train loss:3.565167 +step:5154 train loss:3.519242 +step:5155 train loss:3.543755 +step:5156 train loss:3.493278 +step:5157 train loss:3.513353 +step:5158 train loss:3.533994 +step:5159 train loss:3.516406 +step:5160 train loss:3.517816 +step:5161 train loss:3.564249 +step:5162 train loss:3.546772 +step:5163 train loss:3.501293 +step:5164 train loss:3.522807 +step:5165 train loss:3.555554 +step:5166 train loss:3.489469 +step:5167 train loss:3.492634 +step:5168 train loss:3.494126 +step:5169 train loss:3.485647 +step:5170 train loss:3.649991 +step:5171 train loss:3.518996 +step:5172 train loss:3.530766 +step:5173 train loss:3.509795 +step:5174 train loss:3.465010 +step:5175 train loss:3.553486 +step:5176 train loss:3.590056 +step:5177 train loss:3.595859 +step:5178 train loss:3.522188 +step:5179 train loss:3.557989 +step:5180 train loss:3.549785 +step:5181 train loss:3.541624 +step:5182 train loss:3.603402 +step:5183 train loss:3.528783 +step:5184 train loss:3.489786 +step:5185 train loss:3.517914 +step:5186 train loss:3.558298 +step:5187 train loss:3.648414 +step:5188 train loss:3.573895 +step:5189 train loss:3.527666 +step:5190 train loss:3.574379 +step:5191 train loss:3.644696 +step:5192 train loss:3.555409 +step:5193 train loss:3.537981 +step:5194 train loss:3.531955 +step:5195 train loss:3.524453 +step:5196 train loss:3.586904 +step:5197 train loss:3.577798 +step:5198 train loss:3.504984 +step:5199 train loss:3.536945 +step:5200 train loss:3.534516 +step:5201 train loss:3.567471 +step:5202 train loss:3.511526 +step:5203 train loss:3.484531 +step:5204 train loss:3.503922 +step:5205 train loss:3.624543 +step:5206 train loss:3.521322 +step:5207 train loss:3.537475 +step:5208 train loss:3.540991 +step:5209 train loss:3.512587 +step:5210 train loss:3.487954 +step:5211 train loss:3.552600 +step:5212 train loss:3.551754 +step:5213 train loss:3.547369 +step:5214 train loss:3.561863 +step:5215 train loss:3.584170 +step:5216 train loss:3.527654 +step:5217 train loss:3.523472 +step:5218 train loss:3.473215 +step:5219 train loss:3.562737 +step:5220 train loss:3.559191 +step:5221 train loss:3.502977 +step:5222 train loss:3.572204 +step:5223 train loss:3.527854 +step:5224 train loss:3.535354 +step:5225 train loss:3.458366 +step:5226 train loss:3.538552 +step:5227 train loss:3.497571 +step:5228 train loss:3.468970 +step:5229 train loss:3.498305 +step:5230 train loss:3.558568 +step:5231 train loss:3.463443 +step:5232 train loss:3.508031 +step:5233 train loss:3.499465 +step:5234 train loss:3.444715 +step:5235 train loss:3.498419 +step:5236 train loss:3.438692 +step:5237 train loss:3.495794 +step:5238 train loss:3.480479 +step:5239 train loss:3.539621 +step:5240 train loss:3.473150 +step:5241 train loss:3.477931 +step:5242 train loss:3.520868 +step:5243 train loss:3.520020 +step:5244 train loss:3.521551 +step:5245 train loss:3.527636 +step:5246 train loss:3.494074 +step:5247 train loss:3.582169 +step:5248 train loss:3.540331 +step:5249 train loss:3.542858 +step:5250 validation loss:3.471191 +step:5250 train loss:3.503378 +step:5251 train loss:3.542808 +step:5252 train loss:3.519107 +step:5253 train loss:3.481123 +step:5254 train loss:3.522331 +step:5255 train loss:3.471074 +step:5256 train loss:3.580672 +step:5257 train loss:3.519621 
+step:5258 train loss:3.516204 +step:5259 train loss:3.531056 +step:5260 train loss:3.506763 +step:5261 train loss:3.557966 +step:5262 train loss:3.545931 +step:5263 train loss:3.545792 +step:5264 train loss:3.504782 +step:5265 train loss:3.554928 +step:5266 train loss:3.520634 +step:5267 train loss:3.547233 +step:5268 train loss:3.537610 +step:5269 train loss:3.537793 +step:5270 train loss:3.508406 +step:5271 train loss:3.541754 +step:5272 train loss:3.579822 +step:5273 train loss:3.600201 +step:5274 train loss:3.577208 +step:5275 train loss:3.589479 +step:5276 train loss:3.599378 +step:5277 train loss:3.523335 +step:5278 train loss:3.542960 +step:5279 train loss:3.565932 +step:5280 train loss:3.569164 +step:5281 train loss:3.522444 +step:5282 train loss:3.487162 +step:5283 train loss:3.591684 +step:5284 train loss:3.509756 +step:5285 train loss:3.536197 +step:5286 train loss:3.492603 +step:5287 train loss:3.513577 +step:5288 train loss:3.526098 +step:5289 train loss:3.552192 +step:5290 train loss:3.539247 +step:5291 train loss:3.539027 +step:5292 train loss:3.588790 +step:5293 train loss:3.514367 +step:5294 train loss:3.504094 +step:5295 train loss:3.536806 +step:5296 train loss:3.500935 +step:5297 train loss:3.537526 +step:5298 train loss:3.490885 +step:5299 train loss:3.486596 +step:5300 train loss:3.504419 +step:5301 train loss:3.528808 +step:5302 train loss:3.502203 +step:5303 train loss:3.512210 +step:5304 train loss:3.495664 +step:5305 train loss:3.491563 +step:5306 train loss:3.559523 +step:5307 train loss:3.499010 +step:5308 train loss:3.510070 +step:5309 train loss:3.460770 +step:5310 train loss:3.515160 +step:5311 train loss:3.497977 +step:5312 train loss:3.487494 +step:5313 train loss:3.504178 +step:5314 train loss:3.502968 +step:5315 train loss:3.523773 +step:5316 train loss:3.521189 +step:5317 train loss:3.479848 +step:5318 train loss:3.560785 +step:5319 train loss:3.496784 +step:5320 train loss:3.541479 +step:5321 train loss:3.539213 +step:5322 train loss:3.555315 +step:5323 train loss:3.490620 +step:5324 train loss:3.494205 +step:5325 train loss:3.503868 +step:5326 train loss:3.531812 +step:5327 train loss:3.566648 +step:5328 train loss:3.541852 +step:5329 train loss:3.493850 +step:5330 train loss:3.501859 +step:5331 train loss:3.581692 +step:5332 train loss:3.616995 +step:5333 train loss:3.590103 +step:5334 train loss:3.601584 +step:5335 train loss:3.675047 +step:5336 train loss:3.728575 +step:5337 train loss:3.631704 +step:5338 train loss:3.629555 +step:5339 train loss:3.697224 +step:5340 train loss:3.701187 +step:5341 train loss:3.643155 +step:5342 train loss:3.714960 +step:5343 train loss:3.645740 +step:5344 train loss:3.612800 +step:5345 train loss:3.647954 +step:5346 train loss:3.569726 +step:5347 train loss:3.593951 +step:5348 train loss:3.662858 +step:5349 train loss:3.624952 +step:5350 train loss:3.601711 +step:5351 train loss:3.656169 +step:5352 train loss:3.623560 +step:5353 train loss:3.595556 +step:5354 train loss:3.578201 +step:5355 train loss:3.517512 +step:5356 train loss:3.635657 +step:5357 train loss:3.610337 +step:5358 train loss:3.729437 +step:5359 train loss:3.641035 +step:5360 train loss:3.594138 +step:5361 train loss:3.574267 +step:5362 train loss:3.577067 +step:5363 train loss:3.649901 +step:5364 train loss:3.661589 +step:5365 train loss:3.585656 +step:5366 train loss:3.661249 +step:5367 train loss:3.697150 +step:5368 train loss:3.633142 +step:5369 train loss:3.675487 +step:5370 train loss:3.635456 +step:5371 train loss:3.598527 +step:5372 train 
loss:3.625158 +step:5373 train loss:3.596759 +step:5374 train loss:3.568859 +step:5375 train loss:3.596650 +step:5376 train loss:3.548925 +step:5377 train loss:3.584518 +step:5378 train loss:3.613660 +step:5379 train loss:3.619473 +step:5380 train loss:3.625710 +step:5381 train loss:3.661888 +step:5382 train loss:3.679125 +step:5383 train loss:3.618910 +step:5384 train loss:3.517268 +step:5385 train loss:3.600501 +step:5386 train loss:3.594234 +step:5387 train loss:3.561320 +step:5388 train loss:3.594926 +step:5389 train loss:3.641515 +step:5390 train loss:3.623670 +step:5391 train loss:3.588697 +step:5392 train loss:3.641417 +step:5393 train loss:3.647881 +step:5394 train loss:3.618174 +step:5395 train loss:3.587238 +step:5396 train loss:3.654513 +step:5397 train loss:3.612972 +step:5398 train loss:3.612156 +step:5399 train loss:3.567865 +step:5400 train loss:3.564503 +step:5401 train loss:3.562794 +step:5402 train loss:3.593135 +step:5403 train loss:3.600643 +step:5404 train loss:3.600091 +step:5405 train loss:3.543213 +step:5406 train loss:3.520431 +step:5407 train loss:3.554887 +step:5408 train loss:3.553640 +step:5409 train loss:3.625853 +step:5410 train loss:3.581371 +step:5411 train loss:3.546991 +step:5412 train loss:3.562382 +step:5413 train loss:3.571191 +step:5414 train loss:3.581797 +step:5415 train loss:3.588078 +step:5416 train loss:3.572091 +step:5417 train loss:3.539979 +step:5418 train loss:3.596131 +step:5419 train loss:3.575095 +step:5420 train loss:3.529850 +step:5421 train loss:3.502427 +step:5422 train loss:3.536687 +step:5423 train loss:3.558608 +step:5424 train loss:3.545609 +step:5425 train loss:3.552253 +step:5426 train loss:3.591800 +step:5427 train loss:3.550717 +step:5428 train loss:3.595044 +step:5429 train loss:3.519223 +step:5430 train loss:3.553699 +step:5431 train loss:3.577518 +step:5432 train loss:3.572940 +step:5433 train loss:3.546600 +step:5434 train loss:3.571742 +step:5435 train loss:3.521239 +step:5436 train loss:3.552855 +step:5437 train loss:3.540130 +step:5438 train loss:3.592451 +step:5439 train loss:3.529773 +step:5440 train loss:3.541620 +step:5441 train loss:3.598483 +step:5442 train loss:3.547161 +step:5443 train loss:3.499399 +step:5444 train loss:3.571487 +step:5445 train loss:3.605202 +step:5446 train loss:3.621450 +step:5447 train loss:3.547256 +step:5448 train loss:3.566976 +step:5449 train loss:3.570008 +step:5450 train loss:3.661554 +step:5451 train loss:3.588249 +step:5452 train loss:3.610703 +step:5453 train loss:3.609649 +step:5454 train loss:3.580716 +step:5455 train loss:3.564356 +step:5456 train loss:3.564210 +step:5457 train loss:3.532428 +step:5458 train loss:3.548589 +step:5459 train loss:3.575752 +step:5460 train loss:3.594656 +step:5461 train loss:3.574166 +step:5462 train loss:3.587002 +step:5463 train loss:3.578183 +step:5464 train loss:3.542016 +step:5465 train loss:3.551699 +step:5466 train loss:3.558539 +step:5467 train loss:3.591347 +step:5468 train loss:3.608693 +step:5469 train loss:3.561090 +step:5470 train loss:3.588235 +step:5471 train loss:3.561169 +step:5472 train loss:3.549088 +step:5473 train loss:3.574287 +step:5474 train loss:3.549550 +step:5475 train loss:3.573781 +step:5476 train loss:3.581765 +step:5477 train loss:3.579554 +step:5478 train loss:3.585337 +step:5479 train loss:3.639565 +step:5480 train loss:3.603528 +step:5481 train loss:3.599195 +step:5482 train loss:3.554123 +step:5483 train loss:3.524553 +step:5484 train loss:3.559803 +step:5485 train loss:3.542919 +step:5486 train loss:3.547479 
+step:5487 train loss:3.563149 +step:5488 train loss:3.572734 +step:5489 train loss:3.533057 +step:5490 train loss:3.588985 +step:5491 train loss:3.584950 +step:5492 train loss:3.559725 +step:5493 train loss:3.622669 +step:5494 train loss:3.566930 +step:5495 train loss:3.545165 +step:5496 train loss:3.539150 +step:5497 train loss:3.598338 +step:5498 train loss:3.605617 +step:5499 train loss:3.554498 +step:5500 validation loss:3.495790 total_sharp:-1.0959e-03 L1_sharp:-7.8600e-02 L2_sharp:-5.2788e-03 L3_sharp:5.2922e-03 L4_sharp:1.7430e-03 L5_sharp:1.3830e-03 L6_sharp:1.3153e-03 L7_sharp:1.5432e-03 L8_sharp:1.1708e-03 L9_sharp:7.3968e-04 L10_sharp:4.7912e-04 L11_sharp:5.3660e-04 L12_sharp:3.4769e-04 total_fnorm:2.2989e+00 total_l1_linf:1.9451e+04 total_spectral:2.2989e+00 L1_fnorm:5.0867e-01 L2_fnorm:3.5940e-01 L3_fnorm:3.6829e-01 L4_fnorm:4.8906e-01 L5_fnorm:5.6597e-01 L6_fnorm:5.8580e-01 L7_fnorm:5.9368e-01 L8_fnorm:5.9810e-01 L9_fnorm:5.9659e-01 L10_fnorm:5.9605e-01 L11_fnorm:5.7685e-01 L12_fnorm:5.9823e-01 L1_l1linf:3.9374e-01 L2_l1linf:3.9342e-01 L3_l1linf:3.9956e-01 L4_l1linf:4.1222e-01 L5_l1linf:3.9498e-01 L6_l1linf:3.9915e-01 L7_l1linf:3.9975e-01 L8_l1linf:4.0330e-01 L9_l1linf:4.0333e-01 L10_l1linf:4.0713e-01 L11_l1linf:4.2860e-01 L12_l1linf:4.3097e-01 L1_spectral:1.2036e-02 L2_spectral:1.5696e-02 L3_spectral:1.6523e-02 L4_spectral:1.5762e-02 L5_spectral:1.2043e-02 L6_spectral:1.2043e-02 L7_spectral:1.2048e-02 L8_spectral:1.2051e-02 L9_spectral:1.2053e-02 L10_spectral:1.2053e-02 L11_spectral:1.2045e-02 L12_spectral:1.2047e-02 v_norm:2.2989e+00 cos_v_-g_hvp:1.9714e-02 g_hvp_norm:1.0168e+00 cos_v_-g_t:2.0851e-02 g_t_norm:1.1459e+00 hv_norm:2.8594e+01 cos_v_hv:-8.8107e-05 hg_norm:9.4382e+04 cos_g_hg:-9.6510e-02 v_par:6.3602e-03 v_perp:2.2989e+00 L1_cos_v_neg_g:1.3745e-02 L1_v_norm:5.0867e-01 L2_cos_v_neg_g:1.9665e-02 L2_v_norm:3.5940e-01 L3_cos_v_neg_g:2.7678e-02 L3_v_norm:3.6829e-01 L4_cos_v_neg_g:2.4960e-02 L4_v_norm:4.8906e-01 L5_cos_v_neg_g:2.9982e-02 L5_v_norm:5.6597e-01 L6_cos_v_neg_g:2.9845e-02 L6_v_norm:5.8580e-01 L7_cos_v_neg_g:2.8936e-02 L7_v_norm:5.9368e-01 L8_cos_v_neg_g:2.9298e-02 L8_v_norm:5.9810e-01 L9_cos_v_neg_g:3.1394e-02 L9_v_norm:5.9659e-01 L10_cos_v_neg_g:3.4986e-02 L10_v_norm:5.9605e-01 L11_cos_v_neg_g:4.5292e-02 L11_v_norm:5.7685e-01 L12_cos_v_neg_g:7.7775e-02 L12_v_norm:5.9823e-01 +step:5500 train loss:3.567499 +step:5501 train loss:3.588139 +step:5502 train loss:3.540164 +step:5503 train loss:3.647914 +step:5504 train loss:3.569633 +step:5505 train loss:3.532562 +step:5506 train loss:3.560969 +step:5507 train loss:3.553016 +step:5508 train loss:3.543867 +step:5509 train loss:3.570381 +step:5510 train loss:3.579346 +step:5511 train loss:3.498406 +step:5512 train loss:3.540757 +step:5513 train loss:3.647191 +step:5514 train loss:3.517622 +step:5515 train loss:3.566403 +step:5516 train loss:3.616116 +step:5517 train loss:3.547357 +step:5518 train loss:3.553451 +step:5519 train loss:3.567529 +step:5520 train loss:3.542213 +step:5521 train loss:3.563925 +step:5522 train loss:3.556623 +step:5523 train loss:3.550700 +step:5524 train loss:3.599818 +step:5525 train loss:3.535213 +step:5526 train loss:3.583501 +step:5527 train loss:3.561125 +step:5528 train loss:3.544523 +step:5529 train loss:3.542526 +step:5530 train loss:3.631001 +step:5531 train loss:3.725937 +step:5532 train loss:3.542398 +step:5533 train loss:3.552467 +step:5534 train loss:3.585371 +step:5535 train loss:3.554698 +step:5536 train loss:3.556511 +step:5537 train loss:3.560409 +step:5538 train 
loss:3.560340 +step:5539 train loss:3.519130 +step:5540 train loss:3.570143 +step:5541 train loss:3.569305 +step:5542 train loss:3.566950 +step:5543 train loss:3.562215 +step:5544 train loss:3.550595 +step:5545 train loss:3.631244 +step:5546 train loss:3.536434 +step:5547 train loss:3.567770 +step:5548 train loss:3.545765 +step:5549 train loss:3.475227 +step:5550 train loss:3.523960 +step:5551 train loss:3.529964 +step:5552 train loss:3.542184 +step:5553 train loss:3.552003 +step:5554 train loss:3.568372 +step:5555 train loss:3.556358 +step:5556 train loss:3.534497 +step:5557 train loss:3.524426 +step:5558 train loss:3.542169 +step:5559 train loss:3.555101 +step:5560 train loss:3.505170 +step:5561 train loss:3.541690 +step:5562 train loss:3.548858 +step:5563 train loss:3.579253 +step:5564 train loss:3.594227 +step:5565 train loss:3.511942 +step:5566 train loss:3.534961 +step:5567 train loss:3.520051 +step:5568 train loss:3.553710 +step:5569 train loss:3.554121 +step:5570 train loss:3.566709 +step:5571 train loss:3.564018 +step:5572 train loss:3.544914 +step:5573 train loss:3.565380 +step:5574 train loss:3.564820 +step:5575 train loss:3.514825 +step:5576 train loss:3.524721 +step:5577 train loss:3.571361 +step:5578 train loss:3.539723 +step:5579 train loss:3.543506 +step:5580 train loss:3.565885 +step:5581 train loss:3.517079 +step:5582 train loss:3.510236 +step:5583 train loss:3.528304 +step:5584 train loss:3.559290 +step:5585 train loss:3.510606 +step:5586 train loss:3.559575 +step:5587 train loss:3.550990 +step:5588 train loss:3.529819 +step:5589 train loss:3.529815 +step:5590 train loss:3.508686 +step:5591 train loss:3.537285 +step:5592 train loss:3.590363 +step:5593 train loss:3.599896 +step:5594 train loss:3.553715 +step:5595 train loss:3.531911 +step:5596 train loss:3.589642 +step:5597 train loss:3.563569 +step:5598 train loss:3.576603 +step:5599 train loss:3.679790 +step:5600 train loss:3.575421 +step:5601 train loss:3.622141 +step:5602 train loss:3.537516 +step:5603 train loss:3.561168 +step:5604 train loss:3.489781 +step:5605 train loss:3.484777 +step:5606 train loss:3.501907 +step:5607 train loss:3.512368 +step:5608 train loss:3.585386 +step:5609 train loss:3.583607 +step:5610 train loss:3.612877 +step:5611 train loss:3.650164 +step:5612 train loss:3.577545 +step:5613 train loss:3.541503 +step:5614 train loss:3.559062 +step:5615 train loss:3.614121 +step:5616 train loss:3.537030 +step:5617 train loss:3.496174 +step:5618 train loss:3.512943 +step:5619 train loss:3.565348 +step:5620 train loss:3.553025 +step:5621 train loss:3.573443 +step:5622 train loss:3.529375 +step:5623 train loss:3.586004 +step:5624 train loss:3.522103 +step:5625 train loss:3.550650 +step:5626 train loss:3.546384 +step:5627 train loss:3.566855 +step:5628 train loss:3.580970 +step:5629 train loss:3.562775 +step:5630 train loss:3.554580 +step:5631 train loss:3.570450 +step:5632 train loss:3.602565 +step:5633 train loss:3.554849 +step:5634 train loss:3.533744 +step:5635 train loss:3.555842 +step:5636 train loss:3.569889 +step:5637 train loss:3.530153 +step:5638 train loss:3.521234 +step:5639 train loss:3.498609 +step:5640 train loss:3.535666 +step:5641 train loss:3.589491 +step:5642 train loss:3.534085 +step:5643 train loss:3.551196 +step:5644 train loss:3.586079 +step:5645 train loss:3.543056 +step:5646 train loss:3.541471 +step:5647 train loss:3.498172 +step:5648 train loss:3.569736 +step:5649 train loss:3.576782 +step:5650 train loss:3.529068 +step:5651 train loss:3.621612 +step:5652 train loss:3.503749 
+step:5653 train loss:3.501328 +step:5654 train loss:3.560187 +step:5655 train loss:3.553608 +step:5656 train loss:3.577057 +step:5657 train loss:3.549403 +step:5658 train loss:3.569654 +step:5659 train loss:3.580075 +step:5660 train loss:3.517797 +step:5661 train loss:3.536964 +step:5662 train loss:3.646167 +step:5663 train loss:3.553059 +step:5664 train loss:3.538600 +step:5665 train loss:3.547686 +step:5666 train loss:3.527976 +step:5667 train loss:3.527220 +step:5668 train loss:3.528460 +step:5669 train loss:3.559705 +step:5670 train loss:3.555472 +step:5671 train loss:3.518673 +step:5672 train loss:3.540365 +step:5673 train loss:3.518256 +step:5674 train loss:3.539639 +step:5675 train loss:3.529096 +step:5676 train loss:3.523872 +step:5677 train loss:3.535554 +step:5678 train loss:3.552226 +step:5679 train loss:3.534442 +step:5680 train loss:3.512393 +step:5681 train loss:3.557866 +step:5682 train loss:3.569285 +step:5683 train loss:3.529076 +step:5684 train loss:3.588722 +step:5685 train loss:3.628668 +step:5686 train loss:3.508574 +step:5687 train loss:3.528970 +step:5688 train loss:3.527048 +step:5689 train loss:3.578409 +step:5690 train loss:3.489660 +step:5691 train loss:3.495231 +step:5692 train loss:3.525804 +step:5693 train loss:3.475821 +step:5694 train loss:3.552137 +step:5695 train loss:3.533600 +step:5696 train loss:3.477215 +step:5697 train loss:3.543788 +step:5698 train loss:3.542939 +step:5699 train loss:3.582740 +step:5700 train loss:3.521954 +step:5701 train loss:3.566973 +step:5702 train loss:3.525402 +step:5703 train loss:3.551947 +step:5704 train loss:3.520215 +step:5705 train loss:3.546726 +step:5706 train loss:3.542755 +step:5707 train loss:3.556451 +step:5708 train loss:3.525475 +step:5709 train loss:3.604903 +step:5710 train loss:3.595911 +step:5711 train loss:3.519878 +step:5712 train loss:3.553348 +step:5713 train loss:3.526286 +step:5714 train loss:3.533509 +step:5715 train loss:3.546642 +step:5716 train loss:3.555306 +step:5717 train loss:3.574327 +step:5718 train loss:3.523174 +step:5719 train loss:3.530955 +step:5720 train loss:3.516142 +step:5721 train loss:3.501502 +step:5722 train loss:3.582281 +step:5723 train loss:3.515442 +step:5724 train loss:3.493476 +step:5725 train loss:3.579495 +step:5726 train loss:3.561357 +step:5727 train loss:3.518947 +step:5728 train loss:3.560873 +step:5729 train loss:3.565707 +step:5730 train loss:3.540081 +step:5731 train loss:3.492289 +step:5732 train loss:3.525393 +step:5733 train loss:3.533978 +step:5734 train loss:3.519211 +step:5735 train loss:3.640598 +step:5736 train loss:3.504120 +step:5737 train loss:3.539780 +step:5738 train loss:3.553533 +step:5739 train loss:3.536766 +step:5740 train loss:3.619277 +step:5741 train loss:3.534132 +step:5742 train loss:3.501120 +step:5743 train loss:3.512615 +step:5744 train loss:3.526773 +step:5745 train loss:3.525467 +step:5746 train loss:3.590578 +step:5747 train loss:3.499132 +step:5748 train loss:3.530677 +step:5749 train loss:3.498767 +step:5750 validation loss:3.483540 +step:5750 train loss:3.516129 +step:5751 train loss:3.548018 +step:5752 train loss:3.573988 +step:5753 train loss:3.525677 +step:5754 train loss:3.570725 +step:5755 train loss:3.530690 +step:5756 train loss:3.602773 +step:5757 train loss:3.477572 +step:5758 train loss:3.530454 +step:5759 train loss:3.536492 +step:5760 train loss:3.529325 +step:5761 train loss:3.612748 +step:5762 train loss:3.587824 +step:5763 train loss:3.592262 +step:5764 train loss:3.545552 +step:5765 train loss:3.483475 +step:5766 
train loss:3.556936 +step:5767 train loss:3.504029 +step:5768 train loss:3.576380 +step:5769 train loss:3.586273 +step:5770 train loss:3.557316 +step:5771 train loss:3.570731 +step:5772 train loss:3.523857 +step:5773 train loss:3.437634 +step:5774 train loss:3.492916 +step:5775 train loss:3.488695 +step:5776 train loss:3.550038 +step:5777 train loss:3.599625 +step:5778 train loss:3.658937 +step:5779 train loss:3.609995 +step:5780 train loss:3.601769 +step:5781 train loss:3.609146 +step:5782 train loss:3.561434 +step:5783 train loss:3.534641 +step:5784 train loss:3.501665 +step:5785 train loss:3.543971 +step:5786 train loss:3.544513 +step:5787 train loss:3.508789 +step:5788 train loss:3.553348 +step:5789 train loss:3.544781 +step:5790 train loss:3.497749 +step:5791 train loss:3.553320 +step:5792 train loss:3.577351 +step:5793 train loss:3.533934 +step:5794 train loss:3.554269 +step:5795 train loss:3.541332 +step:5796 train loss:3.558854 +step:5797 train loss:3.556148 +step:5798 train loss:3.590931 +step:5799 train loss:3.525201 +step:5800 train loss:3.551817 +step:5801 train loss:3.529136 +step:5802 train loss:3.554276 +step:5803 train loss:3.529695 +step:5804 train loss:3.553573 +step:5805 train loss:3.520713 +step:5806 train loss:3.580498 +step:5807 train loss:3.524446 +step:5808 train loss:3.542098 +step:5809 train loss:3.485914 +step:5810 train loss:3.603656 +step:5811 train loss:3.559630 +step:5812 train loss:3.594245 +step:5813 train loss:3.556581 +step:5814 train loss:3.699866 +step:5815 train loss:3.628171 +step:5816 train loss:3.556277 +step:5817 train loss:3.579229 +step:5818 train loss:3.483702 +step:5819 train loss:3.535617 +step:5820 train loss:3.582551 +step:5821 train loss:3.502851 +step:5822 train loss:3.591119 +step:5823 train loss:3.547632 +step:5824 train loss:3.536560 +step:5825 train loss:3.522714 +step:5826 train loss:3.590214 +step:5827 train loss:3.519613 +step:5828 train loss:3.555205 +step:5829 train loss:3.566214 +step:5830 train loss:3.555525 +step:5831 train loss:3.581226 +step:5832 train loss:3.458179 +step:5833 train loss:3.601235 +step:5834 train loss:3.571286 +step:5835 train loss:3.516663 +step:5836 train loss:3.573859 +step:5837 train loss:3.544375 +step:5838 train loss:3.539298 +step:5839 train loss:3.536155 +step:5840 train loss:3.615239 +step:5841 train loss:3.600854 +step:5842 train loss:3.567111 +step:5843 train loss:3.520991 +step:5844 train loss:3.526638 +step:5845 train loss:3.567266 +step:5846 train loss:3.537404 +step:5847 train loss:3.502324 +step:5848 train loss:3.553780 +step:5849 train loss:3.492780 +step:5850 train loss:3.572246 +step:5851 train loss:3.604692 +step:5852 train loss:3.586311 +step:5853 train loss:3.573370 +step:5854 train loss:3.562050 +step:5855 train loss:3.528935 +step:5856 train loss:3.559387 +step:5857 train loss:3.590936 +step:5858 train loss:3.542315 +step:5859 train loss:3.563429 +step:5860 train loss:3.545700 +step:5861 train loss:3.545616 +step:5862 train loss:3.508462 +step:5863 train loss:3.521264 +step:5864 train loss:3.568329 +step:5865 train loss:3.518741 +step:5866 train loss:3.501588 +step:5867 train loss:3.522023 +step:5868 train loss:3.518535 +step:5869 train loss:3.532128 +step:5870 train loss:3.534081 +step:5871 train loss:3.555444 +step:5872 train loss:3.525571 +step:5873 train loss:3.498442 +step:5874 train loss:3.534504 +step:5875 train loss:3.525629 +step:5876 train loss:3.556529 +step:5877 train loss:3.519150 +step:5878 train loss:3.554999 +step:5879 train loss:3.554057 +step:5880 train loss:3.556037 
+step:5881 train loss:3.576347 +step:5882 train loss:3.508660 +step:5883 train loss:3.561274 +step:5884 train loss:3.555926 +step:5885 train loss:3.562824 +step:5886 train loss:3.562953 +step:5887 train loss:3.536098 +step:5888 train loss:3.569908 +step:5889 train loss:3.549192 +step:5890 train loss:3.505523 +step:5891 train loss:3.490538 +step:5892 train loss:3.550565 +step:5893 train loss:3.528942 +step:5894 train loss:3.506779 +step:5895 train loss:3.566859 +step:5896 train loss:3.556305 +step:5897 train loss:3.513882 +step:5898 train loss:3.558907 +step:5899 train loss:3.525539 +step:5900 train loss:3.554125 +step:5901 train loss:3.509266 +step:5902 train loss:3.524348 +step:5903 train loss:3.619883 +step:5904 train loss:3.548253 +step:5905 train loss:3.572003 +step:5906 train loss:3.513686 +step:5907 train loss:3.545250 +step:5908 train loss:3.498116 +step:5909 train loss:3.530073 +step:5910 train loss:3.491201 +step:5911 train loss:3.612869 +step:5912 train loss:3.635805 +step:5913 train loss:3.541224 +step:5914 train loss:3.512217 +step:5915 train loss:3.512331 +step:5916 train loss:3.533448 +step:5917 train loss:3.512860 +step:5918 train loss:3.487487 +step:5919 train loss:3.543612 +step:5920 train loss:3.506070 +step:5921 train loss:3.493075 +step:5922 train loss:3.514207 +step:5923 train loss:3.554939 +step:5924 train loss:3.564753 +step:5925 train loss:3.595530 +step:5926 train loss:3.524518 +step:5927 train loss:3.533434 +step:5928 train loss:3.558856 +step:5929 train loss:3.545827 +step:5930 train loss:3.551872 +step:5931 train loss:3.563438 +step:5932 train loss:3.557755 +step:5933 train loss:3.602512 +step:5934 train loss:3.516463 +step:5935 train loss:3.507221 +step:5936 train loss:3.511756 +step:5937 train loss:3.488520 +step:5938 train loss:3.534990 +step:5939 train loss:3.560006 +step:5940 train loss:3.583332 +step:5941 train loss:3.529553 +step:5942 train loss:3.581954 +step:5943 train loss:3.545784 +step:5944 train loss:3.516325 +step:5945 train loss:3.524402 +step:5946 train loss:3.514160 +step:5947 train loss:3.507202 +step:5948 train loss:3.589014 +step:5949 train loss:3.528841 +step:5950 train loss:3.538020 +step:5951 train loss:3.582899 +step:5952 train loss:3.453796 +step:5953 train loss:3.591879 +step:5954 train loss:3.503723 +step:5955 train loss:3.487537 +step:5956 train loss:3.557084 +step:5957 train loss:3.499596 +step:5958 train loss:3.565370 +step:5959 train loss:3.489946 +step:5960 train loss:3.510189 +step:5961 train loss:3.514797 +step:5962 train loss:3.509010 +step:5963 train loss:3.587682 +step:5964 train loss:3.528950 +step:5965 train loss:3.561507 +step:5966 train loss:3.523018 +step:5967 train loss:3.519557 +step:5968 train loss:3.534414 +step:5969 train loss:3.538957 +step:5970 train loss:3.558494 +step:5971 train loss:3.532848 +step:5972 train loss:3.542721 +step:5973 train loss:3.555061 +step:5974 train loss:3.527312 +step:5975 train loss:3.539885 +step:5976 train loss:3.549209 +step:5977 train loss:3.507076 +step:5978 train loss:3.592571 +step:5979 train loss:3.652638 +step:5980 train loss:3.586233 +step:5981 train loss:3.547404 +step:5982 train loss:3.572955 +step:5983 train loss:3.535101 +step:5984 train loss:3.521591 +step:5985 train loss:3.562202 +step:5986 train loss:3.541497 +step:5987 train loss:3.563255 +step:5988 train loss:3.502376 +step:5989 train loss:3.542057 +step:5990 train loss:3.540565 +step:5991 train loss:3.587505 +step:5992 train loss:3.563235 +step:5993 train loss:3.576772 +step:5994 train loss:3.589018 +step:5995 train 
loss:3.542649 +step:5996 train loss:3.521893 +step:5997 train loss:3.486563 +step:5998 train loss:3.571327 +step:5999 train loss:3.536786 +step:6000 validation loss:3.483251 total_sharp:-3.4348e-02 L1_sharp:-3.6372e-01 L2_sharp:-1.2766e-01 L3_sharp:5.3282e-03 L4_sharp:1.9999e-03 L5_sharp:1.4085e-03 L6_sharp:1.3306e-03 L7_sharp:1.2467e-03 L8_sharp:1.1870e-03 L9_sharp:7.6710e-04 L10_sharp:4.6492e-04 L11_sharp:4.7101e-04 L12_sharp:2.9391e-04 total_fnorm:2.2810e+00 total_l1_linf:1.9264e+04 total_spectral:2.2810e+00 L1_fnorm:5.2653e-01 L2_fnorm:3.1591e-01 L3_fnorm:3.5912e-01 L4_fnorm:4.5442e-01 L5_fnorm:5.5599e-01 L6_fnorm:5.8218e-01 L7_fnorm:5.9514e-01 L8_fnorm:5.9573e-01 L9_fnorm:5.9564e-01 L10_fnorm:5.9340e-01 L11_fnorm:5.7462e-01 L12_fnorm:5.9866e-01 L1_l1linf:3.6375e-01 L2_l1linf:5.1046e-01 L3_l1linf:4.6346e-01 L4_l1linf:4.6964e-01 L5_l1linf:4.4412e-01 L6_l1linf:4.1468e-01 L7_l1linf:3.9861e-01 L8_l1linf:4.0318e-01 L9_l1linf:4.0658e-01 L10_l1linf:4.2811e-01 L11_l1linf:4.4540e-01 L12_l1linf:4.4376e-01 L1_spectral:1.2042e-02 L2_spectral:1.4502e-02 L3_spectral:1.3494e-02 L4_spectral:1.7228e-02 L5_spectral:1.2042e-02 L6_spectral:1.2045e-02 L7_spectral:1.2046e-02 L8_spectral:1.2050e-02 L9_spectral:1.2055e-02 L10_spectral:1.2046e-02 L11_spectral:1.2043e-02 L12_spectral:1.2046e-02 v_norm:2.2810e+00 cos_v_-g_hvp:2.0712e-02 g_hvp_norm:8.7969e-01 cos_v_-g_t:2.4786e-02 g_t_norm:8.5288e-01 hv_norm:7.1840e+01 cos_v_hv:-1.0906e-03 hg_norm:3.4171e+04 cos_g_hg:-6.4804e-02 v_par:4.2806e-03 v_perp:2.2810e+00 L1_cos_v_neg_g:1.1821e-02 L1_v_norm:5.2653e-01 L2_cos_v_neg_g:2.0406e-02 L2_v_norm:3.1591e-01 L3_cos_v_neg_g:2.8465e-02 L3_v_norm:3.5912e-01 L4_cos_v_neg_g:2.2806e-02 L4_v_norm:4.5442e-01 L5_cos_v_neg_g:2.5838e-02 L5_v_norm:5.5599e-01 L6_cos_v_neg_g:2.6333e-02 L6_v_norm:5.8218e-01 L7_cos_v_neg_g:2.7641e-02 L7_v_norm:5.9514e-01 L8_cos_v_neg_g:2.7342e-02 L8_v_norm:5.9573e-01 L9_cos_v_neg_g:2.7362e-02 L9_v_norm:5.9564e-01 L10_cos_v_neg_g:3.0214e-02 L10_v_norm:5.9340e-01 L11_cos_v_neg_g:3.9556e-02 L11_v_norm:5.7462e-01 L12_cos_v_neg_g:7.2232e-02 L12_v_norm:5.9866e-01 +step:6000 train loss:3.521019 +step:6001 train loss:3.546093 +step:6002 train loss:3.547477 +step:6003 train loss:3.480817 +step:6004 train loss:3.425747 +step:6005 train loss:3.462790 +step:6006 train loss:3.553807 +step:6007 train loss:3.507946 +step:6008 train loss:3.553906 +step:6009 train loss:3.590485 +step:6010 train loss:3.554718 +step:6011 train loss:3.542720 +step:6012 train loss:3.529660 +step:6013 train loss:3.548699 +step:6014 train loss:3.499265 +step:6015 train loss:3.476892 +step:6016 train loss:3.517589 +step:6017 train loss:3.533683 +step:6018 train loss:3.565385 +step:6019 train loss:3.516715 +step:6020 train loss:3.567966 +step:6021 train loss:3.548770 +step:6022 train loss:3.555974 +step:6023 train loss:3.525639 +step:6024 train loss:3.605767 +step:6025 train loss:3.508465 +step:6026 train loss:3.575412 +step:6027 train loss:3.519005 +step:6028 train loss:3.571809 +step:6029 train loss:3.647293 +step:6030 train loss:3.523807 +step:6031 train loss:3.506591 +step:6032 train loss:3.582724 +step:6033 train loss:3.547916 +step:6034 train loss:3.551654 +step:6035 train loss:3.607377 +step:6036 train loss:3.556692 +step:6037 train loss:3.557469 +step:6038 train loss:3.538903 +step:6039 train loss:3.603993 +step:6040 train loss:3.531894 +step:6041 train loss:3.603481 +step:6042 train loss:3.537251 +step:6043 train loss:3.662099 +step:6044 train loss:3.695001 +step:6045 train loss:3.657463 +step:6046 train loss:3.693570 +step:6047 
train loss:3.845282 +step:6048 train loss:3.642018 +step:6049 train loss:3.647670 +step:6050 train loss:3.620564 +step:6051 train loss:3.592863 +step:6052 train loss:3.657116 +step:6053 train loss:3.602002 +step:6054 train loss:3.628940 +step:6055 train loss:3.743039 +step:6056 train loss:3.741454 +step:6057 train loss:3.535033 +step:6058 train loss:3.589305 +step:6059 train loss:3.627773 +step:6060 train loss:3.609757 +step:6061 train loss:3.614861 +step:6062 train loss:3.587013 +step:6063 train loss:3.583443 +step:6064 train loss:3.539121 +step:6065 train loss:3.599942 +step:6066 train loss:3.584822 +step:6067 train loss:3.588873 +step:6068 train loss:3.505200 +step:6069 train loss:3.622965 +step:6070 train loss:3.556396 +step:6071 train loss:3.594147 +step:6072 train loss:3.635123 +step:6073 train loss:3.586207 +step:6074 train loss:3.566941 +step:6075 train loss:3.662916 +step:6076 train loss:3.589790 +step:6077 train loss:3.508633 +step:6078 train loss:3.584671 +step:6079 train loss:3.593064 +step:6080 train loss:3.609683 +step:6081 train loss:3.602449 +step:6082 train loss:3.555479 +step:6083 train loss:3.587342 +step:6084 train loss:3.528195 +step:6085 train loss:3.550541 +step:6086 train loss:3.580604 +step:6087 train loss:3.560899 +step:6088 train loss:3.529449 +step:6089 train loss:3.499769 +step:6090 train loss:3.568560 +step:6091 train loss:3.550709 +step:6092 train loss:3.535491 +step:6093 train loss:3.556915 +step:6094 train loss:3.591672 +step:6095 train loss:3.585340 +step:6096 train loss:3.572983 +step:6097 train loss:3.543414 +step:6098 train loss:3.573889 +step:6099 train loss:3.545001 +step:6100 train loss:3.544730 +step:6101 train loss:3.623191 +step:6102 train loss:3.576289 +step:6103 train loss:3.577337 +step:6104 train loss:3.538291 +step:6105 train loss:3.609167 +step:6106 train loss:3.584063 +step:6107 train loss:3.594041 +step:6108 train loss:3.622460 +step:6109 train loss:3.621800 +step:6110 train loss:3.630355 +step:6111 train loss:3.679808 +step:6112 train loss:3.817270 +step:6113 train loss:3.560670 +step:6114 train loss:3.633866 +step:6115 train loss:3.580989 +step:6116 train loss:3.595060 +step:6117 train loss:3.557544 +step:6118 train loss:3.577292 +step:6119 train loss:3.594213 +step:6120 train loss:3.570484 +step:6121 train loss:3.608331 +step:6122 train loss:3.560229 +step:6123 train loss:3.584952 +step:6124 train loss:3.603157 +step:6125 train loss:3.568622 +step:6126 train loss:3.575549 +step:6127 train loss:3.602181 +step:6128 train loss:3.556205 +step:6129 train loss:3.736848 +step:6130 train loss:3.789392 +step:6131 train loss:3.554239 +step:6132 train loss:3.599663 +step:6133 train loss:3.622182 +step:6134 train loss:3.619867 +step:6135 train loss:3.644035 +step:6136 train loss:3.642533 +step:6137 train loss:3.669264 +step:6138 train loss:3.574571 +step:6139 train loss:3.554780 +step:6140 train loss:3.556849 +step:6141 train loss:3.608192 +step:6142 train loss:3.570487 +step:6143 train loss:3.679857 +step:6144 train loss:3.682115 +step:6145 train loss:3.538499 +step:6146 train loss:3.521582 +step:6147 train loss:3.577505 +step:6148 train loss:3.512257 +step:6149 train loss:3.589809 +step:6150 train loss:3.582911 +step:6151 train loss:3.568197 +step:6152 train loss:3.540233 +step:6153 train loss:3.554245 +step:6154 train loss:3.601419 +step:6155 train loss:3.618001 +step:6156 train loss:3.568829 +step:6157 train loss:3.600153 +step:6158 train loss:3.615052 +step:6159 train loss:3.564007 +step:6160 train loss:3.573931 +step:6161 train loss:3.581201 
+step:6162 train loss:3.555426 +step:6163 train loss:3.565652 +step:6164 train loss:3.645424 +step:6165 train loss:3.630449 +step:6166 train loss:3.598718 +step:6167 train loss:3.529099 +step:6168 train loss:3.569593 +step:6169 train loss:3.575880 +step:6170 train loss:3.583853 +step:6171 train loss:3.537240 +step:6172 train loss:3.581414 +step:6173 train loss:3.550630 +step:6174 train loss:3.552373 +step:6175 train loss:3.572992 +step:6176 train loss:3.585387 +step:6177 train loss:3.545707 +step:6178 train loss:3.547379 +step:6179 train loss:3.556148 +step:6180 train loss:3.577281 +step:6181 train loss:3.540519 +step:6182 train loss:3.563584 +step:6183 train loss:3.522620 +step:6184 train loss:3.516731 +step:6185 train loss:3.565436 +step:6186 train loss:3.533746 +step:6187 train loss:3.555140 +step:6188 train loss:3.559558 +step:6189 train loss:3.528758 +step:6190 train loss:3.562290 +step:6191 train loss:3.535972 +step:6192 train loss:3.581938 +step:6193 train loss:3.588669 +step:6194 train loss:3.521831 +step:6195 train loss:3.472608 +step:6196 train loss:3.567264 +step:6197 train loss:3.535603 +step:6198 train loss:3.582633 +step:6199 train loss:3.570073 +step:6200 train loss:3.553021 +step:6201 train loss:3.523996 +step:6202 train loss:3.564732 +step:6203 train loss:3.585005 +step:6204 train loss:3.516955 +step:6205 train loss:3.515654 +step:6206 train loss:3.558022 +step:6207 train loss:3.560861 +step:6208 train loss:3.565597 +step:6209 train loss:3.510592 +step:6210 train loss:3.551093 +step:6211 train loss:3.502603 +step:6212 train loss:3.546803 +step:6213 train loss:3.527948 +step:6214 train loss:3.708030 +step:6215 train loss:3.537876 +step:6216 train loss:3.547582 +step:6217 train loss:3.531531 +step:6218 train loss:3.503582 +step:6219 train loss:3.486575 +step:6220 train loss:3.534394 +step:6221 train loss:3.569038 +step:6222 train loss:3.550379 +step:6223 train loss:3.541121 +step:6224 train loss:3.529353 +step:6225 train loss:3.552122 +step:6226 train loss:3.547127 +step:6227 train loss:3.509908 +step:6228 train loss:3.525557 +step:6229 train loss:3.538934 +step:6230 train loss:3.592907 +step:6231 train loss:3.540370 +step:6232 train loss:3.549416 +step:6233 train loss:3.595488 +step:6234 train loss:3.570079 +step:6235 train loss:3.523276 +step:6236 train loss:3.514448 +step:6237 train loss:3.532444 +step:6238 train loss:3.556196 +step:6239 train loss:3.572615 +step:6240 train loss:3.545098 +step:6241 train loss:3.509012 +step:6242 train loss:3.582296 +step:6243 train loss:3.528738 +step:6244 train loss:3.571808 +step:6245 train loss:3.477285 +step:6246 train loss:3.487746 +step:6247 train loss:3.574398 +step:6248 train loss:3.539684 +step:6249 train loss:3.530899 +step:6250 validation loss:3.493958 +step:6250 train loss:3.564948 +step:6251 train loss:3.611666 +step:6252 train loss:3.720679 +step:6253 train loss:3.517549 +step:6254 train loss:3.504532 +step:6255 train loss:3.573381 +step:6256 train loss:3.574557 +step:6257 train loss:3.561750 +step:6258 train loss:3.549526 +step:6259 train loss:3.545053 +step:6260 train loss:3.595982 +step:6261 train loss:3.537618 +step:6262 train loss:3.612628 +step:6263 train loss:3.505574 +step:6264 train loss:3.675745 +step:6265 train loss:3.664463 +step:6266 train loss:3.558992 +step:6267 train loss:3.510538 +step:6268 train loss:3.573758 +step:6269 train loss:3.569716 +step:6270 train loss:3.584959 +step:6271 train loss:3.556547 +step:6272 train loss:3.524834 +step:6273 train loss:3.466510 +step:6274 train loss:3.528469 +step:6275 
train loss:3.483912 +step:6276 train loss:3.521188 +step:6277 train loss:3.482311 +step:6278 train loss:3.537474 +step:6279 train loss:3.522617 +step:6280 train loss:3.526217 +step:6281 train loss:3.525353 +step:6282 train loss:3.673832 +step:6283 train loss:3.781605 +step:6284 train loss:3.486960 +step:6285 train loss:3.500911 +step:6286 train loss:3.534141 +step:6287 train loss:3.519439 +step:6288 train loss:3.529176 +step:6289 train loss:3.519812 +step:6290 train loss:3.561505 +step:6291 train loss:3.525731 +step:6292 train loss:3.571421 +step:6293 train loss:3.547193 +step:6294 train loss:3.540573 +step:6295 train loss:3.540990 +step:6296 train loss:3.524485 +step:6297 train loss:3.524569 +step:6298 train loss:3.478691 +step:6299 train loss:3.552808 +step:6300 train loss:3.513514 +step:6301 train loss:3.544964 +step:6302 train loss:3.558638 +step:6303 train loss:3.549481 +step:6304 train loss:3.518201 +step:6305 train loss:3.508672 +step:6306 train loss:3.570909 +step:6307 train loss:3.621665 +step:6308 train loss:3.554135 +step:6309 train loss:3.626230 +step:6310 train loss:3.601428 +step:6311 train loss:3.519169 +step:6312 train loss:3.610230 +step:6313 train loss:3.568213 +step:6314 train loss:3.505459 +step:6315 train loss:3.495188 +step:6316 train loss:3.554488 +step:6317 train loss:3.500572 +step:6318 train loss:3.511379 +step:6319 train loss:3.552915 +step:6320 train loss:3.508051 +step:6321 train loss:3.554918 +step:6322 train loss:3.591409 +step:6323 train loss:3.582785 +step:6324 train loss:3.528076 +step:6325 train loss:3.555759 +step:6326 train loss:3.558618 +step:6327 train loss:3.553248 +step:6328 train loss:3.539469 +step:6329 train loss:3.556307 +step:6330 train loss:3.586234 +step:6331 train loss:3.592960 +step:6332 train loss:3.549149 +step:6333 train loss:3.562289 +step:6334 train loss:3.523831 +step:6335 train loss:3.530053 +step:6336 train loss:3.567656 +step:6337 train loss:3.556815 +step:6338 train loss:3.559405 +step:6339 train loss:3.576334 +step:6340 train loss:3.581504 +step:6341 train loss:3.534734 +step:6342 train loss:3.622254 +step:6343 train loss:3.586425 +step:6344 train loss:3.498583 +step:6345 train loss:3.534072 +step:6346 train loss:3.518177 +step:6347 train loss:3.576397 +step:6348 train loss:3.567747 +step:6349 train loss:3.543982 +step:6350 train loss:3.607407 +step:6351 train loss:3.532468 +step:6352 train loss:3.533029 +step:6353 train loss:3.517897 +step:6354 train loss:3.562939 +step:6355 train loss:3.468935 +step:6356 train loss:3.471465 +step:6357 train loss:3.499480 +step:6358 train loss:3.561413 +step:6359 train loss:3.560457 +step:6360 train loss:3.650489 +step:6361 train loss:3.520086 +step:6362 train loss:3.600054 +step:6363 train loss:3.551220 +step:6364 train loss:3.522993 +step:6365 train loss:3.589774 +step:6366 train loss:3.546746 +step:6367 train loss:3.521789 +step:6368 train loss:3.546209 +step:6369 train loss:3.532439 +step:6370 train loss:3.526694 +step:6371 train loss:3.518584 +step:6372 train loss:3.510372 +step:6373 train loss:3.638456 +step:6374 train loss:3.570828 +step:6375 train loss:3.528941 +step:6376 train loss:3.589690 +step:6377 train loss:3.562257 +step:6378 train loss:3.561030 +step:6379 train loss:3.550477 +step:6380 train loss:3.542790 +step:6381 train loss:3.556023 +step:6382 train loss:3.553869 +step:6383 train loss:3.491175 +step:6384 train loss:3.476971 +step:6385 train loss:3.499666 +step:6386 train loss:3.588324 +step:6387 train loss:3.551017 +step:6388 train loss:3.574569 +step:6389 train loss:3.549650 
+step:6390 train loss:3.514291 +step:6391 train loss:3.510496 +step:6392 train loss:3.568230 +step:6393 train loss:3.604227 +step:6394 train loss:3.793873 +step:6395 train loss:3.558627 +step:6396 train loss:3.562293 +step:6397 train loss:3.595269 +step:6398 train loss:3.531869 +step:6399 train loss:3.572833 +step:6400 train loss:3.520408 +step:6401 train loss:3.524315 +step:6402 train loss:3.607709 +step:6403 train loss:3.545779 +step:6404 train loss:3.541041 +step:6405 train loss:3.561421 +step:6406 train loss:3.630013 +step:6407 train loss:3.602768 +step:6408 train loss:3.519094 +step:6409 train loss:3.555853 +step:6410 train loss:3.565093 +step:6411 train loss:3.580595 +step:6412 train loss:3.548140 +step:6413 train loss:3.565711 +step:6414 train loss:3.541820 +step:6415 train loss:3.533480 +step:6416 train loss:3.590406 +step:6417 train loss:3.595262 +step:6418 train loss:3.589109 +step:6419 train loss:3.528522 +step:6420 train loss:3.525259 +step:6421 train loss:3.531850 +step:6422 train loss:3.556763 +step:6423 train loss:3.621681 +step:6424 train loss:3.572134 +step:6425 train loss:3.576824 +step:6426 train loss:3.557328 +step:6427 train loss:3.566218 +step:6428 train loss:3.576327 +step:6429 train loss:3.598956 +step:6430 train loss:3.559061 +step:6431 train loss:3.553378 +step:6432 train loss:3.526803 +step:6433 train loss:3.534962 +step:6434 train loss:3.524908 +step:6435 train loss:3.536266 +step:6436 train loss:3.541598 +step:6437 train loss:3.507030 +step:6438 train loss:3.528636 +step:6439 train loss:3.525046 +step:6440 train loss:3.542552 +step:6441 train loss:3.509146 +step:6442 train loss:3.562171 +step:6443 train loss:3.550618 +step:6444 train loss:3.521431 +step:6445 train loss:3.562354 +step:6446 train loss:3.745732 +step:6447 train loss:3.544993 +step:6448 train loss:3.528333 +step:6449 train loss:3.599244 +step:6450 train loss:3.555014 +step:6451 train loss:3.522789 +step:6452 train loss:3.521920 +step:6453 train loss:3.581845 +step:6454 train loss:3.584928 +step:6455 train loss:3.519272 +step:6456 train loss:3.565003 +step:6457 train loss:3.534303 +step:6458 train loss:3.520293 +step:6459 train loss:3.517639 +step:6460 train loss:3.556118 +step:6461 train loss:3.581601 +step:6462 train loss:3.527227 +step:6463 train loss:3.537852 +step:6464 train loss:3.580569 +step:6465 train loss:3.561125 +step:6466 train loss:3.537021 +step:6467 train loss:3.610813 +step:6468 train loss:3.553938 +step:6469 train loss:3.523368 +step:6470 train loss:3.546044 +step:6471 train loss:3.595085 +step:6472 train loss:3.553571 +step:6473 train loss:3.547144 +step:6474 train loss:3.488024 +step:6475 train loss:3.541999 +step:6476 train loss:3.558002 +step:6477 train loss:3.562558 +step:6478 train loss:3.513818 +step:6479 train loss:3.533679 +step:6480 train loss:3.567120 +step:6481 train loss:3.523113 +step:6482 train loss:3.527542 +step:6483 train loss:3.486851 +step:6484 train loss:3.501259 +step:6485 train loss:3.588251 +step:6486 train loss:3.509201 +step:6487 train loss:3.505979 +step:6488 train loss:3.604528 +step:6489 train loss:3.466080 +step:6490 train loss:3.517053 +step:6491 train loss:3.518875 +step:6492 train loss:3.581019 +step:6493 train loss:3.612685 +step:6494 train loss:3.538496 +step:6495 train loss:3.507011 +step:6496 train loss:3.548573 +step:6497 train loss:3.534502 +step:6498 train loss:3.559873 +step:6499 train loss:3.577017 +step:6500 validation loss:3.472382 total_sharp:3.4089e-03 L1_sharp:-1.0306e-02 L2_sharp:3.2723e-03 L3_sharp:3.5077e-03 L4_sharp:2.3587e-03 
L5_sharp:1.3930e-03 L6_sharp:1.4619e-03 L7_sharp:1.1301e-03 L8_sharp:1.1004e-03 L9_sharp:9.1061e-04 L10_sharp:4.7028e-04 L11_sharp:5.4211e-04 L12_sharp:6.8673e-04 total_fnorm:2.2745e+00 total_l1_linf:1.9245e+04 total_spectral:2.2745e+00 L1_fnorm:5.0394e-01 L2_fnorm:3.5070e-01 L3_fnorm:3.7823e-01 L4_fnorm:4.3153e-01 L5_fnorm:5.2881e-01 L6_fnorm:5.3767e-01 L7_fnorm:5.9760e-01 L8_fnorm:6.0069e-01 L9_fnorm:5.9584e-01 L10_fnorm:5.9596e-01 L11_fnorm:5.7709e-01 L12_fnorm:5.9861e-01 L1_l1linf:3.8241e-01 L2_l1linf:4.6353e-01 L3_l1linf:3.6498e-01 L4_l1linf:5.7018e-01 L5_l1linf:3.9780e-01 L6_l1linf:4.0478e-01 L7_l1linf:4.0377e-01 L8_l1linf:4.0514e-01 L9_l1linf:4.0648e-01 L10_l1linf:4.1160e-01 L11_l1linf:4.1261e-01 L12_l1linf:4.1586e-01 L1_spectral:1.2035e-02 L2_spectral:1.8096e-02 L3_spectral:1.3372e-02 L4_spectral:2.1427e-02 L5_spectral:1.2045e-02 L6_spectral:1.2552e-02 L7_spectral:1.2048e-02 L8_spectral:1.2046e-02 L9_spectral:1.2059e-02 L10_spectral:1.2051e-02 L11_spectral:1.2051e-02 L12_spectral:1.2044e-02 v_norm:2.2745e+00 cos_v_-g_hvp:2.1187e-02 g_hvp_norm:8.0434e-01 cos_v_-g_t:2.3056e-02 g_t_norm:8.5159e-01 hv_norm:4.7668e+00 cos_v_hv:1.6265e-03 hg_norm:1.5577e+04 cos_g_hg:-1.7237e-01 v_par:5.6389e-03 v_perp:2.2744e+00 L1_cos_v_neg_g:1.0045e-02 L1_v_norm:5.0394e-01 L2_cos_v_neg_g:1.4836e-02 L2_v_norm:3.5070e-01 L3_cos_v_neg_g:2.5310e-02 L3_v_norm:3.7823e-01 L4_cos_v_neg_g:2.4347e-02 L4_v_norm:4.3153e-01 L5_cos_v_neg_g:2.4673e-02 L5_v_norm:5.2881e-01 L6_cos_v_neg_g:2.6210e-02 L6_v_norm:5.3767e-01 L7_cos_v_neg_g:2.5656e-02 L7_v_norm:5.9760e-01 L8_cos_v_neg_g:2.5452e-02 L8_v_norm:6.0069e-01 L9_cos_v_neg_g:2.6680e-02 L9_v_norm:5.9584e-01 L10_cos_v_neg_g:2.8647e-02 L10_v_norm:5.9596e-01 L11_cos_v_neg_g:3.8659e-02 L11_v_norm:5.7709e-01 L12_cos_v_neg_g:7.2294e-02 L12_v_norm:5.9861e-01 +step:6500 train loss:3.601861 +step:6501 train loss:3.520929 +step:6502 train loss:3.539988 +step:6503 train loss:3.534305 +step:6504 train loss:3.642577 +step:6505 train loss:3.529051 +step:6506 train loss:3.525986 +step:6507 train loss:3.573396 +step:6508 train loss:3.523413 +step:6509 train loss:3.585257 +step:6510 train loss:3.557193 +step:6511 train loss:3.544049 +step:6512 train loss:3.554403 +step:6513 train loss:3.537811 +step:6514 train loss:3.576138 +step:6515 train loss:3.594501 +step:6516 train loss:3.544518 +step:6517 train loss:3.512858 +step:6518 train loss:3.564107 +step:6519 train loss:3.495210 +step:6520 train loss:3.505867 +step:6521 train loss:3.555716 +step:6522 train loss:3.574475 +step:6523 train loss:3.531722 +step:6524 train loss:3.530952 +step:6525 train loss:3.503619 +step:6526 train loss:3.660140 +step:6527 train loss:3.590824 +step:6528 train loss:3.532053 +step:6529 train loss:3.501049 +step:6530 train loss:3.550357 +step:6531 train loss:3.557251 +step:6532 train loss:3.495402 +step:6533 train loss:3.545986 +step:6534 train loss:3.495768 +step:6535 train loss:3.513412 +step:6536 train loss:3.519626 +step:6537 train loss:3.538203 +step:6538 train loss:3.523814 +step:6539 train loss:3.544914 +step:6540 train loss:3.528715 +step:6541 train loss:3.590378 +step:6542 train loss:3.567986 +step:6543 train loss:3.554666 +step:6544 train loss:3.487891 +step:6545 train loss:3.466451 +step:6546 train loss:3.502417 +step:6547 train loss:3.544491 +step:6548 train loss:3.577732 +step:6549 train loss:3.520246 +step:6550 train loss:3.534890 +step:6551 train loss:3.493014 +step:6552 train loss:3.486189 +step:6553 train loss:3.542810 +step:6554 train loss:3.608752 +step:6555 train loss:3.586494 +step:6556 
train loss:3.566074 +step:6557 train loss:3.587297 +step:6558 train loss:3.619850 +step:6559 train loss:3.552773 +step:6560 train loss:3.549797 +step:6561 train loss:3.510302 +step:6562 train loss:3.532468 +step:6563 train loss:3.598075 +step:6564 train loss:3.553124 +step:6565 train loss:3.585471 +step:6566 train loss:3.607390 +step:6567 train loss:3.610987 +step:6568 train loss:3.568647 +step:6569 train loss:3.564597 +step:6570 train loss:3.537216 +step:6571 train loss:3.548954 +step:6572 train loss:3.554898 +step:6573 train loss:3.548706 +step:6574 train loss:3.509687 +step:6575 train loss:3.525506 +step:6576 train loss:3.525842 +step:6577 train loss:3.583094 +step:6578 train loss:3.534465 +step:6579 train loss:3.591100 +step:6580 train loss:3.539376 +step:6581 train loss:3.499637 +step:6582 train loss:3.506183 +step:6583 train loss:3.532727 +step:6584 train loss:3.566759 +step:6585 train loss:3.608587 +step:6586 train loss:3.584852 +step:6587 train loss:3.561421 +step:6588 train loss:3.521250 +step:6589 train loss:3.498610 +step:6590 train loss:3.511659 +step:6591 train loss:3.482826 +step:6592 train loss:3.506870 +step:6593 train loss:3.483486 +step:6594 train loss:3.504349 +step:6595 train loss:3.530883 +step:6596 train loss:3.480452 +step:6597 train loss:3.518712 +step:6598 train loss:3.522197 +step:6599 train loss:3.470227 +step:6600 train loss:3.419023 +step:6601 train loss:3.472897 +step:6602 train loss:3.485273 +step:6603 train loss:3.460940 +step:6604 train loss:3.462474 +step:6605 train loss:3.477018 +step:6606 train loss:3.513330 +step:6607 train loss:3.460736 +step:6608 train loss:3.490467 +step:6609 train loss:3.491791 +step:6610 train loss:3.452624 +step:6611 train loss:3.511198 +step:6612 train loss:3.475818 +step:6613 train loss:3.496881 +step:6614 train loss:3.549954 +step:6615 train loss:3.535508 +step:6616 train loss:3.559095 +step:6617 train loss:3.445792 +step:6618 train loss:3.483981 +step:6619 train loss:3.547106 +step:6620 train loss:3.511629 +step:6621 train loss:3.510988 +step:6622 train loss:3.519766 +step:6623 train loss:3.494833 +step:6624 train loss:3.521417 +step:6625 train loss:3.526269 +step:6626 train loss:3.532123 +step:6627 train loss:3.526396 +step:6628 train loss:3.566136 +step:6629 train loss:3.563074 +step:6630 train loss:3.486134 +step:6631 train loss:3.417831 +step:6632 train loss:3.587014 +step:6633 train loss:3.479534 +step:6634 train loss:3.522710 +step:6635 train loss:3.482708 +step:6636 train loss:3.482955 +step:6637 train loss:3.534236 +step:6638 train loss:3.552351 +step:6639 train loss:3.521459 +step:6640 train loss:3.522882 +step:6641 train loss:3.467519 +step:6642 train loss:3.503665 +step:6643 train loss:3.483253 +step:6644 train loss:3.522284 +step:6645 train loss:3.574415 +step:6646 train loss:3.439443 +step:6647 train loss:3.519641 +step:6648 train loss:3.496979 +step:6649 train loss:3.503915 +step:6650 train loss:3.534079 +step:6651 train loss:3.568039 +step:6652 train loss:3.523590 +step:6653 train loss:3.519101 +step:6654 train loss:3.464334 +step:6655 train loss:3.524668 +step:6656 train loss:3.480656 +step:6657 train loss:3.533684 +step:6658 train loss:3.483422 +step:6659 train loss:3.470881 +step:6660 train loss:3.508752 +step:6661 train loss:3.470064 +step:6662 train loss:3.517810 +step:6663 train loss:3.503802 +step:6664 train loss:3.495529 +step:6665 train loss:3.504808 +step:6666 train loss:3.472520 +step:6667 train loss:3.506488 +step:6668 train loss:3.492901 +step:6669 train loss:3.513303 +step:6670 train loss:3.540070 
+step:6671 train loss:3.486803 +step:6672 train loss:3.510363 +step:6673 train loss:3.465696 +step:6674 train loss:3.478372 +step:6675 train loss:3.522034 +step:6676 train loss:3.470357 +step:6677 train loss:3.451141 +step:6678 train loss:3.443813 +step:6679 train loss:3.527824 +step:6680 train loss:3.498248 +step:6681 train loss:3.486889 +step:6682 train loss:3.492569 +step:6683 train loss:3.432132 +step:6684 train loss:3.481417 +step:6685 train loss:3.558161 +step:6686 train loss:3.445730 +step:6687 train loss:3.512499 +step:6688 train loss:3.504485 +step:6689 train loss:3.452706 +step:6690 train loss:3.535368 +step:6691 train loss:3.485260 +step:6692 train loss:3.499364 +step:6693 train loss:3.506951 +step:6694 train loss:3.543453 +step:6695 train loss:3.473115 +step:6696 train loss:3.480606 +step:6697 train loss:3.514052 +step:6698 train loss:3.492001 +step:6699 train loss:3.485279 +step:6700 train loss:3.432675 +step:6701 train loss:3.436466 +step:6702 train loss:3.485730 +step:6703 train loss:3.503399 +step:6704 train loss:3.519631 +step:6705 train loss:3.476812 +step:6706 train loss:3.533461 +step:6707 train loss:3.530345 +step:6708 train loss:3.495784 +step:6709 train loss:3.547258 +step:6710 train loss:3.465080 +step:6711 train loss:3.465288 +step:6712 train loss:3.482367 +step:6713 train loss:3.488872 +step:6714 train loss:3.488736 +step:6715 train loss:3.512460 +step:6716 train loss:3.488903 +step:6717 train loss:3.570383 +step:6718 train loss:3.492837 +step:6719 train loss:3.491673 +step:6720 train loss:3.567830 +step:6721 train loss:3.520107 +step:6722 train loss:3.462689 +step:6723 train loss:3.451965 +step:6724 train loss:3.479200 +step:6725 train loss:3.472778 +step:6726 train loss:3.460444 +step:6727 train loss:3.520611 +step:6728 train loss:3.446272 +step:6729 train loss:3.529319 +step:6730 train loss:3.509297 +step:6731 train loss:3.484421 +step:6732 train loss:3.613619 +step:6733 train loss:3.550528 +step:6734 train loss:3.509077 +step:6735 train loss:3.580810 +step:6736 train loss:3.479802 +step:6737 train loss:3.563906 +step:6738 train loss:3.500768 +step:6739 train loss:3.521017 +step:6740 train loss:3.467057 +step:6741 train loss:3.513113 +step:6742 train loss:3.518666 +step:6743 train loss:3.439896 +step:6744 train loss:3.552911 +step:6745 train loss:3.558078 +step:6746 train loss:3.509021 +step:6747 train loss:3.579614 +step:6748 train loss:3.680508 +step:6749 train loss:3.632272 +step:6750 validation loss:3.472834 +step:6750 train loss:3.527205 +step:6751 train loss:3.472055 +step:6752 train loss:3.506751 +step:6753 train loss:3.482317 +step:6754 train loss:3.544486 +step:6755 train loss:3.551726 +step:6756 train loss:3.495748 +step:6757 train loss:3.576437 +step:6758 train loss:3.461014 +step:6759 train loss:3.487881 +step:6760 train loss:3.459207 +step:6761 train loss:3.485013 +step:6762 train loss:3.549384 +step:6763 train loss:3.479418 +step:6764 train loss:3.471761 +step:6765 train loss:3.542523 +step:6766 train loss:3.526906 +step:6767 train loss:3.485176 +step:6768 train loss:3.490152 +step:6769 train loss:3.453650 +step:6770 train loss:3.517840 +step:6771 train loss:3.433022 +step:6772 train loss:3.539346 +step:6773 train loss:3.476719 +step:6774 train loss:3.461243 +step:6775 train loss:3.492508 +step:6776 train loss:3.440476 +step:6777 train loss:3.552893 +step:6778 train loss:3.441520 +step:6779 train loss:3.476502 +step:6780 train loss:3.479880 +step:6781 train loss:3.473341 +step:6782 train loss:3.463744 +step:6783 train loss:3.477984 +step:6784 
train loss:3.523155 +step:6785 train loss:3.487662 +step:6786 train loss:3.492006 +step:6787 train loss:3.505876 +step:6788 train loss:3.503479 +step:6789 train loss:3.508190 +step:6790 train loss:3.506780 +step:6791 train loss:3.558897 +step:6792 train loss:3.510044 +step:6793 train loss:3.550274 +step:6794 train loss:3.514055 +step:6795 train loss:3.441939 +step:6796 train loss:3.524631 +step:6797 train loss:3.504555 +step:6798 train loss:3.514873 +step:6799 train loss:3.524340 +step:6800 train loss:3.528659 +step:6801 train loss:3.485242 +step:6802 train loss:3.522526 +step:6803 train loss:3.564672 +step:6804 train loss:3.506594 +step:6805 train loss:3.498405 +step:6806 train loss:3.534087 +step:6807 train loss:3.542679 +step:6808 train loss:3.488137 +step:6809 train loss:3.465301 +step:6810 train loss:3.500111 +step:6811 train loss:3.489321 +step:6812 train loss:3.434591 +step:6813 train loss:3.470667 +step:6814 train loss:3.435580 +step:6815 train loss:3.452975 +step:6816 train loss:3.514055 +step:6817 train loss:3.495746 +step:6818 train loss:3.499753 +step:6819 train loss:3.556076 +step:6820 train loss:3.535589 +step:6821 train loss:3.634753 +step:6822 train loss:3.477483 +step:6823 train loss:3.519145 +step:6824 train loss:3.497501 +step:6825 train loss:3.538381 +step:6826 train loss:3.443779 +step:6827 train loss:3.555240 +step:6828 train loss:3.482379 +step:6829 train loss:3.459470 +step:6830 train loss:3.468434 +step:6831 train loss:3.428979 +step:6832 train loss:3.490019 +step:6833 train loss:3.436027 +step:6834 train loss:3.508766 +step:6835 train loss:3.418696 +step:6836 train loss:3.500540 +step:6837 train loss:3.486852 +step:6838 train loss:3.509894 +step:6839 train loss:3.509448 +step:6840 train loss:3.518622 +step:6841 train loss:3.508758 +step:6842 train loss:3.444401 +step:6843 train loss:3.480454 +step:6844 train loss:3.487367 +step:6845 train loss:3.540921 +step:6846 train loss:3.487033 +step:6847 train loss:3.466192 +step:6848 train loss:3.583926 +step:6849 train loss:3.479894 +step:6850 train loss:3.541787 +step:6851 train loss:3.503297 +step:6852 train loss:3.494076 +step:6853 train loss:3.491372 +step:6854 train loss:3.499046 +step:6855 train loss:3.486099 +step:6856 train loss:3.506126 +step:6857 train loss:3.521180 +step:6858 train loss:3.445634 +step:6859 train loss:3.418037 +step:6860 train loss:3.475180 +step:6861 train loss:3.519475 +step:6862 train loss:3.470519 +step:6863 train loss:3.484181 +step:6864 train loss:3.533437 +step:6865 train loss:3.528127 +step:6866 train loss:3.512980 +step:6867 train loss:3.474933 +step:6868 train loss:3.536854 +step:6869 train loss:3.510923 +step:6870 train loss:3.545424 +step:6871 train loss:3.466065 +step:6872 train loss:3.474463 +step:6873 train loss:3.515889 +step:6874 train loss:3.460456 +step:6875 train loss:3.499285 +step:6876 train loss:3.472973 +step:6877 train loss:3.505457 +step:6878 train loss:3.474374 +step:6879 train loss:3.477690 +step:6880 train loss:3.451182 +step:6881 train loss:3.447992 +step:6882 train loss:3.446414 +step:6883 train loss:3.478687 +step:6884 train loss:3.479185 +step:6885 train loss:3.484046 +step:6886 train loss:3.406499 +step:6887 train loss:3.467695 +step:6888 train loss:3.529202 +step:6889 train loss:3.487090 +step:6890 train loss:3.506809 +step:6891 train loss:3.509126 +step:6892 train loss:3.523372 +step:6893 train loss:3.483825 +step:6894 train loss:3.492671 +step:6895 train loss:3.494608 +step:6896 train loss:3.477077 +step:6897 train loss:3.483235 +step:6898 train loss:3.530187 
+step:6899 train loss:3.479642 +step:6900 train loss:3.484348 +step:6901 train loss:3.458835 +step:6902 train loss:3.516505 +step:6903 train loss:3.534477 +step:6904 train loss:3.531269 +step:6905 train loss:3.540545 +step:6906 train loss:3.596350 +step:6907 train loss:3.519370 +step:6908 train loss:3.526213 +step:6909 train loss:3.483788 +step:6910 train loss:3.454624 +step:6911 train loss:3.515767 +step:6912 train loss:3.447658 +step:6913 train loss:3.519378 +step:6914 train loss:3.441046 +step:6915 train loss:3.492707 +step:6916 train loss:3.495126 +step:6917 train loss:3.492032 +step:6918 train loss:3.495426 +step:6919 train loss:3.454646 +step:6920 train loss:3.525734 +step:6921 train loss:3.461359 +step:6922 train loss:3.472150 +step:6923 train loss:3.480491 +step:6924 train loss:3.495965 +step:6925 train loss:3.445168 +step:6926 train loss:3.521387 +step:6927 train loss:3.411377 +step:6928 train loss:3.500647 +step:6929 train loss:3.474524 +step:6930 train loss:3.494175 +step:6931 train loss:3.545707 +step:6932 train loss:3.444906 +step:6933 train loss:3.470231 +step:6934 train loss:3.557709 +step:6935 train loss:3.582237 +step:6936 train loss:3.470425 +step:6937 train loss:3.500863 +step:6938 train loss:3.488053 +step:6939 train loss:3.519916 +step:6940 train loss:3.553184 +step:6941 train loss:3.471432 +step:6942 train loss:3.464320 +step:6943 train loss:3.439291 +step:6944 train loss:3.470280 +step:6945 train loss:3.523112 +step:6946 train loss:3.538091 +step:6947 train loss:3.480316 +step:6948 train loss:3.507372 +step:6949 train loss:3.410732 +step:6950 train loss:3.533051 +step:6951 train loss:3.508359 +step:6952 train loss:3.525059 +step:6953 train loss:3.442921 +step:6954 train loss:3.488412 +step:6955 train loss:3.492505 +step:6956 train loss:3.501693 +step:6957 train loss:3.514286 +step:6958 train loss:3.506016 +step:6959 train loss:3.503323 +step:6960 train loss:3.511854 +step:6961 train loss:3.539529 +step:6962 train loss:3.460200 +step:6963 train loss:3.567624 +step:6964 train loss:3.465925 +step:6965 train loss:3.481224 +step:6966 train loss:3.465591 +step:6967 train loss:3.499583 +step:6968 train loss:3.509025 +step:6969 train loss:3.532449 +step:6970 train loss:3.500476 +step:6971 train loss:3.479698 +step:6972 train loss:3.482744 +step:6973 train loss:3.568043 +step:6974 train loss:3.469600 +step:6975 train loss:3.486898 +step:6976 train loss:3.502146 +step:6977 train loss:3.495921 +step:6978 train loss:3.511626 +step:6979 train loss:3.479468 +step:6980 train loss:3.514846 +step:6981 train loss:3.470082 +step:6982 train loss:3.456780 +step:6983 train loss:3.449582 +step:6984 train loss:3.530845 +step:6985 train loss:3.472456 +step:6986 train loss:3.482992 +step:6987 train loss:3.494099 +step:6988 train loss:3.551314 +step:6989 train loss:3.460308 +step:6990 train loss:3.434125 +step:6991 train loss:3.460217 +step:6992 train loss:3.462099 +step:6993 train loss:3.473738 +step:6994 train loss:3.506629 +step:6995 train loss:3.525603 +step:6996 train loss:3.447992 +step:6997 train loss:3.545102 +step:6998 train loss:3.490174 +step:6999 train loss:3.535319 +step:7000 validation loss:3.467084 total_sharp:1.9959e-03 L1_sharp:-4.8529e-02 L2_sharp:2.1776e-02 L3_sharp:7.4691e-03 L4_sharp:2.5707e-03 L5_sharp:1.5951e-03 L6_sharp:1.6052e-03 L7_sharp:1.1660e-03 L8_sharp:9.3063e-04 L9_sharp:8.2441e-04 L10_sharp:4.7694e-04 L11_sharp:5.1652e-04 L12_sharp:1.2782e-03 total_fnorm:2.2364e+00 total_l1_linf:1.8828e+04 total_spectral:2.2364e+00 L1_fnorm:4.1314e-01 L2_fnorm:3.2106e-01 
L3_fnorm:3.4367e-01 L4_fnorm:4.1489e-01 L5_fnorm:5.2368e-01 L6_fnorm:5.2382e-01 L7_fnorm:5.9395e-01 L8_fnorm:5.9760e-01 L9_fnorm:5.9694e-01 L10_fnorm:5.9527e-01 L11_fnorm:5.7507e-01 L12_fnorm:5.9813e-01 L1_l1linf:3.5549e-01 L2_l1linf:4.3791e-01 L3_l1linf:4.0846e-01 L4_l1linf:5.7041e-01 L5_l1linf:4.0719e-01 L6_l1linf:3.9716e-01 L7_l1linf:4.0004e-01 L8_l1linf:4.0405e-01 L9_l1linf:4.0287e-01 L10_l1linf:4.0906e-01 L11_l1linf:4.3702e-01 L12_l1linf:4.3148e-01 L1_spectral:1.2032e-02 L2_spectral:1.5948e-02 L3_spectral:1.4071e-02 L4_spectral:2.1507e-02 L5_spectral:1.3122e-02 L6_spectral:1.2048e-02 L7_spectral:1.2046e-02 L8_spectral:1.2048e-02 L9_spectral:1.2048e-02 L10_spectral:1.2048e-02 L11_spectral:1.2045e-02 L12_spectral:1.2046e-02 v_norm:2.2364e+00 cos_v_-g_hvp:2.1001e-02 g_hvp_norm:7.8329e-01 cos_v_-g_t:2.4999e-02 g_t_norm:1.0026e+00 hv_norm:2.0563e+01 cos_v_hv:2.1708e-04 hg_norm:8.4326e+03 cos_g_hg:6.2067e-02 v_par:4.5413e-03 v_perp:2.2364e+00 L1_cos_v_neg_g:1.1839e-02 L1_v_norm:4.1314e-01 L2_cos_v_neg_g:1.0448e-02 L2_v_norm:3.2106e-01 L3_cos_v_neg_g:1.9853e-02 L3_v_norm:3.4367e-01 L4_cos_v_neg_g:1.9392e-02 L4_v_norm:4.1489e-01 L5_cos_v_neg_g:2.3427e-02 L5_v_norm:5.2368e-01 L6_cos_v_neg_g:2.6436e-02 L6_v_norm:5.2382e-01 L7_cos_v_neg_g:2.5428e-02 L7_v_norm:5.9395e-01 L8_cos_v_neg_g:2.5842e-02 L8_v_norm:5.9760e-01 L9_cos_v_neg_g:2.7259e-02 L9_v_norm:5.9694e-01 L10_cos_v_neg_g:2.9300e-02 L10_v_norm:5.9527e-01 L11_cos_v_neg_g:3.8004e-02 L11_v_norm:5.7507e-01 L12_cos_v_neg_g:7.2236e-02 L12_v_norm:5.9813e-01 +step:7000 train loss:3.463667 +step:7001 train loss:3.516311 +step:7002 train loss:3.452229 +step:7003 train loss:3.514248 +step:7004 train loss:3.447056 +step:7005 train loss:3.520597 +step:7006 train loss:3.471137 +step:7007 train loss:3.503288 +step:7008 train loss:3.460661 +step:7009 train loss:3.535776 +step:7010 train loss:3.495611 +step:7011 train loss:3.503759 +step:7012 train loss:3.481867 +step:7013 train loss:3.492610 +step:7014 train loss:3.505078 +step:7015 train loss:3.492244 +step:7016 train loss:3.535655 +step:7017 train loss:3.430157 +step:7018 train loss:3.554968 +step:7019 train loss:3.507568 +step:7020 train loss:3.557757 +step:7021 train loss:3.483528 +step:7022 train loss:3.489563 +step:7023 train loss:3.504326 +step:7024 train loss:3.521155 +step:7025 train loss:3.506660 +step:7026 train loss:3.534336 +step:7027 train loss:3.513597 +step:7028 train loss:3.544349 +step:7029 train loss:3.569077 +step:7030 train loss:3.607743 +step:7031 train loss:3.479283 +step:7032 train loss:3.486134 +step:7033 train loss:3.521790 +step:7034 train loss:3.498103 +step:7035 train loss:3.494644 +step:7036 train loss:3.477326 +step:7037 train loss:3.521816 +step:7038 train loss:3.486762 +step:7039 train loss:3.562767 +step:7040 train loss:3.471370 +step:7041 train loss:3.489520 +step:7042 train loss:3.465496 +step:7043 train loss:3.444821 +step:7044 train loss:3.504519 +step:7045 train loss:3.510842 +step:7046 train loss:3.506708 +step:7047 train loss:3.460818 +step:7048 train loss:3.537534 +step:7049 train loss:3.518837 +step:7050 train loss:3.531662 +step:7051 train loss:3.533294 +step:7052 train loss:3.476687 +step:7053 train loss:3.470195 +step:7054 train loss:3.621100 +step:7055 train loss:3.504267 +step:7056 train loss:3.513743 +step:7057 train loss:3.422476 +step:7058 train loss:3.554018 +step:7059 train loss:3.492999 +step:7060 train loss:3.499926 +step:7061 train loss:3.480490 +step:7062 train loss:3.596973 +step:7063 train loss:3.483509 +step:7064 train loss:3.559296 +step:7065 
train loss:3.483984 +step:7066 train loss:3.477137 +step:7067 train loss:3.481984 +step:7068 train loss:3.513328 +step:7069 train loss:3.518590 +step:7070 train loss:3.514250 +step:7071 train loss:3.570845 +step:7072 train loss:3.504814 +step:7073 train loss:3.506093 +step:7074 train loss:3.476766 +step:7075 train loss:3.570960 +step:7076 train loss:3.442879 +step:7077 train loss:3.540334 +step:7078 train loss:3.458085 +step:7079 train loss:3.513627 +step:7080 train loss:3.511546 +step:7081 train loss:3.487561 +step:7082 train loss:3.467942 +step:7083 train loss:3.502134 +step:7084 train loss:3.501416 +step:7085 train loss:3.471554 +step:7086 train loss:3.462987 +step:7087 train loss:3.495341 +step:7088 train loss:3.482593 +step:7089 train loss:3.510162 +step:7090 train loss:3.490876 +step:7091 train loss:3.535448 +step:7092 train loss:3.475174 +step:7093 train loss:3.483157 +step:7094 train loss:3.502263 +step:7095 train loss:3.546275 +step:7096 train loss:3.471644 +step:7097 train loss:3.526330 +step:7098 train loss:3.517848 +step:7099 train loss:3.530230 +step:7100 train loss:3.502483 +step:7101 train loss:3.525963 +step:7102 train loss:3.502313 +step:7103 train loss:3.516535 +step:7104 train loss:3.529535 +step:7105 train loss:3.529572 +step:7106 train loss:3.473503 +step:7107 train loss:3.464165 +step:7108 train loss:3.510728 +step:7109 train loss:3.600980 +step:7110 train loss:3.520130 +step:7111 train loss:3.520342 +step:7112 train loss:3.568921 +step:7113 train loss:3.572589 +step:7114 train loss:3.638580 +step:7115 train loss:3.545455 +step:7116 train loss:3.526188 +step:7117 train loss:3.469725 +step:7118 train loss:3.528692 +step:7119 train loss:3.482779 +step:7120 train loss:3.546826 +step:7121 train loss:3.545699 +step:7122 train loss:3.639354 +step:7123 train loss:3.536297 +step:7124 train loss:3.584070 +step:7125 train loss:3.518739 +step:7126 train loss:3.461004 +step:7127 train loss:3.446278 +step:7128 train loss:3.508866 +step:7129 train loss:3.486460 +step:7130 train loss:3.543020 +step:7131 train loss:3.529704 +step:7132 train loss:3.543219 +step:7133 train loss:3.492599 +step:7134 train loss:3.515989 +step:7135 train loss:3.526342 +step:7136 train loss:3.492687 +step:7137 train loss:3.483545 +step:7138 train loss:3.500631 +step:7139 train loss:3.491292 +step:7140 train loss:3.573447 +step:7141 train loss:3.498347 +step:7142 train loss:3.562635 +step:7143 train loss:3.464798 +step:7144 train loss:3.521013 +step:7145 train loss:3.580955 +step:7146 train loss:3.528457 +step:7147 train loss:3.551976 +step:7148 train loss:3.494206 +step:7149 train loss:3.520275 +step:7150 train loss:3.437524 +step:7151 train loss:3.484679 +step:7152 train loss:3.447767 +step:7153 train loss:3.527650 +step:7154 train loss:3.500541 +step:7155 train loss:3.502849 +step:7156 train loss:3.468173 +step:7157 train loss:3.556425 +step:7158 train loss:3.466032 +step:7159 train loss:3.497262 +step:7160 train loss:3.505857 +step:7161 train loss:3.482687 +step:7162 train loss:3.515091 +step:7163 train loss:3.487984 +step:7164 train loss:3.481079 +step:7165 train loss:3.522383 +step:7166 train loss:3.459635 +step:7167 train loss:3.549943 +step:7168 train loss:3.496798 +step:7169 train loss:3.574329 +step:7170 train loss:3.494609 +step:7171 train loss:3.534850 +step:7172 train loss:3.532725 +step:7173 train loss:3.550718 +step:7174 train loss:3.513155 +step:7175 train loss:3.559551 +step:7176 train loss:3.516618 +step:7177 train loss:3.463314 +step:7178 train loss:3.527383 +step:7179 train loss:3.567544 
+step:7180 train loss:3.534864 +step:7181 train loss:3.545084 +step:7182 train loss:3.520006 +step:7183 train loss:3.558484 +step:7184 train loss:3.501714 +step:7185 train loss:3.498561 +step:7186 train loss:3.509945 +step:7187 train loss:3.604556 +step:7188 train loss:3.539508 +step:7189 train loss:3.513897 +step:7190 train loss:3.520661 +step:7191 train loss:3.493754 +step:7192 train loss:3.580301 +step:7193 train loss:3.622699 +step:7194 train loss:3.557505 +step:7195 train loss:3.593236 +step:7196 train loss:3.495069 +step:7197 train loss:3.513263 +step:7198 train loss:3.470188 +step:7199 train loss:3.489132 +step:7200 train loss:3.477294 +step:7201 train loss:3.481623 +step:7202 train loss:3.474370 +step:7203 train loss:3.533136 +step:7204 train loss:3.526808 +step:7205 train loss:3.535188 +step:7206 train loss:3.660861 +step:7207 train loss:3.466332 +step:7208 train loss:3.612849 +step:7209 train loss:3.620379 +step:7210 train loss:3.622299 +step:7211 train loss:3.654154 +step:7212 train loss:3.510331 +step:7213 train loss:3.508076 +step:7214 train loss:3.545092 +step:7215 train loss:3.511170 +step:7216 train loss:3.577274 +step:7217 train loss:3.501110 +step:7218 train loss:3.526431 +step:7219 train loss:3.530728 +step:7220 train loss:3.558704 +step:7221 train loss:3.503100 +step:7222 train loss:3.492479 +step:7223 train loss:3.545299 +step:7224 train loss:3.499766 +step:7225 train loss:3.543069 +step:7226 train loss:3.479260 +step:7227 train loss:3.471374 +step:7228 train loss:3.507490 +step:7229 train loss:3.509870 +step:7230 train loss:3.493490 +step:7231 train loss:3.508224 +step:7232 train loss:3.535878 +step:7233 train loss:3.490542 +step:7234 train loss:3.515367 +step:7235 train loss:3.553741 +step:7236 train loss:3.501245 +step:7237 train loss:3.545962 +step:7238 train loss:3.509549 +step:7239 train loss:3.561654 +step:7240 train loss:3.531916 +step:7241 train loss:3.570685 +step:7242 train loss:3.504864 +step:7243 train loss:3.496686 +step:7244 train loss:3.540223 +step:7245 train loss:3.475841 +step:7246 train loss:3.634907 +step:7247 train loss:3.469228 +step:7248 train loss:3.547253 +step:7249 train loss:3.494387 +step:7250 validation loss:3.477925 +step:7250 train loss:3.552862 +step:7251 train loss:3.551734 +step:7252 train loss:3.600927 +step:7253 train loss:3.493310 +step:7254 train loss:3.526260 +step:7255 train loss:3.558371 +step:7256 train loss:3.479400 +step:7257 train loss:3.568882 +step:7258 train loss:3.571012 +step:7259 train loss:3.523152 +step:7260 train loss:3.665815 +step:7261 train loss:3.525071 +step:7262 train loss:3.484150 +step:7263 train loss:3.527391 +step:7264 train loss:3.555445 +step:7265 train loss:3.568512 +step:7266 train loss:3.517624 +step:7267 train loss:3.532960 +step:7268 train loss:3.476804 +step:7269 train loss:3.512952 +step:7270 train loss:3.538770 +step:7271 train loss:3.492467 +step:7272 train loss:3.488389 +step:7273 train loss:3.514194 +step:7274 train loss:3.521639 +step:7275 train loss:3.519043 +step:7276 train loss:3.541345 +step:7277 train loss:3.540504 +step:7278 train loss:3.509298 +step:7279 train loss:3.548635 +step:7280 train loss:3.577873 +step:7281 train loss:3.535369 +step:7282 train loss:3.520869 +step:7283 train loss:3.486560 +step:7284 train loss:3.504909 +step:7285 train loss:3.544316 +step:7286 train loss:3.492341 +step:7287 train loss:3.519941 +step:7288 train loss:3.518574 +step:7289 train loss:3.485777 +step:7290 train loss:3.507386 +step:7291 train loss:3.571122 +step:7292 train loss:3.604006 +step:7293 
train loss:3.608555 +step:7294 train loss:3.605052 +step:7295 train loss:3.489489 +step:7296 train loss:3.500755 +step:7297 train loss:3.517102 +step:7298 train loss:3.539437 +step:7299 train loss:3.547603 +step:7300 train loss:3.592701 +step:7301 train loss:3.519209 +step:7302 train loss:3.480434 +step:7303 train loss:3.483172 +step:7304 train loss:3.482985 +step:7305 train loss:3.497020 +step:7306 train loss:3.484668 +step:7307 train loss:3.485729 +step:7308 train loss:3.501951 +step:7309 train loss:3.509309 +step:7310 train loss:3.501808 +step:7311 train loss:3.482049 +step:7312 train loss:3.562394 +step:7313 train loss:3.523217 +step:7314 train loss:3.483092 +step:7315 train loss:3.533698 +step:7316 train loss:3.532474 +step:7317 train loss:3.578528 +step:7318 train loss:3.536623 +step:7319 train loss:3.482645 +step:7320 train loss:3.467280 +step:7321 train loss:3.551263 +step:7322 train loss:3.504887 +step:7323 train loss:3.486963 +step:7324 train loss:3.533364 +step:7325 train loss:3.463085 +step:7326 train loss:3.543729 +step:7327 train loss:3.523233 +step:7328 train loss:3.523716 +step:7329 train loss:3.520679 +step:7330 train loss:3.554793 +step:7331 train loss:3.553092 +step:7332 train loss:3.514150 +step:7333 train loss:3.507047 +step:7334 train loss:3.495618 +step:7335 train loss:3.523574 +step:7336 train loss:3.496163 +step:7337 train loss:3.492964 +step:7338 train loss:3.487383 +step:7339 train loss:3.507095 +step:7340 train loss:3.488376 +step:7341 train loss:3.503389 +step:7342 train loss:3.480257 +step:7343 train loss:3.540488 +step:7344 train loss:3.502634 +step:7345 train loss:3.505503 +step:7346 train loss:3.567331 +step:7347 train loss:3.517707 +step:7348 train loss:3.490054 +step:7349 train loss:3.481348 +step:7350 train loss:3.552411 +step:7351 train loss:3.545804 +step:7352 train loss:3.593352 +step:7353 train loss:3.549683 +step:7354 train loss:3.526240 +step:7355 train loss:3.550557 +step:7356 train loss:3.539286 +step:7357 train loss:3.513195 +step:7358 train loss:3.507470 +step:7359 train loss:3.485019 +step:7360 train loss:3.530015 +step:7361 train loss:3.512732 +step:7362 train loss:3.513155 +step:7363 train loss:3.663687 +step:7364 train loss:3.495684 +step:7365 train loss:3.553397 +step:7366 train loss:3.530329 +step:7367 train loss:3.524326 +step:7368 train loss:3.503871 +step:7369 train loss:3.550561 +step:7370 train loss:3.487147 +step:7371 train loss:3.550539 +step:7372 train loss:3.551124 +step:7373 train loss:3.535666 +step:7374 train loss:3.575501 +step:7375 train loss:3.506628 +step:7376 train loss:3.539004 +step:7377 train loss:3.551510 +step:7378 train loss:3.516542 +step:7379 train loss:3.564091 +step:7380 train loss:3.616464 +step:7381 train loss:3.516979 +step:7382 train loss:3.557792 +step:7383 train loss:3.526050 +step:7384 train loss:3.552532 +step:7385 train loss:3.508802 +step:7386 train loss:3.524671 +step:7387 train loss:3.553909 +step:7388 train loss:3.569593 +step:7389 train loss:3.580219 +step:7390 train loss:3.668289 +step:7391 train loss:3.633624 +step:7392 train loss:3.515367 +step:7393 train loss:3.466592 +step:7394 train loss:3.545692 +step:7395 train loss:3.494189 +step:7396 train loss:3.457757 +step:7397 train loss:3.483592 +step:7398 train loss:3.550701 +step:7399 train loss:3.534354 +step:7400 train loss:3.558727 +step:7401 train loss:3.526384 +step:7402 train loss:3.528828 +step:7403 train loss:3.554683 +step:7404 train loss:3.577593 +step:7405 train loss:3.565627 +step:7406 train loss:3.578239 +step:7407 train loss:3.626400 
+step:7408 train loss:3.573096 +step:7409 train loss:3.525549 +step:7410 train loss:3.551276 +step:7411 train loss:3.569396 +step:7412 train loss:3.562718 +step:7413 train loss:3.602705 +step:7414 train loss:3.528158 +step:7415 train loss:3.643553 +step:7416 train loss:3.548110 +step:7417 train loss:3.541657 +step:7418 train loss:3.511569 +step:7419 train loss:3.486498 +step:7420 train loss:3.530655 +step:7421 train loss:3.511895 +step:7422 train loss:3.532383 +step:7423 train loss:3.585559 +step:7424 train loss:3.600605 +step:7425 train loss:3.504565 +step:7426 train loss:3.542329 +step:7427 train loss:3.545399 +step:7428 train loss:3.548770 +step:7429 train loss:3.514697 +step:7430 train loss:3.549881 +step:7431 train loss:3.500820 +step:7432 train loss:3.560349 +step:7433 train loss:3.514668 +step:7434 train loss:3.559771 +step:7435 train loss:3.591364 +step:7436 train loss:3.537424 +step:7437 train loss:3.527538 +step:7438 train loss:3.572086 +step:7439 train loss:3.516875 +step:7440 train loss:3.612168 +step:7441 train loss:3.571125 +step:7442 train loss:3.568000 +step:7443 train loss:3.533824 +step:7444 train loss:3.574929 +step:7445 train loss:3.509228 +step:7446 train loss:3.531954 +step:7447 train loss:3.518700 +step:7448 train loss:3.571385 +step:7449 train loss:3.551201 +step:7450 train loss:3.538537 +step:7451 train loss:3.582941 +step:7452 train loss:3.503764 +step:7453 train loss:3.499087 +step:7454 train loss:3.503226 +step:7455 train loss:3.577496 +step:7456 train loss:3.554163 +step:7457 train loss:3.521201 +step:7458 train loss:3.535847 +step:7459 train loss:3.536692 +step:7460 train loss:3.633658 +step:7461 train loss:3.588152 +step:7462 train loss:3.536804 +step:7463 train loss:3.552604 +step:7464 train loss:3.556152 +step:7465 train loss:3.570565 +step:7466 train loss:3.572851 +step:7467 train loss:3.616388 +step:7468 train loss:3.562478 +step:7469 train loss:3.578611 +step:7470 train loss:3.557353 +step:7471 train loss:3.505217 +step:7472 train loss:3.470834 +step:7473 train loss:3.496711 +step:7474 train loss:3.538864 +step:7475 train loss:3.576638 +step:7476 train loss:3.533957 +step:7477 train loss:3.605251 +step:7478 train loss:3.525221 +step:7479 train loss:3.539244 +step:7480 train loss:3.575516 +step:7481 train loss:3.490354 +step:7482 train loss:3.537879 +step:7483 train loss:3.608021 +step:7484 train loss:3.507704 +step:7485 train loss:3.544139 +step:7486 train loss:3.461045 +step:7487 train loss:3.436901 +step:7488 train loss:3.553691 +step:7489 train loss:3.584110 +step:7490 train loss:3.551110 +step:7491 train loss:3.526210 +step:7492 train loss:3.557112 +step:7493 train loss:3.495928 +step:7494 train loss:3.560252 +step:7495 train loss:3.542674 +step:7496 train loss:3.544304 +step:7497 train loss:3.611525 +step:7498 train loss:3.594713 +step:7499 train loss:3.561977 +step:7500 validation loss:3.477495 total_sharp:1.0981e-02 L1_sharp:2.6453e-02 L2_sharp:1.8037e-02 L3_sharp:8.9128e-03 L4_sharp:4.5869e-03 L5_sharp:2.2987e-03 L6_sharp:2.1973e-03 L7_sharp:1.8084e-03 L8_sharp:1.6506e-03 L9_sharp:1.0275e-03 L10_sharp:6.3985e-04 L11_sharp:5.7009e-04 L12_sharp:3.7250e-04 total_fnorm:2.2057e+00 total_l1_linf:1.8426e+04 total_spectral:2.2057e+00 L1_fnorm:4.9012e-01 L2_fnorm:2.4422e-01 L3_fnorm:2.9535e-01 L4_fnorm:4.0248e-01 L5_fnorm:4.8261e-01 L6_fnorm:5.0216e-01 L7_fnorm:5.8814e-01 L8_fnorm:5.9264e-01 L9_fnorm:5.9280e-01 L10_fnorm:5.9383e-01 L11_fnorm:5.7241e-01 L12_fnorm:5.9763e-01 L1_l1linf:3.3450e-01 L2_l1linf:4.6822e-01 L3_l1linf:4.5829e-01 L4_l1linf:5.4941e-01 
L5_l1linf:4.2629e-01 L6_l1linf:3.9762e-01 L7_l1linf:3.9692e-01 L8_l1linf:3.9971e-01 L9_l1linf:4.0167e-01 L10_l1linf:4.1139e-01 L11_l1linf:4.3849e-01 L12_l1linf:4.4139e-01 L1_spectral:1.2042e-02 L2_spectral:1.4398e-02 L3_spectral:1.2028e-02 L4_spectral:2.0376e-02 L5_spectral:1.3246e-02 L6_spectral:1.2685e-02 L7_spectral:1.2047e-02 L8_spectral:1.2048e-02 L9_spectral:1.2047e-02 L10_spectral:1.2056e-02 L11_spectral:1.2044e-02 L12_spectral:1.2044e-02 v_norm:2.2057e+00 cos_v_-g_hvp:2.0105e-02 g_hvp_norm:1.1257e+00 cos_v_-g_t:2.2235e-02 g_t_norm:1.2367e+00 hv_norm:9.5259e+00 cos_v_hv:2.5427e-03 hg_norm:4.6158e+03 cos_g_hg:-1.5396e-02 v_par:4.1594e-03 v_perp:2.2057e+00 L1_cos_v_neg_g:1.5391e-02 L1_v_norm:4.9012e-01 L2_cos_v_neg_g:3.1871e-02 L2_v_norm:2.4422e-01 L3_cos_v_neg_g:3.0111e-02 L3_v_norm:2.9535e-01 L4_cos_v_neg_g:2.1926e-02 L4_v_norm:4.0248e-01 L5_cos_v_neg_g:2.2796e-02 L5_v_norm:4.8261e-01 L6_cos_v_neg_g:2.5416e-02 L6_v_norm:5.0216e-01 L7_cos_v_neg_g:2.3901e-02 L7_v_norm:5.8814e-01 L8_cos_v_neg_g:2.5880e-02 L8_v_norm:5.9264e-01 L9_cos_v_neg_g:2.6045e-02 L9_v_norm:5.9280e-01 L10_cos_v_neg_g:2.8563e-02 L10_v_norm:5.9383e-01 L11_cos_v_neg_g:3.7000e-02 L11_v_norm:5.7241e-01 L12_cos_v_neg_g:6.7627e-02 L12_v_norm:5.9763e-01 +step:7500 train loss:3.558128 +step:7501 train loss:3.554524 +step:7502 train loss:3.560619 +step:7503 train loss:3.554556 +step:7504 train loss:3.519452 +step:7505 train loss:3.516526 +step:7506 train loss:3.509920 +step:7507 train loss:3.533023 +step:7508 train loss:3.549145 +step:7509 train loss:3.561168 +step:7510 train loss:3.533494 +step:7511 train loss:3.613347 +step:7512 train loss:3.546207 +step:7513 train loss:3.587537 +step:7514 train loss:3.506319 +step:7515 train loss:3.467948 +step:7516 train loss:3.481272 +step:7517 train loss:3.542161 +step:7518 train loss:3.532597 +step:7519 train loss:3.551056 +step:7520 train loss:3.514803 +step:7521 train loss:3.518559 +step:7522 train loss:3.518308 +step:7523 train loss:3.512810 +step:7524 train loss:3.560542 +step:7525 train loss:3.545172 +step:7526 train loss:3.524987 +step:7527 train loss:3.527447 +step:7528 train loss:3.574976 +step:7529 train loss:3.542277 +step:7530 train loss:3.481798 +step:7531 train loss:3.593328 +step:7532 train loss:3.537004 +step:7533 train loss:3.588974 +step:7534 train loss:3.600907 +step:7535 train loss:3.519730 +step:7536 train loss:3.519732 +step:7537 train loss:3.561575 +step:7538 train loss:3.533832 +step:7539 train loss:3.560992 +step:7540 train loss:3.541317 +step:7541 train loss:3.525021 +step:7542 train loss:3.574499 +step:7543 train loss:3.519686 +step:7544 train loss:3.502065 +step:7545 train loss:3.511828 +step:7546 train loss:3.472335 +step:7547 train loss:3.526704 +step:7548 train loss:3.461287 +step:7549 train loss:3.506203 +step:7550 train loss:3.438967 +step:7551 train loss:3.483705 +step:7552 train loss:3.492440 +step:7553 train loss:3.477602 +step:7554 train loss:3.492250 +step:7555 train loss:3.483971 +step:7556 train loss:3.535599 +step:7557 train loss:3.503513 +step:7558 train loss:3.494668 +step:7559 train loss:3.442309 +step:7560 train loss:3.512376 +step:7561 train loss:3.524927 +step:7562 train loss:3.510508 +step:7563 train loss:3.533466 +step:7564 train loss:3.526160 +step:7565 train loss:3.500998 +step:7566 train loss:3.484548 +step:7567 train loss:3.528540 +step:7568 train loss:3.553371 +step:7569 train loss:3.664412 +step:7570 train loss:3.581218 +step:7571 train loss:3.477517 +step:7572 train loss:3.515980 +step:7573 train loss:3.485417 +step:7574 train 
loss:3.501931 +step:7575 train loss:3.500367 +step:7576 train loss:3.523605 +step:7577 train loss:3.525716 +step:7578 train loss:3.499639 +step:7579 train loss:3.481069 +step:7580 train loss:3.489847 +step:7581 train loss:3.507866 +step:7582 train loss:3.479281 +step:7583 train loss:3.497092 +step:7584 train loss:3.514488 +step:7585 train loss:3.475646 +step:7586 train loss:3.540958 +step:7587 train loss:3.453398 +step:7588 train loss:3.481014 +step:7589 train loss:3.479148 +step:7590 train loss:3.511884 +step:7591 train loss:3.531245 +step:7592 train loss:3.589534 +step:7593 train loss:3.542195 +step:7594 train loss:3.465320 +step:7595 train loss:3.454875 +step:7596 train loss:3.472638 +step:7597 train loss:3.498368 +step:7598 train loss:3.506041 +step:7599 train loss:3.468082 +step:7600 train loss:3.476163 +step:7601 train loss:3.466063 +step:7602 train loss:3.523040 +step:7603 train loss:3.469713 +step:7604 train loss:3.523389 +step:7605 train loss:3.517025 +step:7606 train loss:3.498977 +step:7607 train loss:3.551125 +step:7608 train loss:3.491701 +step:7609 train loss:3.475557 +step:7610 train loss:3.485483 +step:7611 train loss:3.524589 +step:7612 train loss:3.469609 +step:7613 train loss:3.537232 +step:7614 train loss:3.531916 +step:7615 train loss:3.510132 +step:7616 train loss:3.495537 +step:7617 train loss:3.426090 +step:7618 train loss:3.467516 +step:7619 train loss:3.458467 +step:7620 train loss:3.472924 +step:7621 train loss:3.446364 +step:7622 train loss:3.554344 +step:7623 train loss:3.511741 +step:7624 train loss:3.536915 +step:7625 train loss:3.490803 +step:7626 train loss:3.478146 +step:7627 train loss:3.517336 +step:7628 train loss:3.477896 +step:7629 train loss:3.507324 +step:7630 train loss:3.464352 +step:7631 train loss:3.546899 +step:7632 train loss:3.509438 +step:7633 train loss:3.472448 +step:7634 train loss:3.444218 +step:7635 train loss:3.528424 +step:7636 train loss:3.479252 +step:7637 train loss:3.476954 +step:7638 train loss:3.548444 +step:7639 train loss:3.539986 +step:7640 train loss:3.596472 +step:7641 train loss:3.521200 +step:7642 train loss:3.546757 +step:7643 train loss:3.441393 +step:7644 train loss:3.512183 +step:7645 train loss:3.479449 +step:7646 train loss:3.532941 +step:7647 train loss:3.492822 +step:7648 train loss:3.564144 +step:7649 train loss:3.527351 +step:7650 train loss:3.456345 +step:7651 train loss:3.507000 +step:7652 train loss:3.510170 +step:7653 train loss:3.522858 +step:7654 train loss:3.482059 +step:7655 train loss:3.540496 +step:7656 train loss:3.509420 +step:7657 train loss:3.481704 +step:7658 train loss:3.502349 +step:7659 train loss:3.551395 +step:7660 train loss:3.534666 +step:7661 train loss:3.448505 +step:7662 train loss:3.516707 +step:7663 train loss:3.453228 +step:7664 train loss:3.486313 +step:7665 train loss:3.474379 +step:7666 train loss:3.494911 +step:7667 train loss:3.458175 +step:7668 train loss:3.496842 +step:7669 train loss:3.483184 +step:7670 train loss:3.484700 +step:7671 train loss:3.493597 +step:7672 train loss:3.520048 +step:7673 train loss:3.498480 +step:7674 train loss:3.536542 +step:7675 train loss:3.502766 +step:7676 train loss:3.491710 +step:7677 train loss:3.510115 +step:7678 train loss:3.494662 +step:7679 train loss:3.499216 +step:7680 train loss:3.528414 +step:7681 train loss:3.541379 +step:7682 train loss:3.523187 +step:7683 train loss:3.482697 +step:7684 train loss:3.506428 +step:7685 train loss:3.517198 +step:7686 train loss:3.466503 +step:7687 train loss:3.515335 +step:7688 train loss:3.522198 
+step:7689 train loss:3.481097 +step:7690 train loss:3.443616 +step:7691 train loss:3.519227 +step:7692 train loss:3.517758 +step:7693 train loss:3.487180 +step:7694 train loss:3.530685 +step:7695 train loss:3.453557 +step:7696 train loss:3.486078 +step:7697 train loss:3.475761 +step:7698 train loss:3.490085 +step:7699 train loss:3.506369 +step:7700 train loss:3.525374 +step:7701 train loss:3.487394 +step:7702 train loss:3.528461 +step:7703 train loss:3.502789 +step:7704 train loss:3.484495 +step:7705 train loss:3.483153 +step:7706 train loss:3.514552 +step:7707 train loss:3.478132 +step:7708 train loss:3.502017 +step:7709 train loss:3.519290 +step:7710 train loss:3.496648 +step:7711 train loss:3.512919 +step:7712 train loss:3.513120 +step:7713 train loss:3.572195 +step:7714 train loss:3.508269 +step:7715 train loss:3.515091 +step:7716 train loss:3.497988 +step:7717 train loss:3.485737 +step:7718 train loss:3.497986 +step:7719 train loss:3.461087 +step:7720 train loss:3.489141 +step:7721 train loss:3.477020 +step:7722 train loss:3.489631 +step:7723 train loss:3.524362 +step:7724 train loss:3.520079 +step:7725 train loss:3.480559 +step:7726 train loss:3.451615 +step:7727 train loss:3.490192 +step:7728 train loss:3.506282 +step:7729 train loss:3.493828 +step:7730 train loss:3.504408 +step:7731 train loss:3.494544 +step:7732 train loss:3.488866 +step:7733 train loss:3.543865 +step:7734 train loss:3.518275 +step:7735 train loss:3.480098 +step:7736 train loss:3.562999 +step:7737 train loss:3.537723 +step:7738 train loss:3.650893 +step:7739 train loss:3.535176 +step:7740 train loss:3.549429 +step:7741 train loss:3.552223 +step:7742 train loss:3.551028 +step:7743 train loss:3.491488 +step:7744 train loss:3.514396 +step:7745 train loss:3.554261 +step:7746 train loss:3.549184 +step:7747 train loss:3.506072 +step:7748 train loss:3.531638 +step:7749 train loss:3.534582 +step:7750 validation loss:3.461020 +step:7750 train loss:3.573656 +step:7751 train loss:3.543404 +step:7752 train loss:3.515488 +step:7753 train loss:3.511471 +step:7754 train loss:3.485897 +step:7755 train loss:3.564966 +step:7756 train loss:3.542305 +step:7757 train loss:3.531759 +step:7758 train loss:3.534656 +step:7759 train loss:3.573293 +step:7760 train loss:3.566258 +step:7761 train loss:3.553682 +step:7762 train loss:3.519976 +step:7763 train loss:3.478716 +step:7764 train loss:3.512196 +step:7765 train loss:3.494727 +step:7766 train loss:3.539657 +step:7767 train loss:3.575238 +step:7768 train loss:3.527704 +step:7769 train loss:3.543957 +step:7770 train loss:3.582066 +step:7771 train loss:3.597547 +step:7772 train loss:3.493787 +step:7773 train loss:3.530970 +step:7774 train loss:3.559827 +step:7775 train loss:3.511890 +step:7776 train loss:3.464576 +step:7777 train loss:3.532925 +step:7778 train loss:3.577226 +step:7779 train loss:3.527825 +step:7780 train loss:3.503299 +step:7781 train loss:3.521167 +step:7782 train loss:3.510980 +step:7783 train loss:3.565945 +step:7784 train loss:3.501631 +step:7785 train loss:3.505139 +step:7786 train loss:3.534141 +step:7787 train loss:3.573375 +step:7788 train loss:3.507513 +step:7789 train loss:3.527520 +step:7790 train loss:3.561455 +step:7791 train loss:3.586913 +step:7792 train loss:3.568337 +step:7793 train loss:3.567874 +step:7794 train loss:3.532645 +step:7795 train loss:3.497382 +step:7796 train loss:3.581932 +step:7797 train loss:3.554147 +step:7798 train loss:3.526267 +step:7799 train loss:3.563972 +step:7800 train loss:3.593899 +step:7801 train loss:3.574735 +step:7802 
train loss:3.550063 +step:7803 train loss:3.530452 +step:7804 train loss:3.564317 +step:7805 train loss:3.527039 +step:7806 train loss:3.538463 +step:7807 train loss:3.536348 +step:7808 train loss:3.498722 +step:7809 train loss:3.488061 +step:7810 train loss:3.498255 +step:7811 train loss:3.517817 +step:7812 train loss:3.541310 +step:7813 train loss:3.529742 +step:7814 train loss:3.617665 +step:7815 train loss:3.521374 +step:7816 train loss:3.535357 +step:7817 train loss:3.468482 +step:7818 train loss:3.470834 +step:7819 train loss:3.531700 +step:7820 train loss:3.465447 +step:7821 train loss:3.525229 +step:7822 train loss:3.583653 +step:7823 train loss:3.532807 +step:7824 train loss:3.478605 +step:7825 train loss:3.535054 +step:7826 train loss:3.514371 +step:7827 train loss:3.522631 +step:7828 train loss:3.585302 +step:7829 train loss:3.535194 +step:7830 train loss:3.486166 +step:7831 train loss:3.513645 +step:7832 train loss:3.571143 +step:7833 train loss:3.510981 +step:7834 train loss:3.523273 +step:7835 train loss:3.593128 +step:7836 train loss:3.504696 +step:7837 train loss:3.433176 +step:7838 train loss:3.544645 +step:7839 train loss:3.550160 +step:7840 train loss:3.462942 +step:7841 train loss:3.539795 +step:7842 train loss:3.505125 +step:7843 train loss:3.544012 +step:7844 train loss:3.544694 +step:7845 train loss:3.530254 +step:7846 train loss:3.580296 +step:7847 train loss:3.503623 +step:7848 train loss:3.460721 +step:7849 train loss:3.600561 +step:7850 train loss:3.528512 +step:7851 train loss:3.578928 +step:7852 train loss:3.584870 +step:7853 train loss:3.535131 +step:7854 train loss:3.544896 +step:7855 train loss:3.563694 +step:7856 train loss:3.567149 +step:7857 train loss:3.493116 +step:7858 train loss:3.557935 +step:7859 train loss:3.535573 +step:7860 train loss:3.557348 +step:7861 train loss:3.538709 +step:7862 train loss:3.547795 +step:7863 train loss:3.580770 +step:7864 train loss:3.550909 +step:7865 train loss:3.584588 +step:7866 train loss:3.486564 +step:7867 train loss:3.496352 +step:7868 train loss:3.496196 +step:7869 train loss:3.564496 +step:7870 train loss:3.476382 +step:7871 train loss:3.547780 +step:7872 train loss:3.538952 +step:7873 train loss:3.534859 +step:7874 train loss:3.487841 +step:7875 train loss:3.537694 +step:7876 train loss:3.488553 +step:7877 train loss:3.567909 +step:7878 train loss:3.590679 +step:7879 train loss:3.547521 +step:7880 train loss:3.560974 +step:7881 train loss:3.544521 +step:7882 train loss:3.535727 +step:7883 train loss:3.541265 +step:7884 train loss:3.537057 +step:7885 train loss:3.609242 +step:7886 train loss:3.524225 +step:7887 train loss:3.557077 +step:7888 train loss:3.552174 +step:7889 train loss:3.535196 +step:7890 train loss:3.522906 +step:7891 train loss:3.568178 +step:7892 train loss:3.647849 +step:7893 train loss:3.538695 +step:7894 train loss:3.579827 +step:7895 train loss:3.605088 +step:7896 train loss:3.531994 +step:7897 train loss:3.541145 +step:7898 train loss:3.556234 +step:7899 train loss:3.530665 +step:7900 train loss:3.486995 +step:7901 train loss:3.540089 +step:7902 train loss:3.578131 +step:7903 train loss:3.566641 +step:7904 train loss:3.543862 +step:7905 train loss:3.549933 +step:7906 train loss:3.552563 +step:7907 train loss:3.473720 +step:7908 train loss:3.562673 +step:7909 train loss:3.538272 +step:7910 train loss:3.490783 +step:7911 train loss:3.499675 +step:7912 train loss:3.554316 +step:7913 train loss:3.496847 +step:7914 train loss:3.474017 +step:7915 train loss:3.535561 +step:7916 train loss:3.507699 
+step:7917 train loss:3.511974 +step:7918 train loss:3.550531 +step:7919 train loss:3.555307 +step:7920 train loss:3.533970 +step:7921 train loss:3.526350 +step:7922 train loss:3.594201 +step:7923 train loss:3.556054 +step:7924 train loss:3.523030 +step:7925 train loss:3.546545 +step:7926 train loss:3.567016 +step:7927 train loss:3.517121 +step:7928 train loss:3.512051 +step:7929 train loss:3.486855 +step:7930 train loss:3.518099 +step:7931 train loss:3.550539 +step:7932 train loss:3.512675 +step:7933 train loss:3.518528 +step:7934 train loss:3.523329 +step:7935 train loss:3.503964 +step:7936 train loss:3.523738 +step:7937 train loss:3.483324 +step:7938 train loss:3.495711 +step:7939 train loss:3.470819 +step:7940 train loss:3.471195 +step:7941 train loss:3.566424 +step:7942 train loss:3.449406 +step:7943 train loss:3.514674 +step:7944 train loss:3.494417 +step:7945 train loss:3.470316 +step:7946 train loss:3.558829 +step:7947 train loss:3.565379 +step:7948 train loss:3.532833 +step:7949 train loss:3.577640 +step:7950 train loss:3.547600 +step:7951 train loss:3.503316 +step:7952 train loss:3.547576 +step:7953 train loss:3.522845 +step:7954 train loss:3.507739 +step:7955 train loss:3.600667 +step:7956 train loss:3.534379 +step:7957 train loss:3.500337 +step:7958 train loss:3.518046 +step:7959 train loss:3.503685 +step:7960 train loss:3.517665 +step:7961 train loss:3.499385 +step:7962 train loss:3.455127 +step:7963 train loss:3.536683 +step:7964 train loss:3.544050 +step:7965 train loss:3.507943 +step:7966 train loss:3.526764 +step:7967 train loss:3.555818 +step:7968 train loss:3.516856 +step:7969 train loss:3.554528 +step:7970 train loss:3.535826 +step:7971 train loss:3.570016 +step:7972 train loss:3.530794 +step:7973 train loss:3.584689 +step:7974 train loss:3.536883 +step:7975 train loss:3.610536 +step:7976 train loss:3.586559 +step:7977 train loss:3.482577 +step:7978 train loss:3.544173 +step:7979 train loss:3.534338 +step:7980 train loss:3.526031 +step:7981 train loss:3.571998 +step:7982 train loss:3.526102 +step:7983 train loss:3.575469 +step:7984 train loss:3.574454 +step:7985 train loss:3.501074 +step:7986 train loss:3.533443 +step:7987 train loss:3.519894 +step:7988 train loss:3.539842 +step:7989 train loss:3.567549 +step:7990 train loss:3.550383 +step:7991 train loss:3.462884 +step:7992 train loss:3.465661 +step:7993 train loss:3.586503 +step:7994 train loss:3.517080 +step:7995 train loss:3.548434 +step:7996 train loss:3.552886 +step:7997 train loss:3.498273 +step:7998 train loss:3.616669 +step:7999 train loss:3.621838 +step:8000 validation loss:3.461798 total_sharp:-2.8885e-03 L1_sharp:-3.1365e-02 L2_sharp:1.3258e-02 L3_sharp:4.0372e-03 L4_sharp:2.7536e-03 L5_sharp:1.6114e-03 L6_sharp:1.8803e-03 L7_sharp:1.3951e-03 L8_sharp:1.3018e-03 L9_sharp:1.0026e-03 L10_sharp:5.1834e-04 L11_sharp:6.4566e-04 L12_sharp:6.7827e-04 total_fnorm:2.2277e+00 total_l1_linf:1.8671e+04 total_spectral:2.2277e+00 L1_fnorm:4.7173e-01 L2_fnorm:2.5940e-01 L3_fnorm:3.1770e-01 L4_fnorm:4.1032e-01 L5_fnorm:4.9931e-01 L6_fnorm:5.2690e-01 L7_fnorm:5.8804e-01 L8_fnorm:5.9296e-01 L9_fnorm:5.9225e-01 L10_fnorm:5.9412e-01 L11_fnorm:5.6889e-01 L12_fnorm:5.9847e-01 L1_l1linf:3.7437e-01 L2_l1linf:5.4473e-01 L3_l1linf:4.9019e-01 L4_l1linf:5.6396e-01 L5_l1linf:4.1692e-01 L6_l1linf:4.1023e-01 L7_l1linf:3.9901e-01 L8_l1linf:3.9754e-01 L9_l1linf:4.0106e-01 L10_l1linf:4.2973e-01 L11_l1linf:4.6658e-01 L12_l1linf:4.5777e-01 L1_spectral:1.2037e-02 L2_spectral:1.6173e-02 L3_spectral:1.2028e-02 L4_spectral:2.1621e-02 
L5_spectral:1.3356e-02 L6_spectral:1.2042e-02 L7_spectral:1.2049e-02 L8_spectral:1.2051e-02 L9_spectral:1.2045e-02 L10_spectral:1.2052e-02 L11_spectral:1.2041e-02 L12_spectral:1.2044e-02 v_norm:2.2277e+00 cos_v_-g_hvp:1.7793e-02 g_hvp_norm:9.0800e-01 cos_v_-g_t:2.3089e-02 g_t_norm:9.6350e-01 hv_norm:1.0122e+01 cos_v_hv:-6.3573e-04 hg_norm:1.0980e+04 cos_g_hg:-1.9893e-01 v_par:4.6375e-03 v_perp:2.2277e+00 L1_cos_v_neg_g:6.2191e-03 L1_v_norm:4.7173e-01 L2_cos_v_neg_g:1.4711e-02 L2_v_norm:2.5940e-01 L3_cos_v_neg_g:2.1589e-02 L3_v_norm:3.1770e-01 L4_cos_v_neg_g:1.8596e-02 L4_v_norm:4.1032e-01 L5_cos_v_neg_g:2.2233e-02 L5_v_norm:4.9931e-01 L6_cos_v_neg_g:2.2985e-02 L6_v_norm:5.2690e-01 L7_cos_v_neg_g:2.3163e-02 L7_v_norm:5.8804e-01 L8_cos_v_neg_g:2.5114e-02 L8_v_norm:5.9296e-01 L9_cos_v_neg_g:2.5846e-02 L9_v_norm:5.9225e-01 L10_cos_v_neg_g:2.8596e-02 L10_v_norm:5.9412e-01 L11_cos_v_neg_g:3.7093e-02 L11_v_norm:5.6889e-01 L12_cos_v_neg_g:6.1714e-02 L12_v_norm:5.9847e-01 +step:8000 train loss:3.492537 +step:8001 train loss:3.567026 +step:8002 train loss:3.481273 +step:8003 train loss:3.508501 +step:8004 train loss:3.545107 +step:8005 train loss:3.665956 +step:8006 train loss:3.569344 +step:8007 train loss:3.543307 +step:8008 train loss:3.521898 +step:8009 train loss:3.523484 +step:8010 train loss:3.573544 +step:8011 train loss:3.553632 +step:8012 train loss:3.481145 +step:8013 train loss:3.556345 +step:8014 train loss:3.505440 +step:8015 train loss:3.523742 +step:8016 train loss:3.509313 +step:8017 train loss:3.487498 +step:8018 train loss:3.554624 +step:8019 train loss:3.514951 +step:8020 train loss:3.508350 +step:8021 train loss:3.515529 +step:8022 train loss:3.556846 +step:8023 train loss:3.646745 +step:8024 train loss:3.533916 +step:8025 train loss:3.561674 +step:8026 train loss:3.513960 +step:8027 train loss:3.533900 +step:8028 train loss:3.453102 +step:8029 train loss:3.565757 +step:8030 train loss:3.533720 +step:8031 train loss:3.558614 +step:8032 train loss:3.534483 +step:8033 train loss:3.535845 +step:8034 train loss:3.479852 +step:8035 train loss:3.458063 +step:8036 train loss:3.519976 +step:8037 train loss:3.447268 +step:8038 train loss:3.545392 +step:8039 train loss:3.572505 +step:8040 train loss:3.503518 +step:8041 train loss:3.473653 +step:8042 train loss:3.564436 +step:8043 train loss:3.573526 +step:8044 train loss:3.548124 +step:8045 train loss:3.549109 +step:8046 train loss:3.512987 +step:8047 train loss:3.594323 +step:8048 train loss:3.525054 +step:8049 train loss:3.558002 +step:8050 train loss:3.556036 +step:8051 train loss:3.490379 +step:8052 train loss:3.521105 +step:8053 train loss:3.579108 +step:8054 train loss:3.504320 +step:8055 train loss:3.520798 +step:8056 train loss:3.503407 +step:8057 train loss:3.529590 +step:8058 train loss:3.509707 +step:8059 train loss:3.506421 +step:8060 train loss:3.492273 +step:8061 train loss:3.500395 +step:8062 train loss:3.507961 +step:8063 train loss:3.511646 +step:8064 train loss:3.492607 +step:8065 train loss:3.509811 +step:8066 train loss:3.491812 +step:8067 train loss:3.521078 +step:8068 train loss:3.522326 +step:8069 train loss:3.545167 +step:8070 train loss:3.570471 +step:8071 train loss:3.531365 +step:8072 train loss:3.556914 +step:8073 train loss:3.508768 +step:8074 train loss:3.573186 +step:8075 train loss:3.554412 +step:8076 train loss:3.577734 +step:8077 train loss:3.506668 +step:8078 train loss:3.478925 +step:8079 train loss:3.516307 +step:8080 train loss:3.539871 +step:8081 train loss:3.472827 +step:8082 train loss:3.522399 
+step:8083 train loss:3.466042 +step:8084 train loss:3.507943 +step:8085 train loss:3.486336 +step:8086 train loss:3.578394 +step:8087 train loss:3.475282 +step:8088 train loss:3.541410 +step:8089 train loss:3.611561 +step:8090 train loss:3.536683 +step:8091 train loss:3.633835 +step:8092 train loss:3.562870 +step:8093 train loss:3.516090 +step:8094 train loss:3.549540 +step:8095 train loss:3.551880 +step:8096 train loss:3.590059 +step:8097 train loss:3.515411 +step:8098 train loss:3.499181 +step:8099 train loss:3.533215 +step:8100 train loss:3.492972 +step:8101 train loss:3.469291 +step:8102 train loss:3.525892 +step:8103 train loss:3.468245 +step:8104 train loss:3.468711 +step:8105 train loss:3.579825 +step:8106 train loss:3.578729 +step:8107 train loss:3.575273 +step:8108 train loss:3.566133 +step:8109 train loss:3.536414 +step:8110 train loss:3.507630 +step:8111 train loss:3.538171 +step:8112 train loss:3.552362 +step:8113 train loss:3.535571 +step:8114 train loss:3.494943 +step:8115 train loss:3.578384 +step:8116 train loss:3.464043 +step:8117 train loss:3.534721 +step:8118 train loss:3.513824 +step:8119 train loss:3.596250 +step:8120 train loss:3.486283 +step:8121 train loss:3.456991 +step:8122 train loss:3.490668 +step:8123 train loss:3.481498 +step:8124 train loss:3.554757 +step:8125 train loss:3.514745 +step:8126 train loss:3.563758 +step:8127 train loss:3.500110 +step:8128 train loss:3.550996 +step:8129 train loss:3.466431 +step:8130 train loss:3.534881 +step:8131 train loss:3.518731 +step:8132 train loss:3.561270 +step:8133 train loss:3.516938 +step:8134 train loss:3.534859 +step:8135 train loss:3.536863 +step:8136 train loss:3.437818 +step:8137 train loss:3.401598 +step:8138 train loss:3.470491 +step:8139 train loss:3.504601 +step:8140 train loss:3.498131 +step:8141 train loss:3.542569 +step:8142 train loss:3.496138 +step:8143 train loss:3.506982 +step:8144 train loss:3.544376 +step:8145 train loss:3.486970 +step:8146 train loss:3.576539 +step:8147 train loss:3.533874 +step:8148 train loss:3.538333 +step:8149 train loss:3.490594 +step:8150 train loss:3.539298 +step:8151 train loss:3.498209 +step:8152 train loss:3.482413 +step:8153 train loss:3.472267 +step:8154 train loss:3.557466 +step:8155 train loss:3.513387 +step:8156 train loss:3.554916 +step:8157 train loss:3.453586 +step:8158 train loss:3.460880 +step:8159 train loss:3.492450 +step:8160 train loss:3.473978 +step:8161 train loss:3.521045 +step:8162 train loss:3.546294 +step:8163 train loss:3.434780 +step:8164 train loss:3.451985 +step:8165 train loss:3.530685 +step:8166 train loss:3.490530 +step:8167 train loss:3.480907 +step:8168 train loss:3.474545 +step:8169 train loss:3.427095 +step:8170 train loss:3.528618 +step:8171 train loss:3.458902 +step:8172 train loss:3.543049 +step:8173 train loss:3.473795 +step:8174 train loss:3.559348 +step:8175 train loss:3.494198 +step:8176 train loss:3.556417 +step:8177 train loss:3.443404 +step:8178 train loss:3.483786 +step:8179 train loss:3.488279 +step:8180 train loss:3.507298 +step:8181 train loss:3.506022 +step:8182 train loss:3.459892 +step:8183 train loss:3.496867 +step:8184 train loss:3.406582 +step:8185 train loss:3.525187 +step:8186 train loss:3.531887 +step:8187 train loss:3.550736 +step:8188 train loss:3.514103 +step:8189 train loss:3.484121 +step:8190 train loss:3.478299 +step:8191 train loss:3.454162 +step:8192 train loss:3.509034 +step:8193 train loss:3.473358 +step:8194 train loss:3.506366 +step:8195 train loss:3.467510 +step:8196 train loss:3.515054 +step:8197 train 
loss:3.453090 +step:8198 train loss:3.643445 +step:8199 train loss:3.655691 +step:8200 train loss:3.519021 +step:8201 train loss:3.488564 +step:8202 train loss:3.566478 +step:8203 train loss:3.466712 +step:8204 train loss:3.512764 +step:8205 train loss:3.482794 +step:8206 train loss:3.504440 +step:8207 train loss:3.504366 +step:8208 train loss:3.458382 +step:8209 train loss:3.501469 +step:8210 train loss:3.396555 +step:8211 train loss:3.487617 +step:8212 train loss:3.442821 +step:8213 train loss:3.510043 +step:8214 train loss:3.479354 +step:8215 train loss:3.477342 +step:8216 train loss:3.497471 +step:8217 train loss:3.407469 +step:8218 train loss:3.482525 +step:8219 train loss:3.455145 +step:8220 train loss:3.506903 +step:8221 train loss:3.443178 +step:8222 train loss:3.507443 +step:8223 train loss:3.513647 +step:8224 train loss:3.581203 +step:8225 train loss:3.471548 +step:8226 train loss:3.560075 +step:8227 train loss:3.502742 +step:8228 train loss:3.454305 +step:8229 train loss:3.443320 +step:8230 train loss:3.493748 +step:8231 train loss:3.581729 +step:8232 train loss:3.505201 +step:8233 train loss:3.494741 +step:8234 train loss:3.470320 +step:8235 train loss:3.506112 +step:8236 train loss:3.525731 +step:8237 train loss:3.506975 +step:8238 train loss:3.452974 +step:8239 train loss:3.534243 +step:8240 train loss:3.416846 +step:8241 train loss:3.569120 +step:8242 train loss:3.499110 +step:8243 train loss:3.598684 +step:8244 train loss:3.476445 +step:8245 train loss:3.510076 +step:8246 train loss:3.464205 +step:8247 train loss:3.533214 +step:8248 train loss:3.487137 +step:8249 train loss:3.472400 +step:8250 validation loss:3.454832 +step:8250 train loss:3.464874 +step:8251 train loss:3.448379 +step:8252 train loss:3.514441 +step:8253 train loss:3.415617 +step:8254 train loss:3.442089 +step:8255 train loss:3.463431 +step:8256 train loss:3.496294 +step:8257 train loss:3.462464 +step:8258 train loss:3.466922 +step:8259 train loss:3.501514 +step:8260 train loss:3.485255 +step:8261 train loss:3.541032 +step:8262 train loss:3.511212 +step:8263 train loss:3.535580 +step:8264 train loss:3.494466 +step:8265 train loss:3.552271 +step:8266 train loss:3.494628 +step:8267 train loss:3.511882 +step:8268 train loss:3.490706 +step:8269 train loss:3.635104 +step:8270 train loss:3.434164 +step:8271 train loss:3.513512 +step:8272 train loss:3.460690 +step:8273 train loss:3.473187 +step:8274 train loss:3.529829 +step:8275 train loss:3.502063 +step:8276 train loss:3.585533 +step:8277 train loss:3.475191 +step:8278 train loss:3.479557 +step:8279 train loss:3.426223 +step:8280 train loss:3.511263 +step:8281 train loss:3.451626 +step:8282 train loss:3.530229 +step:8283 train loss:3.488741 +step:8284 train loss:3.484724 +step:8285 train loss:3.584556 +step:8286 train loss:3.533079 +step:8287 train loss:3.501829 +step:8288 train loss:3.462685 +step:8289 train loss:3.524787 +step:8290 train loss:3.468049 +step:8291 train loss:3.540594 +step:8292 train loss:3.514505 +step:8293 train loss:3.562550 +step:8294 train loss:3.607245 +step:8295 train loss:3.522856 +step:8296 train loss:3.488822 +step:8297 train loss:3.464605 +step:8298 train loss:3.532284 +step:8299 train loss:3.430597 +step:8300 train loss:3.458213 +step:8301 train loss:3.508442 +step:8302 train loss:3.528051 +step:8303 train loss:3.424229 +step:8304 train loss:3.510662 +step:8305 train loss:3.526936 +step:8306 train loss:3.490598 +step:8307 train loss:3.463283 +step:8308 train loss:3.485271 +step:8309 train loss:3.472459 +step:8310 train loss:3.509993 
+step:8311 train loss:3.456934 +step:8312 train loss:3.495093 +step:8313 train loss:3.492600 +step:8314 train loss:3.511691 +step:8315 train loss:3.489666 +step:8316 train loss:3.540845 +step:8317 train loss:3.464691 +step:8318 train loss:3.520779 +step:8319 train loss:3.467246 +step:8320 train loss:3.414493 +step:8321 train loss:3.556401 +step:8322 train loss:3.420296 +step:8323 train loss:3.531179 +step:8324 train loss:3.508366 +step:8325 train loss:3.440168 +step:8326 train loss:3.487689 +step:8327 train loss:3.434233 +step:8328 train loss:3.603154 +step:8329 train loss:3.463184 +step:8330 train loss:3.470035 +step:8331 train loss:3.552475 +step:8332 train loss:3.498840 +step:8333 train loss:3.527962 +step:8334 train loss:3.479175 +step:8335 train loss:3.529442 +step:8336 train loss:3.463407 +step:8337 train loss:3.470654 +step:8338 train loss:3.490434 +step:8339 train loss:3.468550 +step:8340 train loss:3.484077 +step:8341 train loss:3.446343 +step:8342 train loss:3.488178 +step:8343 train loss:3.507667 +step:8344 train loss:3.460532 +step:8345 train loss:3.475689 +step:8346 train loss:3.459562 +step:8347 train loss:3.526035 +step:8348 train loss:3.476216 +step:8349 train loss:3.507771 +step:8350 train loss:3.459983 +step:8351 train loss:3.450194 +step:8352 train loss:3.455420 +step:8353 train loss:3.469737 +step:8354 train loss:3.502330 +step:8355 train loss:3.487357 +step:8356 train loss:3.476617 +step:8357 train loss:3.484633 +step:8358 train loss:3.473622 +step:8359 train loss:3.508476 +step:8360 train loss:3.493932 +step:8361 train loss:3.413469 +step:8362 train loss:3.479075 +step:8363 train loss:3.483507 +step:8364 train loss:3.517685 +step:8365 train loss:3.513709 +step:8366 train loss:3.447143 +step:8367 train loss:3.442177 +step:8368 train loss:3.471267 +step:8369 train loss:3.504567 +step:8370 train loss:3.417092 +step:8371 train loss:3.451038 +step:8372 train loss:3.457604 +step:8373 train loss:3.451787 +step:8374 train loss:3.481494 +step:8375 train loss:3.467736 +step:8376 train loss:3.454484 +step:8377 train loss:3.447526 +step:8378 train loss:3.423809 +step:8379 train loss:3.482381 +step:8380 train loss:3.458529 +step:8381 train loss:3.482393 +step:8382 train loss:3.463629 +step:8383 train loss:3.506443 +step:8384 train loss:3.515475 +step:8385 train loss:3.491038 +step:8386 train loss:3.529632 +step:8387 train loss:3.429066 +step:8388 train loss:3.463677 +step:8389 train loss:3.418177 +step:8390 train loss:3.496439 +step:8391 train loss:3.480716 +step:8392 train loss:3.435934 +step:8393 train loss:3.525885 +step:8394 train loss:3.505853 +step:8395 train loss:3.452602 +step:8396 train loss:3.645171 +step:8397 train loss:3.448337 +step:8398 train loss:3.522332 +step:8399 train loss:3.463209 +step:8400 train loss:3.471278 +step:8401 train loss:3.482092 +step:8402 train loss:3.446865 +step:8403 train loss:3.521930 +step:8404 train loss:3.446884 +step:8405 train loss:3.473212 +step:8406 train loss:3.462049 +step:8407 train loss:3.527352 +step:8408 train loss:3.441803 +step:8409 train loss:3.392620 +step:8410 train loss:3.472974 +step:8411 train loss:3.519451 +step:8412 train loss:3.510774 +step:8413 train loss:3.462483 +step:8414 train loss:3.439145 +step:8415 train loss:3.468034 +step:8416 train loss:3.447577 +step:8417 train loss:3.465344 +step:8418 train loss:3.512281 +step:8419 train loss:3.434552 +step:8420 train loss:3.485086 +step:8421 train loss:3.459648 +step:8422 train loss:3.500337 +step:8423 train loss:3.459454 +step:8424 train loss:3.452106 +step:8425 train 
loss:3.498749 +step:8426 train loss:3.434287 +step:8427 train loss:3.513146 +step:8428 train loss:3.409319 +step:8429 train loss:3.439951 +step:8430 train loss:3.475860 +step:8431 train loss:3.445912 +step:8432 train loss:3.485027 +step:8433 train loss:3.445375 +step:8434 train loss:3.465415 +step:8435 train loss:3.457827 +step:8436 train loss:3.478074 +step:8437 train loss:3.490475 +step:8438 train loss:3.419658 +step:8439 train loss:3.497084 +step:8440 train loss:3.516742 +step:8441 train loss:3.552781 +step:8442 train loss:3.474711 +step:8443 train loss:3.525803 +step:8444 train loss:3.467174 +step:8445 train loss:3.417219 +step:8446 train loss:3.456641 +step:8447 train loss:3.517654 +step:8448 train loss:3.402979 +step:8449 train loss:3.467804 +step:8450 train loss:3.414593 +step:8451 train loss:3.472631 +step:8452 train loss:3.464810 +step:8453 train loss:3.445467 +step:8454 train loss:3.512698 +step:8455 train loss:3.424366 +step:8456 train loss:3.487471 +step:8457 train loss:3.463001 +step:8458 train loss:3.440319 +step:8459 train loss:3.525577 +step:8460 train loss:3.458306 +step:8461 train loss:3.487658 +step:8462 train loss:3.472474 +step:8463 train loss:3.433731 +step:8464 train loss:3.463358 +step:8465 train loss:3.480899 +step:8466 train loss:3.561635 +step:8467 train loss:3.438420 +step:8468 train loss:3.435921 +step:8469 train loss:3.470954 +step:8470 train loss:3.492390 +step:8471 train loss:3.526313 +step:8472 train loss:3.426060 +step:8473 train loss:3.496893 +step:8474 train loss:3.479284 +step:8475 train loss:3.456726 +step:8476 train loss:3.490153 +step:8477 train loss:3.463909 +step:8478 train loss:3.485177 +step:8479 train loss:3.479786 +step:8480 train loss:3.438783 +step:8481 train loss:3.503843 +step:8482 train loss:3.464010 +step:8483 train loss:3.559525 +step:8484 train loss:3.471190 +step:8485 train loss:3.415490 +step:8486 train loss:3.484467 +step:8487 train loss:3.421458 +step:8488 train loss:3.449548 +step:8489 train loss:3.560814 +step:8490 train loss:3.479691 +step:8491 train loss:3.451696 +step:8492 train loss:3.505150 +step:8493 train loss:3.423995 +step:8494 train loss:3.440976 +step:8495 train loss:3.403666 +step:8496 train loss:3.575952 +step:8497 train loss:3.657232 +step:8498 train loss:3.689875 +step:8499 train loss:3.619804 +step:8500 validation loss:3.434123 total_sharp:2.3619e-02 L1_sharp:1.9937e-01 L2_sharp:1.1663e-01 L3_sharp:1.9979e-03 L4_sharp:4.0124e-03 L5_sharp:2.9226e-03 L6_sharp:2.1510e-03 L7_sharp:1.7069e-03 L8_sharp:1.5057e-03 L9_sharp:1.0105e-03 L10_sharp:5.4033e-04 L11_sharp:5.3047e-04 L12_sharp:4.0465e-04 total_fnorm:1.6764e+00 total_l1_linf:1.4046e+04 total_spectral:1.6764e+00 L1_fnorm:3.4688e-01 L2_fnorm:2.1767e-01 L3_fnorm:2.4828e-01 L4_fnorm:3.0433e-01 L5_fnorm:3.7874e-01 L6_fnorm:3.9340e-01 L7_fnorm:4.3810e-01 L8_fnorm:4.4187e-01 L9_fnorm:4.4367e-01 L10_fnorm:4.4441e-01 L11_fnorm:4.2855e-01 L12_fnorm:4.4834e-01 L1_l1linf:2.9491e-01 L2_l1linf:3.9444e-01 L3_l1linf:3.5021e-01 L4_l1linf:3.6488e-01 L5_l1linf:3.5561e-01 L6_l1linf:3.3972e-01 L7_l1linf:3.1610e-01 L8_l1linf:2.9976e-01 L9_l1linf:3.1122e-01 L10_l1linf:3.2316e-01 L11_l1linf:3.5117e-01 L12_l1linf:3.6460e-01 L1_spectral:9.0363e-03 L2_spectral:9.1664e-03 L3_spectral:9.8766e-03 L4_spectral:1.5093e-02 L5_spectral:1.1005e-02 L6_spectral:9.0364e-03 L7_spectral:9.0393e-03 L8_spectral:9.0425e-03 L9_spectral:9.0416e-03 L10_spectral:9.0462e-03 L11_spectral:9.0367e-03 L12_spectral:9.0400e-03 v_norm:1.6764e+00 cos_v_-g_hvp:2.2433e-02 g_hvp_norm:7.2607e-01 cos_v_-g_t:2.5140e-02 
g_t_norm:7.4732e-01 hv_norm:1.5941e+01 cos_v_hv:2.4839e-03 hg_norm:2.4730e+03 cos_g_hg:-8.8992e-03 v_par:3.9933e-03 v_perp:1.6764e+00 L1_cos_v_neg_g:9.6038e-03 L1_v_norm:3.4688e-01 L2_cos_v_neg_g:8.6323e-03 L2_v_norm:2.1767e-01 L3_cos_v_neg_g:2.3623e-02 L3_v_norm:2.4828e-01 L4_cos_v_neg_g:2.3922e-02 L4_v_norm:3.0433e-01 L5_cos_v_neg_g:2.5632e-02 L5_v_norm:3.7874e-01 L6_cos_v_neg_g:2.6498e-02 L6_v_norm:3.9340e-01 L7_cos_v_neg_g:2.4930e-02 L7_v_norm:4.3810e-01 L8_cos_v_neg_g:2.6006e-02 L8_v_norm:4.4187e-01 L9_cos_v_neg_g:2.8121e-02 L9_v_norm:4.4367e-01 L10_cos_v_neg_g:2.9930e-02 L10_v_norm:4.4441e-01 L11_cos_v_neg_g:3.8452e-02 L11_v_norm:4.2855e-01 L12_cos_v_neg_g:6.7593e-02 L12_v_norm:4.4834e-01 +step:8500 train loss:3.474633 +step:8501 train loss:3.507336 +step:8502 train loss:3.497818 +step:8503 train loss:3.506810 +step:8504 train loss:3.436945 +step:8505 train loss:3.494101 +step:8506 train loss:3.428187 +step:8507 train loss:3.473979 +step:8508 train loss:3.455912 +step:8509 train loss:3.463905 +step:8510 train loss:3.479213 +step:8511 train loss:3.530291 +step:8512 train loss:3.499895 +step:8513 train loss:3.486835 +step:8514 train loss:3.462961 +step:8515 train loss:3.501794 +step:8516 train loss:3.454940 +step:8517 train loss:3.497402 +step:8518 train loss:3.495326 +step:8519 train loss:3.495650 +step:8520 train loss:3.505043 +step:8521 train loss:3.478748 +step:8522 train loss:3.504011 +step:8523 train loss:3.495952 +step:8524 train loss:3.401465 +step:8525 train loss:3.454297 +step:8526 train loss:3.508691 +step:8527 train loss:3.512329 +step:8528 train loss:3.494618 +step:8529 train loss:3.531915 +step:8530 train loss:3.461039 +step:8531 train loss:3.545008 +step:8532 train loss:3.494005 +step:8533 train loss:3.498021 +step:8534 train loss:3.451323 +step:8535 train loss:3.505861 +step:8536 train loss:3.492800 +step:8537 train loss:3.504136 +step:8538 train loss:3.514277 +step:8539 train loss:3.507639 +step:8540 train loss:3.468370 +step:8541 train loss:3.559746 +step:8542 train loss:3.620222 +step:8543 train loss:3.566608 +step:8544 train loss:3.507035 +step:8545 train loss:3.461385 +step:8546 train loss:3.519714 +step:8547 train loss:3.432647 +step:8548 train loss:3.520947 +step:8549 train loss:3.401359 +step:8550 train loss:3.481487 +step:8551 train loss:3.479426 +step:8552 train loss:3.486568 +step:8553 train loss:3.489150 +step:8554 train loss:3.438810 +step:8555 train loss:3.472082 +step:8556 train loss:3.482406 +step:8557 train loss:3.510828 +step:8558 train loss:3.509075 +step:8559 train loss:3.470221 +step:8560 train loss:3.509780 +step:8561 train loss:3.517793 +step:8562 train loss:3.449418 +step:8563 train loss:3.504383 +step:8564 train loss:3.475548 +step:8565 train loss:3.477325 +step:8566 train loss:3.511696 +step:8567 train loss:3.457661 +step:8568 train loss:3.463208 +step:8569 train loss:3.490112 +step:8570 train loss:3.422922 +step:8571 train loss:3.463753 +step:8572 train loss:3.482001 +step:8573 train loss:3.543697 +step:8574 train loss:3.466389 +step:8575 train loss:3.499064 +step:8576 train loss:3.472430 +step:8577 train loss:3.459438 +step:8578 train loss:3.513896 +step:8579 train loss:3.522519 +step:8580 train loss:3.499843 +step:8581 train loss:3.527647 +step:8582 train loss:3.489233 +step:8583 train loss:3.459318 +step:8584 train loss:3.554300 +step:8585 train loss:3.450235 +step:8586 train loss:3.465847 +step:8587 train loss:3.513369 +step:8588 train loss:3.446431 +step:8589 train loss:3.504215 +step:8590 train loss:3.482253 +step:8591 train 
loss:3.466820 +step:8592 train loss:3.480031 +step:8593 train loss:3.465806 +step:8594 train loss:3.500815 +step:8595 train loss:3.466732 +step:8596 train loss:3.482237 +step:8597 train loss:3.519057 +step:8598 train loss:3.482034 +step:8599 train loss:3.546390 +step:8600 train loss:3.497169 +step:8601 train loss:3.436829 +step:8602 train loss:3.524734 +step:8603 train loss:3.454124 +step:8604 train loss:3.542547 +step:8605 train loss:3.473765 +step:8606 train loss:3.442412 +step:8607 train loss:3.457961 +step:8608 train loss:3.422093 +step:8609 train loss:3.396353 +step:8610 train loss:3.522602 +step:8611 train loss:3.450853 +step:8612 train loss:3.478609 +step:8613 train loss:3.486835 +step:8614 train loss:3.445443 +step:8615 train loss:3.468734 +step:8616 train loss:3.522860 +step:8617 train loss:3.565786 +step:8618 train loss:3.528297 +step:8619 train loss:3.485444 +step:8620 train loss:3.528187 +step:8621 train loss:3.472870 +step:8622 train loss:3.497683 +step:8623 train loss:3.498744 +step:8624 train loss:3.491004 +step:8625 train loss:3.531147 +step:8626 train loss:3.546663 +step:8627 train loss:3.491930 +step:8628 train loss:3.532828 +step:8629 train loss:3.464431 +step:8630 train loss:3.498633 +step:8631 train loss:3.572227 +step:8632 train loss:3.526344 +step:8633 train loss:3.465933 +step:8634 train loss:3.452639 +step:8635 train loss:3.474648 +step:8636 train loss:3.506885 +step:8637 train loss:3.472298 +step:8638 train loss:3.529221 +step:8639 train loss:3.506071 +step:8640 train loss:3.462756 +step:8641 train loss:3.462837 +step:8642 train loss:3.475320 +step:8643 train loss:3.508019 +step:8644 train loss:3.540163 +step:8645 train loss:3.463633 +step:8646 train loss:3.520362 +step:8647 train loss:3.558972 +step:8648 train loss:3.493519 +step:8649 train loss:3.511366 +step:8650 train loss:3.469813 +step:8651 train loss:3.474223 +step:8652 train loss:3.461951 +step:8653 train loss:3.457440 +step:8654 train loss:3.572435 +step:8655 train loss:3.504351 +step:8656 train loss:3.507959 +step:8657 train loss:3.513930 +step:8658 train loss:3.496978 +step:8659 train loss:3.469263 +step:8660 train loss:3.565451 +step:8661 train loss:3.456505 +step:8662 train loss:3.537831 +step:8663 train loss:3.456976 +step:8664 train loss:3.501084 +step:8665 train loss:3.538307 +step:8666 train loss:3.498065 +step:8667 train loss:3.499689 +step:8668 train loss:3.427453 +step:8669 train loss:3.458268 +step:8670 train loss:3.472105 +step:8671 train loss:3.493629 +step:8672 train loss:3.495744 +step:8673 train loss:3.515991 +step:8674 train loss:3.469500 +step:8675 train loss:3.469064 +step:8676 train loss:3.496441 +step:8677 train loss:3.479445 +step:8678 train loss:3.492905 +step:8679 train loss:3.510917 +step:8680 train loss:3.472028 +step:8681 train loss:3.504450 +step:8682 train loss:3.500825 +step:8683 train loss:3.491214 +step:8684 train loss:3.481984 +step:8685 train loss:3.435745 +step:8686 train loss:3.453374 +step:8687 train loss:3.505114 +step:8688 train loss:3.507003 +step:8689 train loss:3.494806 +step:8690 train loss:3.518191 +step:8691 train loss:3.452096 +step:8692 train loss:3.444283 +step:8693 train loss:3.492008 +step:8694 train loss:3.497573 +step:8695 train loss:3.467065 +step:8696 train loss:3.459102 +step:8697 train loss:3.419534 +step:8698 train loss:3.434543 +step:8699 train loss:3.410773 +step:8700 train loss:3.397265 +step:8701 train loss:3.457566 +step:8702 train loss:3.494584 +step:8703 train loss:3.504564 +step:8704 train loss:3.596356 +step:8705 train loss:3.444551 
+step:8706 train loss:3.533223 +step:8707 train loss:3.479211 +step:8708 train loss:3.499007 +step:8709 train loss:3.459912 +step:8710 train loss:3.450182 +step:8711 train loss:3.495256 +step:8712 train loss:3.394626 +step:8713 train loss:3.429667 +step:8714 train loss:3.429259 +step:8715 train loss:3.416758 +step:8716 train loss:3.447328 +step:8717 train loss:3.386227 +step:8718 train loss:3.501415 +step:8719 train loss:3.414560 +step:8720 train loss:3.442260 +step:8721 train loss:3.518736 +step:8722 train loss:3.454409 +step:8723 train loss:3.456795 +step:8724 train loss:3.450375 +step:8725 train loss:3.402469 +step:8726 train loss:3.496967 +step:8727 train loss:3.446265 +step:8728 train loss:3.436738 +step:8729 train loss:3.455302 +step:8730 train loss:3.377190 +step:8731 train loss:3.461075 +step:8732 train loss:3.522266 +step:8733 train loss:3.523134 +step:8734 train loss:3.456626 +step:8735 train loss:3.461125 +step:8736 train loss:3.468324 +step:8737 train loss:3.467241 +step:8738 train loss:3.424947 +step:8739 train loss:3.387668 +step:8740 train loss:3.468166 +step:8741 train loss:3.373730 +step:8742 train loss:3.491865 +step:8743 train loss:3.464151 +step:8744 train loss:3.511285 +step:8745 train loss:3.441564 +step:8746 train loss:3.462536 +step:8747 train loss:3.472726 +step:8748 train loss:3.425621 +step:8749 train loss:3.395710 +step:8750 validation loss:3.422947 +step:8750 train loss:3.532845 +step:8751 train loss:3.415231 +step:8752 train loss:3.462883 +step:8753 train loss:3.511631 +step:8754 train loss:3.461624 +step:8755 train loss:3.512220 +step:8756 train loss:3.570859 +step:8757 train loss:3.439576 +step:8758 train loss:3.447773 +step:8759 train loss:3.469301 +step:8760 train loss:3.466564 +step:8761 train loss:3.450678 +step:8762 train loss:3.449937 +step:8763 train loss:3.465277 +step:8764 train loss:3.420159 +step:8765 train loss:3.394139 +step:8766 train loss:3.466316 +step:8767 train loss:3.436708 +step:8768 train loss:3.499557 +step:8769 train loss:3.436998 +step:8770 train loss:3.407905 +step:8771 train loss:3.461144 +step:8772 train loss:3.542723 +step:8773 train loss:3.422740 +step:8774 train loss:3.460917 +step:8775 train loss:3.438216 +step:8776 train loss:3.476656 +step:8777 train loss:3.542861 +step:8778 train loss:3.430917 +step:8779 train loss:3.494211 +step:8780 train loss:3.394350 +step:8781 train loss:3.461724 +step:8782 train loss:3.470263 +step:8783 train loss:3.393721 +step:8784 train loss:3.527598 +step:8785 train loss:3.436402 +step:8786 train loss:3.472617 +step:8787 train loss:3.454432 +step:8788 train loss:3.427982 +step:8789 train loss:3.514243 +step:8790 train loss:3.402256 +step:8791 train loss:3.480306 +step:8792 train loss:3.453984 +step:8793 train loss:3.452121 +step:8794 train loss:3.452372 +step:8795 train loss:3.462169 +step:8796 train loss:3.438403 +step:8797 train loss:3.430929 +step:8798 train loss:3.481374 +step:8799 train loss:3.491458 +step:8800 train loss:3.475187 +step:8801 train loss:3.452457 +step:8802 train loss:3.374775 +step:8803 train loss:3.543643 +step:8804 train loss:3.463402 +step:8805 train loss:3.426298 +step:8806 train loss:3.481113 +step:8807 train loss:3.475426 +step:8808 train loss:3.466931 +step:8809 train loss:3.439260 +step:8810 train loss:3.414331 +step:8811 train loss:3.518367 +step:8812 train loss:3.413425 +step:8813 train loss:3.450716 +step:8814 train loss:3.509553 +step:8815 train loss:3.435967 +step:8816 train loss:3.456122 +step:8817 train loss:3.457201 +step:8818 train loss:3.425454 +step:8819 
train loss:3.475428 +step:8820 train loss:3.451848 +step:8821 train loss:3.453462 +step:8822 train loss:3.493418 +step:8823 train loss:3.395963 +step:8824 train loss:3.541914 +step:8825 train loss:3.438267 +step:8826 train loss:3.425163 +step:8827 train loss:3.448953 +step:8828 train loss:3.390062 +step:8829 train loss:3.424080 +step:8830 train loss:3.437739 +step:8831 train loss:3.389438 +step:8832 train loss:3.466328 +step:8833 train loss:3.469362 +step:8834 train loss:3.475874 +step:8835 train loss:3.464605 +step:8836 train loss:3.472192 +step:8837 train loss:3.426284 +step:8838 train loss:3.462922 +step:8839 train loss:3.370836 +step:8840 train loss:3.429049 +step:8841 train loss:3.451169 +step:8842 train loss:3.437137 +step:8843 train loss:3.505892 +step:8844 train loss:3.419278 +step:8845 train loss:3.440813 +step:8846 train loss:3.460261 +step:8847 train loss:3.418893 +step:8848 train loss:3.402829 +step:8849 train loss:3.439784 +step:8850 train loss:3.511297 +step:8851 train loss:3.425044 +step:8852 train loss:3.574989 +step:8853 train loss:3.436285 +step:8854 train loss:3.474692 +step:8855 train loss:3.465593 +step:8856 train loss:3.424691 +step:8857 train loss:3.417029 +step:8858 train loss:3.413435 +step:8859 train loss:3.407996 +step:8860 train loss:3.457817 +step:8861 train loss:3.493297 +step:8862 train loss:3.395799 +step:8863 train loss:3.471299 +step:8864 train loss:3.478603 +step:8865 train loss:3.395402 +step:8866 train loss:3.478030 +step:8867 train loss:3.400479 +step:8868 train loss:3.444795 +step:8869 train loss:3.493602 +step:8870 train loss:3.418741 +step:8871 train loss:3.521482 +step:8872 train loss:3.399365 +step:8873 train loss:3.405120 +step:8874 train loss:3.470273 +step:8875 train loss:3.453970 +step:8876 train loss:3.361360 +step:8877 train loss:3.454238 +step:8878 train loss:3.380951 +step:8879 train loss:3.412319 +step:8880 train loss:3.456896 +step:8881 train loss:3.393970 +step:8882 train loss:3.450715 +step:8883 train loss:3.423851 +step:8884 train loss:3.465500 +step:8885 train loss:3.439980 +step:8886 train loss:3.473422 +step:8887 train loss:3.436550 +step:8888 train loss:3.440989 +step:8889 train loss:3.433782 +step:8890 train loss:3.447857 +step:8891 train loss:3.453388 +step:8892 train loss:3.386821 +step:8893 train loss:3.450418 +step:8894 train loss:3.413802 +step:8895 train loss:3.438607 +step:8896 train loss:3.439302 +step:8897 train loss:3.441301 +step:8898 train loss:3.430521 +step:8899 train loss:3.519668 +step:8900 train loss:3.399583 +step:8901 train loss:3.486257 +step:8902 train loss:3.431061 +step:8903 train loss:3.488414 +step:8904 train loss:3.398276 +step:8905 train loss:3.456573 +step:8906 train loss:3.385070 +step:8907 train loss:3.492804 +step:8908 train loss:3.404898 +step:8909 train loss:3.552125 +step:8910 train loss:3.399616 +step:8911 train loss:3.456689 +step:8912 train loss:3.416207 +step:8913 train loss:3.443228 +step:8914 train loss:3.372940 +step:8915 train loss:3.448968 +step:8916 train loss:3.400237 +step:8917 train loss:3.446293 +step:8918 train loss:3.481074 +step:8919 train loss:3.411132 +step:8920 train loss:3.397184 +step:8921 train loss:3.477762 +step:8922 train loss:3.407238 +step:8923 train loss:3.481175 +step:8924 train loss:3.409088 +step:8925 train loss:3.426928 +step:8926 train loss:3.390052 +step:8927 train loss:3.401771 +step:8928 train loss:3.398420 +step:8929 train loss:3.455935 +step:8930 train loss:3.378385 +step:8931 train loss:3.443660 +step:8932 train loss:3.439139 +step:8933 train loss:3.458932 
+step:8934 train loss:3.480243 +step:8935 train loss:3.377205 +step:8936 train loss:3.458620 +step:8937 train loss:3.389567 +step:8938 train loss:3.459877 +step:8939 train loss:3.414874 +step:8940 train loss:3.504827 +step:8941 train loss:3.352214 +step:8942 train loss:3.394913 +step:8943 train loss:3.378757 +step:8944 train loss:3.400446 +step:8945 train loss:3.391288 +step:8946 train loss:3.470732 +step:8947 train loss:3.445420 +step:8948 train loss:3.521412 +step:8949 train loss:3.432899 +step:8950 train loss:3.468493 +step:8951 train loss:3.410662 +step:8952 train loss:3.492734 +step:8953 train loss:3.392426 +step:8954 train loss:3.462304 +step:8955 train loss:3.481003 +step:8956 train loss:3.451013 +step:8957 train loss:3.514315 +step:8958 train loss:3.448147 +step:8959 train loss:3.425100 +step:8960 train loss:3.402398 +step:8961 train loss:3.455791 +step:8962 train loss:3.459030 +step:8963 train loss:3.373410 +step:8964 train loss:3.525550 +step:8965 train loss:3.416641 +step:8966 train loss:3.431153 +step:8967 train loss:3.369364 +step:8968 train loss:3.445721 +step:8969 train loss:3.345467 +step:8970 train loss:3.440689 +step:8971 train loss:3.440242 +step:8972 train loss:3.435546 +step:8973 train loss:3.442669 +step:8974 train loss:3.426347 +step:8975 train loss:3.460625 +step:8976 train loss:3.443609 +step:8977 train loss:3.402805 +step:8978 train loss:3.455264 +step:8979 train loss:3.403467 +step:8980 train loss:3.504551 +step:8981 train loss:3.406924 +step:8982 train loss:3.484633 +step:8983 train loss:3.414267 +step:8984 train loss:3.435452 +step:8985 train loss:3.468448 +step:8986 train loss:3.487840 +step:8987 train loss:3.450568 +step:8988 train loss:3.465845 +step:8989 train loss:3.373058 +step:8990 train loss:3.459030 +step:8991 train loss:3.404167 +step:8992 train loss:3.516906 +step:8993 train loss:3.454875 +step:8994 train loss:3.625279 +step:8995 train loss:3.431275 +step:8996 train loss:3.430612 +step:8997 train loss:3.429290 +step:8998 train loss:3.500475 +step:8999 train loss:3.389704 +step:9000 validation loss:3.413935 total_sharp:-3.4296e-03 L1_sharp:-1.1535e-01 L2_sharp:-1.3553e-02 L3_sharp:5.9701e-03 L4_sharp:6.4453e-03 L5_sharp:4.6042e-03 L6_sharp:2.3816e-03 L7_sharp:1.6249e-03 L8_sharp:1.5963e-03 L9_sharp:1.1926e-03 L10_sharp:6.5902e-04 L11_sharp:6.8862e-04 L12_sharp:4.9065e-04 total_fnorm:1.1001e+00 total_l1_linf:9.1971e+03 total_spectral:1.1001e+00 L1_fnorm:2.3318e-01 L2_fnorm:1.4862e-01 L3_fnorm:1.5054e-01 L4_fnorm:1.9436e-01 L5_fnorm:2.3299e-01 L6_fnorm:2.5510e-01 L7_fnorm:2.8861e-01 L8_fnorm:2.9087e-01 L9_fnorm:2.9300e-01 L10_fnorm:2.9487e-01 L11_fnorm:2.8257e-01 L12_fnorm:2.9945e-01 L1_l1linf:2.0098e-01 L2_l1linf:2.4829e-01 L3_l1linf:2.7165e-01 L4_l1linf:2.7287e-01 L5_l1linf:2.6243e-01 L6_l1linf:2.5388e-01 L7_l1linf:2.4807e-01 L8_l1linf:2.4198e-01 L9_l1linf:2.5380e-01 L10_l1linf:2.6346e-01 L11_l1linf:2.7601e-01 L12_l1linf:2.7727e-01 L1_spectral:6.0255e-03 L2_spectral:6.2333e-03 L3_spectral:6.1071e-03 L4_spectral:9.7035e-03 L5_spectral:8.9454e-03 L6_spectral:6.1215e-03 L7_spectral:6.0290e-03 L8_spectral:6.0275e-03 L9_spectral:6.0302e-03 L10_spectral:6.0282e-03 L11_spectral:6.1195e-03 L12_spectral:6.1230e-03 v_norm:1.1001e+00 cos_v_-g_hvp:1.8421e-02 g_hvp_norm:8.4357e-01 cos_v_-g_t:1.6606e-02 g_t_norm:1.3294e+00 hv_norm:6.6056e+00 cos_v_hv:-5.7119e-04 hg_norm:1.9494e+03 cos_g_hg:3.5620e-02 v_par:2.0764e-03 v_perp:1.1001e+00 L1_cos_v_neg_g:1.1450e-02 L1_v_norm:2.3318e-01 L2_cos_v_neg_g:1.0580e-02 L2_v_norm:1.4862e-01 L3_cos_v_neg_g:1.6223e-02 
L3_v_norm:1.5054e-01 L4_cos_v_neg_g:2.0252e-02 L4_v_norm:1.9436e-01 L5_cos_v_neg_g:2.1757e-02 L5_v_norm:2.3299e-01 L6_cos_v_neg_g:2.2499e-02 L6_v_norm:2.5510e-01 L7_cos_v_neg_g:2.1295e-02 L7_v_norm:2.8861e-01 L8_cos_v_neg_g:2.1947e-02 L8_v_norm:2.9087e-01 L9_cos_v_neg_g:2.2690e-02 L9_v_norm:2.9300e-01 L10_cos_v_neg_g:2.5193e-02 L10_v_norm:2.9487e-01 L11_cos_v_neg_g:3.5694e-02 L11_v_norm:2.8257e-01 L12_cos_v_neg_g:6.3534e-02 L12_v_norm:2.9945e-01 +step:9000 train loss:3.427260 +step:9001 train loss:3.402411 +step:9002 train loss:3.461182 +step:9003 train loss:3.419634 +step:9004 train loss:3.441228 +step:9005 train loss:3.393501 +step:9006 train loss:3.485309 +step:9007 train loss:3.420492 +step:9008 train loss:3.482551 +step:9009 train loss:3.418380 +step:9010 train loss:3.437954 +step:9011 train loss:3.398867 +step:9012 train loss:3.454612 +step:9013 train loss:3.409704 +step:9014 train loss:3.500486 +step:9015 train loss:3.433915 +step:9016 train loss:3.477685 +step:9017 train loss:3.461364 +step:9018 train loss:3.550502 +step:9019 train loss:3.416990 +step:9020 train loss:3.451662 +step:9021 train loss:3.409001 +step:9022 train loss:3.437565 +step:9023 train loss:3.365623 +step:9024 train loss:3.448072 +step:9025 train loss:3.402105 +step:9026 train loss:3.417402 +step:9027 train loss:3.456585 +step:9028 train loss:3.488227 +step:9029 train loss:3.415156 +step:9030 train loss:3.479135 +step:9031 train loss:3.449378 +step:9032 train loss:3.496609 +step:9033 train loss:3.423593 +step:9034 train loss:3.415512 +step:9035 train loss:3.370337 +step:9036 train loss:3.470108 +step:9037 train loss:3.497452 +step:9038 train loss:3.473297 +step:9039 train loss:3.414236 +step:9040 train loss:3.438594 +step:9041 train loss:3.456105 +step:9042 train loss:3.489209 +step:9043 train loss:3.480102 +step:9044 train loss:3.454097 +step:9045 train loss:3.435620 +step:9046 train loss:3.420194 +step:9047 train loss:3.394701 +step:9048 train loss:3.433635 +step:9049 train loss:3.399296 +step:9050 train loss:3.456952 +step:9051 train loss:3.422850 +step:9052 train loss:3.418309 +step:9053 train loss:3.418130 +step:9054 train loss:3.452318 +step:9055 train loss:3.435861 +step:9056 train loss:3.394397 +step:9057 train loss:3.497147 +step:9058 train loss:3.350460 +step:9059 train loss:3.434832 +step:9060 train loss:3.374974 +step:9061 train loss:3.397267 +step:9062 train loss:3.366275 +step:9063 train loss:3.470176 +step:9064 train loss:3.430901 +step:9065 train loss:3.424709 +step:9066 train loss:3.417027 +step:9067 train loss:3.453053 +step:9068 train loss:3.462330 +step:9069 train loss:3.488384 +step:9070 train loss:3.432240 +step:9071 train loss:3.469142 +step:9072 train loss:3.424387 +step:9073 train loss:3.492565 +step:9074 train loss:3.414885 +step:9075 train loss:3.501420 +step:9076 train loss:3.421317 +step:9077 train loss:3.429081 +step:9078 train loss:3.454600 +step:9079 train loss:3.517105 +step:9080 train loss:3.506851 +step:9081 train loss:3.540365 +step:9082 train loss:3.437157 +step:9083 train loss:3.510039 +step:9084 train loss:3.480759 +step:9085 train loss:3.475735 +step:9086 train loss:3.541342 +step:9087 train loss:3.385915 +step:9088 train loss:3.554684 +step:9089 train loss:3.425582 +step:9090 train loss:3.494928 +step:9091 train loss:3.483954 +step:9092 train loss:3.475683 +step:9093 train loss:3.425293 +step:9094 train loss:3.464438 +step:9095 train loss:3.450251 +step:9096 train loss:3.465919 +step:9097 train loss:3.497410 +step:9098 train loss:3.468663 +step:9099 train loss:3.483541 
+step:9100 train loss:3.420882 +step:9101 train loss:3.478881 +step:9102 train loss:3.520578 +step:9103 train loss:3.435574 +step:9104 train loss:3.446390 +step:9105 train loss:3.462784 +step:9106 train loss:3.498566 +step:9107 train loss:3.458385 +step:9108 train loss:3.504337 +step:9109 train loss:3.413482 +step:9110 train loss:3.501276 +step:9111 train loss:3.437929 +step:9112 train loss:3.445243 +step:9113 train loss:3.444449 +step:9114 train loss:3.490215 +step:9115 train loss:3.478461 +step:9116 train loss:3.463883 +step:9117 train loss:3.531807 +step:9118 train loss:3.501252 +step:9119 train loss:3.451567 +step:9120 train loss:3.437453 +step:9121 train loss:3.497608 +step:9122 train loss:3.439957 +step:9123 train loss:3.446507 +step:9124 train loss:3.469486 +step:9125 train loss:3.462588 +step:9126 train loss:3.429806 +step:9127 train loss:3.446129 +step:9128 train loss:3.406859 +step:9129 train loss:3.491033 +step:9130 train loss:3.469921 +step:9131 train loss:3.460284 +step:9132 train loss:3.467854 +step:9133 train loss:3.497206 +step:9134 train loss:3.449344 +step:9135 train loss:3.585454 +step:9136 train loss:3.454235 +step:9137 train loss:3.457663 +step:9138 train loss:3.493094 +step:9139 train loss:3.421916 +step:9140 train loss:3.485467 +step:9141 train loss:3.406207 +step:9142 train loss:3.460599 +step:9143 train loss:3.470217 +step:9144 train loss:3.474430 +step:9145 train loss:3.422654 +step:9146 train loss:3.538999 +step:9147 train loss:3.488524 +step:9148 train loss:3.493705 +step:9149 train loss:3.496792 +step:9150 train loss:3.419493 +step:9151 train loss:3.454533 +step:9152 train loss:3.424249 +step:9153 train loss:3.527546 +step:9154 train loss:3.489325 +step:9155 train loss:3.469156 +step:9156 train loss:3.512773 +step:9157 train loss:3.482640 +step:9158 train loss:3.600855 +step:9159 train loss:3.404737 +step:9160 train loss:3.492446 +step:9161 train loss:3.454006 +step:9162 train loss:3.484975 +step:9163 train loss:3.407043 +step:9164 train loss:3.456157 +step:9165 train loss:3.500555 +step:9166 train loss:3.470271 +step:9167 train loss:3.509412 +step:9168 train loss:3.448361 +step:9169 train loss:3.415202 +step:9170 train loss:3.505088 +step:9171 train loss:3.447769 +step:9172 train loss:3.521154 +step:9173 train loss:3.467914 +step:9174 train loss:3.493921 +step:9175 train loss:3.441928 +step:9176 train loss:3.481300 +step:9177 train loss:3.487899 +step:9178 train loss:3.430202 +step:9179 train loss:3.444335 +step:9180 train loss:3.491127 +step:9181 train loss:3.457983 +step:9182 train loss:3.469068 +step:9183 train loss:3.440549 +step:9184 train loss:3.517205 +step:9185 train loss:3.436744 +step:9186 train loss:3.430981 +step:9187 train loss:3.447364 +step:9188 train loss:3.391167 +step:9189 train loss:3.438661 +step:9190 train loss:3.427019 +step:9191 train loss:3.447655 +step:9192 train loss:3.414966 +step:9193 train loss:3.464962 +step:9194 train loss:3.441163 +step:9195 train loss:3.430426 +step:9196 train loss:3.544507 +step:9197 train loss:3.543082 +step:9198 train loss:3.456496 +step:9199 train loss:3.466958 +step:9200 train loss:3.479652 +step:9201 train loss:3.438916 +step:9202 train loss:3.431516 +step:9203 train loss:3.433203 +step:9204 train loss:3.464875 +step:9205 train loss:3.426663 +step:9206 train loss:3.496976 +step:9207 train loss:3.438861 +step:9208 train loss:3.498215 +step:9209 train loss:3.485908 +step:9210 train loss:3.447862 +step:9211 train loss:3.494496 +step:9212 train loss:3.463644 +step:9213 train loss:3.450781 +step:9214 train 
loss:3.464877 +step:9215 train loss:3.419819 +step:9216 train loss:3.420665 +step:9217 train loss:3.439597 +step:9218 train loss:3.434944 +step:9219 train loss:3.423994 +step:9220 train loss:3.486575 +step:9221 train loss:3.486010 +step:9222 train loss:3.453248 +step:9223 train loss:3.499253 +step:9224 train loss:3.382330 +step:9225 train loss:3.415506 +step:9226 train loss:3.400890 +step:9227 train loss:3.416914 +step:9228 train loss:3.470119 +step:9229 train loss:3.483740 +step:9230 train loss:3.483152 +step:9231 train loss:3.440702 +step:9232 train loss:3.487406 +step:9233 train loss:3.444814 +step:9234 train loss:3.484784 +step:9235 train loss:3.448477 +step:9236 train loss:3.468336 +step:9237 train loss:3.487683 +step:9238 train loss:3.504269 +step:9239 train loss:3.451282 +step:9240 train loss:3.502384 +step:9241 train loss:3.452552 +step:9242 train loss:3.503390 +step:9243 train loss:3.450151 +step:9244 train loss:3.435415 +step:9245 train loss:3.489491 +step:9246 train loss:3.470402 +step:9247 train loss:3.469805 +step:9248 train loss:3.388693 +step:9249 train loss:3.488339 +step:9250 validation loss:3.400063 +step:9250 train loss:3.520038 +step:9251 train loss:3.466096 +step:9252 train loss:3.503015 +step:9253 train loss:3.447941 +step:9254 train loss:3.544308 +step:9255 train loss:3.473289 +step:9256 train loss:3.551143 +step:9257 train loss:3.541368 +step:9258 train loss:3.393304 +step:9259 train loss:3.432277 +step:9260 train loss:3.531976 +step:9261 train loss:3.441174 +step:9262 train loss:3.357929 +step:9263 train loss:3.355849 +step:9264 train loss:3.454345 +step:9265 train loss:3.393768 +step:9266 train loss:3.449393 +step:9267 train loss:3.487555 +step:9268 train loss:3.485047 +step:9269 train loss:3.471211 +step:9270 train loss:3.527606 +step:9271 train loss:3.430823 +step:9272 train loss:3.497228 +step:9273 train loss:3.465501 +step:9274 train loss:3.414858 +step:9275 train loss:3.439138 +step:9276 train loss:3.460058 +step:9277 train loss:3.487741 +step:9278 train loss:3.404608 +step:9279 train loss:3.493450 +step:9280 train loss:3.493382 +step:9281 train loss:3.444246 +step:9282 train loss:3.472117 +step:9283 train loss:3.494682 +step:9284 train loss:3.475587 +step:9285 train loss:3.442215 +step:9286 train loss:3.480188 +step:9287 train loss:3.404549 +step:9288 train loss:3.466642 +step:9289 train loss:3.475825 +step:9290 train loss:3.409396 +step:9291 train loss:3.432608 +step:9292 train loss:3.475319 +step:9293 train loss:3.491074 +step:9294 train loss:3.404235 +step:9295 train loss:3.480591 +step:9296 train loss:3.462985 +step:9297 train loss:3.430151 +step:9298 train loss:3.398015 +step:9299 train loss:3.398696 +step:9300 train loss:3.445589 +step:9301 train loss:3.424118 +step:9302 train loss:3.441442 +step:9303 train loss:3.432516 +step:9304 train loss:3.455150 +step:9305 train loss:3.447237 +step:9306 train loss:3.466893 +step:9307 train loss:3.529692 +step:9308 train loss:3.444018 +step:9309 train loss:3.405482 +step:9310 train loss:3.459762 +step:9311 train loss:3.459713 +step:9312 train loss:3.439147 +step:9313 train loss:3.464299 +step:9314 train loss:3.484555 +step:9315 train loss:3.462955 +step:9316 train loss:3.452906 +step:9317 train loss:3.460221 +step:9318 train loss:3.447006 +step:9319 train loss:3.456121 +step:9320 train loss:3.484017 +step:9321 train loss:3.522895 +step:9322 train loss:3.399379 +step:9323 train loss:3.440644 +step:9324 train loss:3.438476 +step:9325 train loss:3.397546 +step:9326 train loss:3.525572 +step:9327 train loss:3.449847 
+step:9328 train loss:3.422955 +step:9329 train loss:3.452150 +step:9330 train loss:3.513616 +step:9331 train loss:3.456657 +step:9332 train loss:3.466198 +step:9333 train loss:3.450975 +step:9334 train loss:3.441766 +step:9335 train loss:3.442095 +step:9336 train loss:3.446101 +step:9337 train loss:3.455020 +step:9338 train loss:3.519989 +step:9339 train loss:3.464438 +step:9340 train loss:3.430856 +step:9341 train loss:3.617977 +step:9342 train loss:3.414759 +step:9343 train loss:3.403581 +step:9344 train loss:3.506058 +step:9345 train loss:3.446292 +step:9346 train loss:3.452262 +step:9347 train loss:3.476393 +step:9348 train loss:3.458721 +step:9349 train loss:3.440331 +step:9350 train loss:3.509525 +step:9351 train loss:3.487594 +step:9352 train loss:3.456736 +step:9353 train loss:3.492468 +step:9354 train loss:3.493479 +step:9355 train loss:3.455956 +step:9356 train loss:3.467057 +step:9357 train loss:3.465921 +step:9358 train loss:3.524811 +step:9359 train loss:3.460982 +step:9360 train loss:3.482141 +step:9361 train loss:3.437734 +step:9362 train loss:3.446065 +step:9363 train loss:3.454378 +step:9364 train loss:3.419832 +step:9365 train loss:3.433239 +step:9366 train loss:3.516311 +step:9367 train loss:3.420967 +step:9368 train loss:3.476751 +step:9369 train loss:3.455530 +step:9370 train loss:3.490176 +step:9371 train loss:3.452395 +step:9372 train loss:3.420509 +step:9373 train loss:3.469001 +step:9374 train loss:3.408021 +step:9375 train loss:3.439287 +step:9376 train loss:3.419538 +step:9377 train loss:3.525577 +step:9378 train loss:3.444569 +step:9379 train loss:3.423070 +step:9380 train loss:3.454690 +step:9381 train loss:3.421073 +step:9382 train loss:3.416329 +step:9383 train loss:3.505497 +step:9384 train loss:3.422317 +step:9385 train loss:3.533581 +step:9386 train loss:3.392660 +step:9387 train loss:3.430572 +step:9388 train loss:3.456460 +step:9389 train loss:3.421579 +step:9390 train loss:3.413622 +step:9391 train loss:3.438787 +step:9392 train loss:3.467953 +step:9393 train loss:3.480904 +step:9394 train loss:3.542543 +step:9395 train loss:3.557287 +step:9396 train loss:3.482940 +step:9397 train loss:3.443351 +step:9398 train loss:3.497076 +step:9399 train loss:3.476441 +step:9400 train loss:3.415520 +step:9401 train loss:3.436563 +step:9402 train loss:3.429158 +step:9403 train loss:3.418001 +step:9404 train loss:3.453753 +step:9405 train loss:3.478044 +step:9406 train loss:3.416348 +step:9407 train loss:3.469773 +step:9408 train loss:3.473658 +step:9409 train loss:3.492608 +step:9410 train loss:3.437968 +step:9411 train loss:3.501545 +step:9412 train loss:3.487632 +step:9413 train loss:3.558357 +step:9414 train loss:3.631138 +step:9415 train loss:3.451278 +step:9416 train loss:3.462301 +step:9417 train loss:3.421033 +step:9418 train loss:3.324518 +step:9419 train loss:3.459142 +step:9420 train loss:3.504658 +step:9421 train loss:3.468622 +step:9422 train loss:3.522246 +step:9423 train loss:3.515951 +step:9424 train loss:3.483671 +step:9425 train loss:3.425538 +step:9426 train loss:3.451800 +step:9427 train loss:3.489133 +step:9428 train loss:3.457658 +step:9429 train loss:3.442043 +step:9430 train loss:3.449635 +step:9431 train loss:3.481755 +step:9432 train loss:3.408702 +step:9433 train loss:3.471723 +step:9434 train loss:3.398093 +step:9435 train loss:3.482132 +step:9436 train loss:3.451200 +step:9437 train loss:3.389007 +step:9438 train loss:3.517496 +step:9439 train loss:3.481025 +step:9440 train loss:3.467390 +step:9441 train loss:3.475991 +step:9442 train 
loss:3.491229 +step:9443 train loss:3.414073 +step:9444 train loss:3.398213 +step:9445 train loss:3.494541 +step:9446 train loss:3.445749 +step:9447 train loss:3.440129 +step:9448 train loss:3.434744 +step:9449 train loss:3.432261 +step:9450 train loss:3.444245 +step:9451 train loss:3.422493 +step:9452 train loss:3.433274 +step:9453 train loss:3.436600 +step:9454 train loss:3.372383 +step:9455 train loss:3.389991 +step:9456 train loss:3.458156 +step:9457 train loss:3.448200 +step:9458 train loss:3.430072 +step:9459 train loss:3.409508 +step:9460 train loss:3.453025 +step:9461 train loss:3.423968 +step:9462 train loss:3.489468 +step:9463 train loss:3.488239 +step:9464 train loss:3.391158 +step:9465 train loss:3.486663 +step:9466 train loss:3.432650 +step:9467 train loss:3.483867 +step:9468 train loss:3.499317 +step:9469 train loss:3.444250 +step:9470 train loss:3.382836 +step:9471 train loss:3.494869 +step:9472 train loss:3.396976 +step:9473 train loss:3.425130 +step:9474 train loss:3.421340 +step:9475 train loss:3.424988 +step:9476 train loss:3.375066 +step:9477 train loss:3.389211 +step:9478 train loss:3.406253 +step:9479 train loss:3.406124 +step:9480 train loss:3.422382 +step:9481 train loss:3.419686 +step:9482 train loss:3.575413 +step:9483 train loss:3.439740 +step:9484 train loss:3.549357 +step:9485 train loss:3.451597 +step:9486 train loss:3.406165 +step:9487 train loss:3.402316 +step:9488 train loss:3.453257 +step:9489 train loss:3.389960 +step:9490 train loss:3.432255 +step:9491 train loss:3.505260 +step:9492 train loss:3.460744 +step:9493 train loss:3.491968 +step:9494 train loss:3.371563 +step:9495 train loss:3.384422 +step:9496 train loss:3.388874 +step:9497 train loss:3.434660 +step:9498 train loss:3.373014 +step:9499 train loss:3.478800 +step:9500 validation loss:3.400605 total_sharp:4.4051e-02 L1_sharp:2.5688e-01 L2_sharp:1.0271e+00 L3_sharp:7.8157e-02 L4_sharp:6.8018e-03 L5_sharp:7.8535e-03 L6_sharp:3.4911e-03 L7_sharp:2.0301e-03 L8_sharp:1.7232e-03 L9_sharp:1.2649e-03 L10_sharp:6.6745e-04 L11_sharp:5.9602e-04 L12_sharp:5.3994e-04 total_fnorm:5.4032e-01 total_l1_linf:4.4574e+03 total_spectral:5.4032e-01 L1_fnorm:9.9628e-02 L2_fnorm:5.9723e-02 L3_fnorm:5.8645e-02 L4_fnorm:9.0515e-02 L5_fnorm:1.0905e-01 L6_fnorm:1.2713e-01 L7_fnorm:1.4471e-01 L8_fnorm:1.4629e-01 L9_fnorm:1.4680e-01 L10_fnorm:1.4823e-01 L11_fnorm:1.4189e-01 L12_fnorm:1.4970e-01 L1_l1linf:1.5834e-01 L2_l1linf:1.6472e-01 L3_l1linf:1.7255e-01 L4_l1linf:1.7212e-01 L5_l1linf:1.6687e-01 L6_l1linf:1.5544e-01 L7_l1linf:1.4597e-01 L8_l1linf:1.3695e-01 L9_l1linf:1.4661e-01 L10_l1linf:1.5340e-01 L11_l1linf:1.4910e-01 L12_l1linf:1.5410e-01 L1_spectral:3.3725e-03 L2_spectral:3.5826e-03 L3_spectral:3.8213e-03 L4_spectral:5.0477e-03 L5_spectral:4.4573e-03 L6_spectral:3.4315e-03 L7_spectral:3.2207e-03 L8_spectral:3.0435e-03 L9_spectral:3.2255e-03 L10_spectral:3.3926e-03 L11_spectral:3.3126e-03 L12_spectral:3.4116e-03 v_norm:5.4032e-01 cos_v_-g_hvp:4.9788e-03 g_hvp_norm:1.0997e+00 cos_v_-g_t:1.9656e-02 g_t_norm:3.1425e+00 hv_norm:1.9214e+01 cos_v_hv:1.2388e-03 hg_norm:8.2262e+04 cos_g_hg:-2.4670e-01 v_par:7.1880e-04 v_perp:5.4032e-01 L1_cos_v_neg_g:-3.1262e-02 L1_v_norm:9.9628e-02 L2_cos_v_neg_g:-3.8303e-02 L2_v_norm:5.9723e-02 L3_cos_v_neg_g:-7.6892e-03 L3_v_norm:5.8645e-02 L4_cos_v_neg_g:2.4044e-02 L4_v_norm:9.0515e-02 L5_cos_v_neg_g:2.9012e-02 L5_v_norm:1.0905e-01 L6_cos_v_neg_g:2.6084e-02 L6_v_norm:1.2713e-01 L7_cos_v_neg_g:2.2047e-02 L7_v_norm:1.4471e-01 L8_cos_v_neg_g:2.0440e-02 L8_v_norm:1.4629e-01 
L9_cos_v_neg_g:2.0883e-02 L9_v_norm:1.4680e-01 L10_cos_v_neg_g:2.4585e-02 L10_v_norm:1.4823e-01 L11_cos_v_neg_g:3.2532e-02 L11_v_norm:1.4189e-01 L12_cos_v_neg_g:6.5130e-02 L12_v_norm:1.4970e-01 +step:9500 train loss:3.437392 +step:9501 train loss:3.498638 +step:9502 train loss:3.442530 +step:9503 train loss:3.511851 +step:9504 train loss:3.403741 +step:9505 train loss:3.401047 +step:9506 train loss:3.465070 +step:9507 train loss:3.450983 +step:9508 train loss:3.432842 +step:9509 train loss:3.479460 +step:9510 train loss:3.517143 +step:9511 train loss:3.382301 +step:9512 train loss:3.466673 +step:9513 train loss:3.446784 +step:9514 train loss:3.506317 +step:9515 train loss:3.401089 +step:9516 train loss:3.316235 +step:9517 train loss:3.383350 +step:9518 train loss:3.407095 +step:9519 train loss:3.411210 +step:9520 train loss:3.330403 +step:9521 train loss:3.423851 +step:9522 train loss:3.450020 +step:9523 train loss:3.387385 +step:9524 train loss:3.449174 +step:9525 train loss:3.437509 +step:9526 train loss:3.389954 +step:9527 train loss:3.379085 +step:9528 train loss:3.469240 +step:9529 train loss:3.365575 +step:9530 train loss:3.425050 +step:9531 train loss:3.461265 +step:9532 train loss:3.444681 +step:9533 train loss:3.429329 +step:9534 train loss:3.461667 +step:9535 train loss:3.389078 +step:9536 train loss:3.404125 +step:9537 train loss:3.493624 +step:9538 train loss:3.490025 +step:9539 train loss:3.393915 +step:9540 train loss:3.572693 +step:9541 train loss:3.391605 +step:9542 train loss:3.388380 +step:9543 train loss:3.384361 +step:9544 train loss:3.391071 +step:9545 train loss:3.362142 +step:9546 train loss:3.390541 +step:9547 train loss:3.519966 +step:9548 train loss:3.440438 +step:9549 train loss:3.431696 +step:9550 train loss:3.467795 +step:9551 train loss:3.379376 +step:9552 train loss:3.430936 +step:9553 train loss:3.474203 +step:9554 train loss:3.439715 +step:9555 train loss:3.376632 +step:9556 train loss:3.450742 +step:9557 train loss:3.418045 +step:9558 train loss:3.433796 +step:9559 train loss:3.425550 +step:9560 train loss:3.536250 +step:9561 train loss:3.416192 +step:9562 train loss:3.488881 +step:9563 train loss:3.627443 +step:9564 train loss:3.427734 +step:9565 train loss:3.416685 +step:9566 train loss:3.452839 +step:9567 train loss:3.398578 +step:9568 train loss:3.458661 +step:9569 train loss:3.461116 +step:9570 train loss:3.469328 +step:9571 train loss:3.447647 +step:9572 train loss:3.383229 +step:9573 train loss:3.546672 +step:9574 train loss:3.382402 +step:9575 train loss:3.460699 +step:9576 train loss:3.406274 +step:9577 train loss:3.397880 +step:9578 train loss:3.444786 +step:9579 train loss:3.435544 +step:9580 train loss:3.427426 +step:9581 train loss:3.467248 +step:9582 train loss:3.396399 +step:9583 train loss:3.518296 +step:9584 train loss:3.409209 +step:9585 train loss:3.433566 +step:9586 train loss:3.474226 +step:9587 train loss:3.453892 +step:9588 train loss:3.465594 +step:9589 train loss:3.438969 +step:9590 train loss:3.417422 +step:9591 train loss:3.383333 +step:9592 train loss:3.389736 +step:9593 train loss:3.432987 +step:9594 train loss:3.375813 +step:9595 train loss:3.433911 +step:9596 train loss:3.455909 +step:9597 train loss:3.427639 +step:9598 train loss:3.361269 +step:9599 train loss:3.367266 +step:9600 train loss:3.438448 +step:9601 train loss:3.403303 +step:9602 train loss:3.352382 +step:9603 train loss:3.473280 +step:9604 train loss:3.416587 +step:9605 train loss:3.416367 +step:9606 train loss:3.559648 +step:9607 train loss:3.433214 +step:9608 
train loss:3.397848 +step:9609 train loss:3.506470 +step:9610 train loss:3.425377 +step:9611 train loss:3.368122 +step:9612 train loss:3.448099 +step:9613 train loss:3.433310 +step:9614 train loss:3.439537 +step:9615 train loss:3.376395 +step:9616 train loss:3.454688 +step:9617 train loss:3.414350 +step:9618 train loss:3.386226 +step:9619 train loss:3.367432 +step:9620 train loss:3.420015 +step:9621 train loss:3.418870 +step:9622 train loss:3.482905 +step:9623 train loss:3.410897 +step:9624 train loss:3.488760 +step:9625 train loss:3.390808 +step:9626 train loss:3.396889 +step:9627 train loss:3.435972 +step:9628 train loss:3.455228 +step:9629 train loss:3.512537 +step:9630 train loss:3.443110 +step:9631 train loss:3.450948 +step:9632 train loss:3.430796 +step:9633 train loss:3.408923 +step:9634 train loss:3.500009 +step:9635 train loss:3.470255 +step:9636 train loss:3.475879 +step:9637 train loss:3.440063 +step:9638 train loss:3.419735 +step:9639 train loss:3.439510 +step:9640 train loss:3.448126 +step:9641 train loss:3.389462 +step:9642 train loss:3.456483 +step:9643 train loss:3.415416 +step:9644 train loss:3.469714 +step:9645 train loss:3.458037 +step:9646 train loss:3.468207 +step:9647 train loss:3.429770 +step:9648 train loss:3.439322 +step:9649 train loss:3.453194 +step:9650 train loss:3.422618 +step:9651 train loss:3.434419 +step:9652 train loss:3.436804 +step:9653 train loss:3.457076 +step:9654 train loss:3.460149 +step:9655 train loss:3.364366 +step:9656 train loss:3.393884 +step:9657 train loss:3.393498 +step:9658 train loss:3.443222 +step:9659 train loss:3.446975 +step:9660 train loss:3.337308 +step:9661 train loss:3.546674 +step:9662 train loss:3.374342 +step:9663 train loss:3.437866 +step:9664 train loss:3.483173 +step:9665 train loss:3.373312 +step:9666 train loss:3.542864 +step:9667 train loss:3.440531 +step:9668 train loss:3.537231 +step:9669 train loss:3.414165 +step:9670 train loss:3.442387 +step:9671 train loss:3.405511 +step:9672 train loss:3.506830 +step:9673 train loss:3.420980 +step:9674 train loss:3.515215 +step:9675 train loss:3.452616 +step:9676 train loss:3.451545 +step:9677 train loss:3.477633 +step:9678 train loss:3.402136 +step:9679 train loss:3.438162 +step:9680 train loss:3.404669 +step:9681 train loss:3.423650 +step:9682 train loss:3.412469 +step:9683 train loss:3.446275 +step:9684 train loss:3.395026 +step:9685 train loss:3.474212 +step:9686 train loss:3.441907 +step:9687 train loss:3.419737 +step:9688 train loss:3.419703 +step:9689 train loss:3.384545 +step:9690 train loss:3.440961 +step:9691 train loss:3.430965 +step:9692 train loss:3.454738 +step:9693 train loss:3.429463 +step:9694 train loss:3.453628 +step:9695 train loss:3.495803 +step:9696 train loss:3.467136 +step:9697 train loss:3.469722 +step:9698 train loss:3.417623 +step:9699 train loss:3.431962 +step:9700 train loss:3.401710 +step:9701 train loss:3.402283 +step:9702 train loss:3.405207 +step:9703 train loss:3.399461 +step:9704 train loss:3.464957 +step:9705 train loss:3.464825 +step:9706 train loss:3.412014 +step:9707 train loss:3.415229 +step:9708 train loss:3.499895 +step:9709 train loss:3.454550 +step:9710 train loss:3.465075 +step:9711 train loss:3.451024 +step:9712 train loss:3.602179 +step:9713 train loss:3.460052 +step:9714 train loss:3.438091 +step:9715 train loss:3.399048 +step:9716 train loss:3.412359 +step:9717 train loss:3.402988 +step:9718 train loss:3.467341 +step:9719 train loss:3.427796 +step:9720 train loss:3.485308 +step:9721 train loss:3.447162 +step:9722 train loss:3.396439 
+step:9723 train loss:3.389344 +step:9724 train loss:3.452345 +step:9725 train loss:3.454706 +step:9726 train loss:3.462308 +step:9727 train loss:3.421875 +step:9728 train loss:3.447551 +step:9729 train loss:3.404176 +step:9730 train loss:3.431527 +step:9731 train loss:3.431001 +step:9732 train loss:3.391424 +step:9733 train loss:3.480346 +step:9734 train loss:3.412508 +step:9735 train loss:3.474697 +step:9736 train loss:3.486207 +step:9737 train loss:3.401528 +step:9738 train loss:3.481464 +step:9739 train loss:3.429021 +step:9740 train loss:3.416320 +step:9741 train loss:3.489847 +step:9742 train loss:3.379914 +step:9743 train loss:3.434052 +step:9744 train loss:3.433117 +step:9745 train loss:3.400395 +step:9746 train loss:3.400920 +step:9747 train loss:3.396512 +step:9748 train loss:3.451529 +step:9749 train loss:3.377173 +step:9750 validation loss:3.371350 +step:9750 train loss:3.408778 +step:9751 train loss:3.480155 +step:9752 train loss:3.433135 +step:9753 train loss:3.410479 +step:9754 train loss:3.437752 +step:9755 train loss:3.363699 +step:9756 train loss:3.441325 +step:9757 train loss:3.392010 +step:9758 train loss:3.508934 +step:9759 train loss:3.428767 +step:9760 train loss:3.442818 +step:9761 train loss:3.435519 +step:9762 train loss:3.456990 +step:9763 train loss:3.442831 +step:9764 train loss:3.416956 +step:9765 train loss:3.440488 +step:9766 train loss:3.428749 +step:9767 train loss:3.387495 +step:9768 train loss:3.478434 +step:9769 train loss:3.400105 +step:9770 train loss:3.369334 +step:9771 train loss:3.446406 +step:9772 train loss:3.391471 +step:9773 train loss:3.434591 +step:9774 train loss:3.511029 +step:9775 train loss:3.437054 +step:9776 train loss:3.515241 +step:9777 train loss:3.371580 +step:9778 train loss:3.426669 +step:9779 train loss:3.429832 +step:9780 train loss:3.442312 +step:9781 train loss:3.440113 +step:9782 train loss:3.405308 +step:9783 train loss:3.453703 +step:9784 train loss:3.364957 +step:9785 train loss:3.395940 +step:9786 train loss:3.430782 +step:9787 train loss:3.441055 +step:9788 train loss:3.432475 +step:9789 train loss:3.442457 +step:9790 train loss:3.397599 +step:9791 train loss:3.446903 +step:9792 train loss:3.389242 +step:9793 train loss:3.432282 +step:9794 train loss:3.481216 +step:9795 train loss:3.459770 +step:9796 train loss:3.450983 +step:9797 train loss:3.360353 +step:9798 train loss:3.396719 +step:9799 train loss:3.488258 +step:9800 train loss:3.500324 +step:9801 train loss:3.430147 +step:9802 train loss:3.379256 +step:9803 train loss:3.398592 +step:9804 train loss:3.422474 +step:9805 train loss:3.395880 +step:9806 train loss:3.430040 +step:9807 train loss:3.424128 +step:9808 train loss:3.325648 +step:9809 train loss:3.400255 +step:9810 train loss:3.404322 +step:9811 train loss:3.453606 +step:9812 train loss:3.456021 +step:9813 train loss:3.438988 +step:9814 train loss:3.443510 +step:9815 train loss:3.420946 +step:9816 train loss:3.389699 +step:9817 train loss:3.385056 +step:9818 train loss:3.379840 +step:9819 train loss:3.459726 +step:9820 train loss:3.469069 +step:9821 train loss:3.376983 +step:9822 train loss:3.382691 +step:9823 train loss:3.444678 +step:9824 train loss:3.370124 +step:9825 train loss:3.450090 +step:9826 train loss:3.436582 +step:9827 train loss:3.419462 +step:9828 train loss:3.459279 +step:9829 train loss:3.446994 +step:9830 train loss:3.401660 +step:9831 train loss:3.364047 +step:9832 train loss:3.466436 +step:9833 train loss:3.381449 +step:9834 train loss:3.451399 +step:9835 train loss:3.428518 +step:9836 
train loss:3.412336 +step:9837 train loss:3.395967 +step:9838 train loss:3.346694 +step:9839 train loss:3.406770 +step:9840 train loss:3.375666 +step:9841 train loss:3.367733 +step:9842 train loss:3.394157 +step:9843 train loss:3.424317 +step:9844 train loss:3.468463 +step:9845 train loss:3.367598 +step:9846 train loss:3.464778 +step:9847 train loss:3.436274 +step:9848 train loss:3.391061 +step:9849 train loss:3.420518 +step:9850 train loss:3.441129 +step:9851 train loss:3.446501 +step:9852 train loss:3.417502 +step:9853 train loss:3.446780 +step:9854 train loss:3.438683 +step:9855 train loss:3.410226 +step:9856 train loss:3.510720 +step:9857 train loss:3.489991 +step:9858 train loss:3.508180 +step:9859 train loss:3.411817 +step:9860 train loss:3.433433 +step:9861 train loss:3.513417 +step:9862 train loss:3.382067 +step:9863 train loss:3.367289 +step:9864 train loss:3.433723 +step:9865 train loss:3.408402 +step:9866 train loss:3.393148 +step:9867 train loss:3.445319 +step:9868 train loss:3.466836 +step:9869 train loss:3.455061 +step:9870 train loss:3.471040 +step:9871 train loss:3.435167 +step:9872 train loss:3.418188 +step:9873 train loss:3.444213 +step:9874 train loss:3.437006 +step:9875 train loss:3.485129 +step:9876 train loss:3.454854 +step:9877 train loss:3.444545 +step:9878 train loss:3.505631 +step:9879 train loss:3.448542 +step:9880 train loss:3.351384 +step:9881 train loss:3.412092 +step:9882 train loss:3.320202 +step:9883 train loss:3.456690 +step:9884 train loss:3.404767 +step:9885 train loss:3.436759 +step:9886 train loss:3.402155 +step:9887 train loss:3.426169 +step:9888 train loss:3.557642 +step:9889 train loss:3.403707 +step:9890 train loss:3.408820 +step:9891 train loss:3.446342 +step:9892 train loss:3.440998 +step:9893 train loss:3.401854 +step:9894 train loss:3.514569 +step:9895 train loss:3.434443 +step:9896 train loss:3.388974 +step:9897 train loss:3.386330 +step:9898 train loss:3.375743 +step:9899 train loss:3.376327 +step:9900 train loss:3.352866 +step:9901 train loss:3.380279 +step:9902 train loss:3.441244 +step:9903 train loss:3.426456 +step:9904 train loss:3.351336 +step:9905 train loss:3.457628 +step:9906 train loss:3.448623 +step:9907 train loss:3.347822 +step:9908 train loss:3.430562 +step:9909 train loss:3.407820 +step:9910 train loss:3.386755 +step:9911 train loss:3.398687 +step:9912 train loss:3.421793 +step:9913 train loss:3.392439 +step:9914 train loss:3.381442 +step:9915 train loss:3.411469 +step:9916 train loss:3.441201 +step:9917 train loss:3.352457 +step:9918 train loss:3.405260 +step:9919 train loss:3.415753 +step:9920 train loss:3.398778 +step:9921 train loss:3.437010 +step:9922 train loss:3.424158 +step:9923 train loss:3.459760 +step:9924 train loss:3.436692 +step:9925 train loss:3.444265 +step:9926 train loss:3.406077 +step:9927 train loss:3.429751 +step:9928 train loss:3.422252 +step:9929 train loss:3.453682 +step:9930 train loss:3.440566 +step:9931 train loss:3.434183 +step:9932 train loss:3.434820 +step:9933 train loss:3.407421 +step:9934 train loss:3.440860 +step:9935 train loss:3.455516 +step:9936 train loss:3.402617 +step:9937 train loss:3.403781 +step:9938 train loss:3.429569 +step:9939 train loss:3.524442 +step:9940 train loss:3.377822 +step:9941 train loss:3.418122 +step:9942 train loss:3.385259 +step:9943 train loss:3.447408 +step:9944 train loss:3.497026 +step:9945 train loss:3.487713 +step:9946 train loss:3.480170 +step:9947 train loss:3.395067 +step:9948 train loss:3.387362 +step:9949 train loss:3.437746 +step:9950 train loss:3.403004 
+step:9951 train loss:3.405374 +step:9952 train loss:3.425482 +step:9953 train loss:3.461343 +step:9954 train loss:3.398043 +step:9955 train loss:3.425568 +step:9956 train loss:3.423803 +step:9957 train loss:3.398226 +step:9958 train loss:3.385063 +step:9959 train loss:3.356710 +step:9960 train loss:3.433194 +step:9961 train loss:3.475249 +step:9962 train loss:3.335818 +step:9963 train loss:3.416253 +step:9964 train loss:3.405383 +step:9965 train loss:3.385749 +step:9966 train loss:3.429117 +step:9967 train loss:3.413319 +step:9968 train loss:3.444006 +step:9969 train loss:3.410202 +step:9970 train loss:3.390589 +step:9971 train loss:3.422159 +step:9972 train loss:3.406225 +step:9973 train loss:3.390544 +step:9974 train loss:3.447150 +step:9975 train loss:3.462534 +step:9976 train loss:3.396949 +step:9977 train loss:3.388915 +step:9978 train loss:3.427120 +step:9979 train loss:3.431468 +step:9980 train loss:3.416396 +step:9981 train loss:3.442886 +step:9982 train loss:3.406355 +step:9983 train loss:3.459554 +step:9984 train loss:3.384689 +step:9985 train loss:3.397471 +step:9986 train loss:3.428419 +step:9987 train loss:3.450228 +step:9988 train loss:3.411014 +step:9989 train loss:3.422435 +step:9990 train loss:3.464776 +step:9991 train loss:3.579291 +step:9992 train loss:3.433605 +step:9993 train loss:3.405612 +step:9994 train loss:3.375104 +step:9995 train loss:3.434159 +step:9996 train loss:3.360069 +step:9997 train loss:3.393976 +step:9998 train loss:3.370408 +step:9999 train loss:3.419197 +step:10000 validation loss:3.360465 total_sharp:7.0901e-02 L1_sharp:6.3325e-01 L2_sharp:2.6610e-01 L3_sharp:3.7853e-02 L4_sharp:3.9505e-02 L5_sharp:8.5613e-03 L6_sharp:4.9671e-03 L7_sharp:3.9526e-03 L8_sharp:3.6329e-03 L9_sharp:1.9072e-03 L10_sharp:9.5694e-04 L11_sharp:8.3234e-04 L12_sharp:9.1250e-04 total_fnorm:1.0684e-03 total_l1_linf:8.7819e+00 total_spectral:1.0684e-03 L1_fnorm:1.8714e-04 L2_fnorm:1.1354e-04 L3_fnorm:1.2411e-04 L4_fnorm:1.7757e-04 L5_fnorm:2.1714e-04 L6_fnorm:2.5105e-04 L7_fnorm:2.8286e-04 L8_fnorm:2.8397e-04 L9_fnorm:2.8891e-04 L10_fnorm:2.9268e-04 L11_fnorm:2.8024e-04 L12_fnorm:2.9949e-04 L1_l1linf:2.8369e-04 L2_l1linf:3.2170e-04 L3_l1linf:3.6840e-04 L4_l1linf:3.9971e-04 L5_l1linf:3.7337e-04 L6_l1linf:3.7780e-04 L7_l1linf:3.8197e-04 L8_l1linf:4.0596e-04 L9_l1linf:4.0802e-04 L10_l1linf:3.7801e-04 L11_l1linf:3.4434e-04 L12_l1linf:3.3881e-04 L1_spectral:6.2284e-06 L2_spectral:7.1963e-06 L3_spectral:8.2389e-06 L4_spectral:1.0648e-05 L5_spectral:9.7318e-06 L6_spectral:8.3330e-06 L7_spectral:8.4750e-06 L8_spectral:8.8753e-06 L9_spectral:8.9262e-06 L10_spectral:8.2933e-06 L11_spectral:7.6638e-06 L12_spectral:7.5429e-06 v_norm:1.0684e-03 cos_v_-g_hvp:2.1670e-02 g_hvp_norm:5.2936e-01 cos_v_-g_t:7.2864e-03 g_t_norm:1.5720e+00 hv_norm:2.1398e-02 cos_v_hv:3.5400e-03 hg_norm:2.0848e+04 cos_g_hg:6.8222e-02 v_par:2.9433e-06 v_perp:1.0684e-03 L1_cos_v_neg_g:1.3302e-02 L1_v_norm:1.8714e-04 L2_cos_v_neg_g:3.8287e-03 L2_v_norm:1.1354e-04 L3_cos_v_neg_g:1.5998e-02 L3_v_norm:1.2411e-04 L4_cos_v_neg_g:1.3182e-02 L4_v_norm:1.7757e-04 L5_cos_v_neg_g:1.8505e-02 L5_v_norm:2.1714e-04 L6_cos_v_neg_g:2.3321e-02 L6_v_norm:2.5105e-04 L7_cos_v_neg_g:2.3015e-02 L7_v_norm:2.8286e-04 L8_cos_v_neg_g:2.3356e-02 L8_v_norm:2.8398e-04 L9_cos_v_neg_g:2.5934e-02 L9_v_norm:2.8891e-04 L10_cos_v_neg_g:2.9619e-02 L10_v_norm:2.9268e-04 L11_cos_v_neg_g:3.8425e-02 L11_v_norm:2.8024e-04 L12_cos_v_neg_g:6.4478e-02 L12_v_norm:2.9949e-04 diff --git 
a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/config.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/config.json new file mode 100644 index 0000000000000000000000000000000000000000..be09fb38272bd150476c0f3d54451ca976248d5c --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure_qk_nonorm_no_clip/layer_wise_new_code_rand", + "model": "d12", + "batch_size": 4, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 10000.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "shuffle_files": true, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 44, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500 + }, + "run_uuid": "693e530c-4417-4bdf-b2fa-b0da1b08f67f", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..38d12b187afd116b07333c4296629d77926cbdaa --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.420264959335327, + "total_l1_linf_norm": 20720.23046875, + "total_spectral_norm": 2.4202651977539062, + "embed_lm_head_update_fnorm": 1.3379110097885132, + "embed_lm_head_max_l1_linf_norm": 0.37328004837036133, + "embed_lm_head_max_spectral_norm": 0.2912978529930115, + "layer_1_update_fnorm": 0.607286810874939, + "layer_1_max_l1_linf_norm": 0.42722225189208984, + "layer_1_max_spectral_norm": 0.012055221013724804, + "layer_2_update_fnorm": 0.5589085221290588, + "layer_2_max_l1_linf_norm": 0.4202122688293457, + "layer_2_max_spectral_norm": 0.012064319103956223, + "layer_3_update_fnorm": 0.537213921546936, + "layer_3_max_l1_linf_norm": 0.4209657907485962, + "layer_3_max_spectral_norm": 0.012059340253472328, + "layer_4_update_fnorm": 0.547039270401001, + "layer_4_max_l1_linf_norm": 0.394218385219574, + "layer_4_max_spectral_norm": 0.01204503420740366, + "layer_5_update_fnorm": 0.5776973962783813, + "layer_5_max_l1_linf_norm": 0.39950674772262573, + "layer_5_max_spectral_norm": 0.012043493799865246, + "layer_6_update_fnorm": 0.5786569118499756, + "layer_6_max_l1_linf_norm": 0.4035523533821106, + "layer_6_max_spectral_norm": 0.012056355364620686, + "layer_7_update_fnorm": 0.5903180241584778, + "layer_7_max_l1_linf_norm": 0.40309667587280273, + "layer_7_max_spectral_norm": 0.012043040245771408, + 
"layer_8_update_fnorm": 0.5911942720413208, + "layer_8_max_l1_linf_norm": 0.4043227732181549, + "layer_8_max_spectral_norm": 0.012043028138577938, + "layer_9_update_fnorm": 0.5963476300239563, + "layer_9_max_l1_linf_norm": 0.40406614542007446, + "layer_9_max_spectral_norm": 0.012046926654875278, + "layer_10_update_fnorm": 0.5979031324386597, + "layer_10_max_l1_linf_norm": 0.40296903252601624, + "layer_10_max_spectral_norm": 0.012049154378473759, + "layer_11_update_fnorm": 0.5983462929725647, + "layer_11_max_l1_linf_norm": 0.4019870162010193, + "layer_11_max_spectral_norm": 0.012043618597090244, + "layer_12_update_fnorm": 0.6007912158966064, + "layer_12_max_l1_linf_norm": 0.39783698320388794, + "layer_12_max_spectral_norm": 0.012042006477713585, + "block0_q_update_fnorm": 0.2476375550031662, + "block0_q_max_l1_linf_norm": 0.2077496498823166, + "block0_q_max_spectral_norm": 0.01203979179263115, + "block0_k_update_fnorm": 0.24807077646255493, + "block0_k_max_l1_linf_norm": 0.20890244841575623, + "block0_k_max_spectral_norm": 0.01204119436442852, + "block0_v_update_fnorm": 0.23700550198554993, + "block0_v_max_l1_linf_norm": 0.20393821597099304, + "block0_v_max_spectral_norm": 0.012039748951792717, + "block0_o_update_fnorm": 0.24176056683063507, + "block0_o_max_l1_linf_norm": 0.20434267818927765, + "block0_o_max_spectral_norm": 0.01203934010118246, + "block0_mlp_win_update_fnorm": 0.25413304567337036, + "block0_mlp_win_max_l1_linf_norm": 0.14722031354904175, + "block0_mlp_win_max_spectral_norm": 0.012055221013724804, + "block0_mlp_wout_update_fnorm": 0.2580987811088562, + "block0_mlp_wout_max_l1_linf_norm": 0.42722225189208984, + "block0_mlp_wout_max_spectral_norm": 0.012047355063259602, + "block3_q_update_fnorm": 0.2101326584815979, + "block3_q_max_l1_linf_norm": 0.21379989385604858, + "block3_q_max_spectral_norm": 0.012031913734972477, + "block3_k_update_fnorm": 0.1801643669605255, + "block3_k_max_l1_linf_norm": 0.21433794498443604, + "block3_k_max_spectral_norm": 0.012032558210194111, + "block3_v_update_fnorm": 0.1985885351896286, + "block3_v_max_l1_linf_norm": 0.20472903549671173, + "block3_v_max_spectral_norm": 0.012035289779305458, + "block3_o_update_fnorm": 0.22995302081108093, + "block3_o_max_l1_linf_norm": 0.19391587376594543, + "block3_o_max_spectral_norm": 0.01204333920031786, + "block3_mlp_win_update_fnorm": 0.26916244626045227, + "block3_mlp_win_max_l1_linf_norm": 0.19064101576805115, + "block3_mlp_win_max_spectral_norm": 0.01204503420740366, + "block3_mlp_wout_update_fnorm": 0.24034442007541656, + "block3_mlp_wout_max_l1_linf_norm": 0.394218385219574, + "block3_mlp_wout_max_spectral_norm": 0.011394831351935863, + "block7_q_update_fnorm": 0.2395796775817871, + "block7_q_max_l1_linf_norm": 0.21734465658664703, + "block7_q_max_spectral_norm": 0.012039423920214176, + "block7_k_update_fnorm": 0.23834672570228577, + "block7_k_max_l1_linf_norm": 0.2176668345928192, + "block7_k_max_spectral_norm": 0.012041364796459675, + "block7_v_update_fnorm": 0.23580215871334076, + "block7_v_max_l1_linf_norm": 0.20771043002605438, + "block7_v_max_spectral_norm": 0.012043028138577938, + "block7_o_update_fnorm": 0.24697275459766388, + "block7_o_max_l1_linf_norm": 0.2090151607990265, + "block7_o_max_spectral_norm": 0.012041536159813404, + "block7_mlp_win_update_fnorm": 0.24261285364627838, + "block7_mlp_win_max_l1_linf_norm": 0.152042418718338, + "block7_mlp_win_max_spectral_norm": 0.011498424224555492, + "block7_mlp_wout_update_fnorm": 0.24441257119178772, + "block7_mlp_wout_max_l1_linf_norm": 
0.4043227732181549, + "block7_mlp_wout_max_spectral_norm": 0.01136432308703661, + "block11_q_update_fnorm": 0.247995063662529, + "block11_q_max_l1_linf_norm": 0.21161115169525146, + "block11_q_max_spectral_norm": 0.012037674896419048, + "block11_k_update_fnorm": 0.2482757717370987, + "block11_k_max_l1_linf_norm": 0.2110171616077423, + "block11_k_max_spectral_norm": 0.012040381319820881, + "block11_v_update_fnorm": 0.24489636719226837, + "block11_v_max_l1_linf_norm": 0.208087757229805, + "block11_v_max_spectral_norm": 0.0120398486033082, + "block11_o_update_fnorm": 0.24786080420017242, + "block11_o_max_l1_linf_norm": 0.20744240283966064, + "block11_o_max_spectral_norm": 0.012042006477713585, + "block11_mlp_win_update_fnorm": 0.24183955788612366, + "block11_mlp_win_max_l1_linf_norm": 0.1515951007604599, + "block11_mlp_win_max_spectral_norm": 0.011407546699047089, + "block11_mlp_wout_update_fnorm": 0.24043241143226624, + "block11_mlp_wout_max_l1_linf_norm": 0.39783698320388794, + "block11_mlp_wout_max_spectral_norm": 0.01138265896588564, + "total_sharpness": 0.008836549706757069, + "block_total_sharpness": 0.011376108974218369, + "v_norm_block": 2.01684832572937, + "v_T_H_v_block": 0.0462743416428566, + "v_norm": 2.420264959335327, + "ip_v_neg_g_hvp": 0.0744582861661911, + "cos_v_neg_g_hvp": 0.05753175541758537, + "g_hvp_norm": 0.5347397923469543, + "ip_v_neg_g_t": 0.074612095952034, + "cos_v_neg_g_t": 0.06429950892925262, + "g_t_norm": 0.47944483160972595, + "g_norm": 0.5347397923469543, + "hv_norm": 0.7417309880256653, + "cos_v_hv": 0.028833622112870216, + "hg_norm": 13.485848426818848, + "cos_g_hg": 0.5663636326789856, + "v_parallel_norm": 0.007935401983559132, + "v_perp_norm": 2.4202520847320557, + "embed_lm_head_v_norm": 1.3379110097885132, + "embed_lm_head_cos_v_neg_g": 0.11564847081899643, + "layer_1_v_norm": 0.607286810874939, + "layer_1_cos_v_neg_g": 0.05289197340607643, + "layer_2_v_norm": 0.5589085221290588, + "layer_2_cos_v_neg_g": 0.04133853688836098, + "layer_3_v_norm": 0.5372139811515808, + "layer_3_cos_v_neg_g": 0.03974657878279686, + "layer_4_v_norm": 0.547039270401001, + "layer_4_cos_v_neg_g": 0.04608471319079399, + "layer_5_v_norm": 0.5776973962783813, + "layer_5_cos_v_neg_g": 0.05622696876525879, + "layer_6_v_norm": 0.5786568522453308, + "layer_6_cos_v_neg_g": 0.05845779925584793, + "layer_7_v_norm": 0.5903180241584778, + "layer_7_cos_v_neg_g": 0.06264299154281616, + "layer_8_v_norm": 0.5911942720413208, + "layer_8_cos_v_neg_g": 0.06355760991573334, + "layer_9_v_norm": 0.5963476300239563, + "layer_9_cos_v_neg_g": 0.060843706130981445, + "layer_10_v_norm": 0.5979031324386597, + "layer_10_cos_v_neg_g": 0.06611096113920212, + "layer_11_v_norm": 0.5983462333679199, + "layer_11_cos_v_neg_g": 0.07554740458726883, + "layer_12_v_norm": 0.6007912158966064, + "layer_12_cos_v_neg_g": 0.07881957292556763, + "block0_q_v_norm": 0.2476375550031662, + "block0_q_cos_v_neg_g": 0.07842277735471725, + "block0_k_v_norm": 0.24807077646255493, + "block0_k_cos_v_neg_g": 0.08501500636339188, + "block0_v_v_norm": 0.23700550198554993, + "block0_v_cos_v_neg_g": 0.043904561549425125, + "block0_o_v_norm": 0.24176056683063507, + "block0_o_cos_v_neg_g": 0.05952088162302971, + "block0_mlp_win_v_norm": 0.25413304567337036, + "block0_mlp_win_cos_v_neg_g": 0.08361098170280457, + "block0_mlp_wout_v_norm": 0.2580987811088562, + "block0_mlp_wout_cos_v_neg_g": 0.09644706547260284, + "block3_q_v_norm": 0.2101326584815979, + "block3_q_cos_v_neg_g": 0.05969712510704994, + "block3_k_v_norm": 0.1801643669605255, + 
"block3_k_cos_v_neg_g": 0.050884269177913666, + "block3_v_v_norm": 0.1985885351896286, + "block3_v_cos_v_neg_g": 0.0393306240439415, + "block3_o_v_norm": 0.22995302081108093, + "block3_o_cos_v_neg_g": 0.0646250993013382, + "block3_mlp_win_v_norm": 0.26916244626045227, + "block3_mlp_win_cos_v_neg_g": 0.06117134913802147, + "block3_mlp_wout_v_norm": 0.24034442007541656, + "block3_mlp_wout_cos_v_neg_g": 0.09092433750629425, + "block7_q_v_norm": 0.2395796775817871, + "block7_q_cos_v_neg_g": 0.07392630726099014, + "block7_k_v_norm": 0.23834672570228577, + "block7_k_cos_v_neg_g": 0.08328374475240707, + "block7_v_v_norm": 0.23580215871334076, + "block7_v_cos_v_neg_g": 0.048127416521310806, + "block7_o_v_norm": 0.24697275459766388, + "block7_o_cos_v_neg_g": 0.07447317242622375, + "block7_mlp_win_v_norm": 0.24261285364627838, + "block7_mlp_win_cos_v_neg_g": 0.09791580587625504, + "block7_mlp_wout_v_norm": 0.24441257119178772, + "block7_mlp_wout_cos_v_neg_g": 0.13307799398899078, + "block11_q_v_norm": 0.247995063662529, + "block11_q_cos_v_neg_g": 0.09734322875738144, + "block11_k_v_norm": 0.2482757717370987, + "block11_k_cos_v_neg_g": 0.1017773374915123, + "block11_v_v_norm": 0.24489636719226837, + "block11_v_cos_v_neg_g": 0.0711805447936058, + "block11_o_v_norm": 0.24786080420017242, + "block11_o_cos_v_neg_g": 0.09017936885356903, + "block11_mlp_win_v_norm": 0.24183955788612366, + "block11_mlp_win_cos_v_neg_g": 0.10614701360464096, + "block11_mlp_wout_v_norm": 0.24043241143226624, + "block11_mlp_wout_cos_v_neg_g": 0.09006444364786148, + "embed_lm_head_sharpness": 0.0006306685390882194, + "layer_1_sharpness": 0.0063314782455563545, + "layer_2_sharpness": 0.0008535328670404851, + "layer_3_sharpness": 0.0018060726579278708, + "layer_4_sharpness": 0.0018477868288755417, + "layer_5_sharpness": 0.0021030944772064686, + "layer_6_sharpness": 0.002216267166659236, + "layer_7_sharpness": 0.002340595703572035, + "layer_8_sharpness": 0.00164677738212049, + "layer_9_sharpness": 0.00086189218563959, + "layer_10_sharpness": 0.000617676938418299, + "layer_11_sharpness": 0.0006888004718348384, + "layer_12_sharpness": 0.0006625990499742329, + "block0_q_sharpness": 0.0001231495989486575, + "block0_k_sharpness": 0.00011882451508427039, + "block0_v_sharpness": 0.008238974958658218, + "block0_o_sharpness": 0.0010105161927640438, + "block0_mlp_win_sharpness": 0.002370433649048209, + "block0_mlp_wout_sharpness": 0.002466470468789339, + "block3_q_sharpness": 0.00010560348891885951, + "block3_k_sharpness": 0.0027917998377233744, + "block3_v_sharpness": 0.002485630102455616, + "block3_o_sharpness": 0.0005890052998438478, + "block3_mlp_win_sharpness": 0.00016423870692960918, + "block3_mlp_wout_sharpness": 0.00017735060828272253, + "block7_q_sharpness": 0.0004667376633733511, + "block7_k_sharpness": 0.00037838050047867, + "block7_v_sharpness": 0.0018312997417524457, + "block7_o_sharpness": 0.00025381348677910864, + "block7_mlp_win_sharpness": 0.0004236694658175111, + "block7_mlp_wout_sharpness": 0.00025431750691495836, + "block11_q_sharpness": 2.772202788037248e-05, + "block11_k_sharpness": 5.055016663391143e-05, + "block11_v_sharpness": 0.00021761568496003747, + "block11_o_sharpness": 0.00010069616109831259, + "block11_mlp_win_sharpness": 0.0003785965673159808, + "block11_mlp_wout_sharpness": 0.0007809912785887718, + "sum_layer_numerators": 0.007524121551906602, + "block_diag_sharpness": 0.001849734194555369, + "cross_layer_sharpness": 0.009526374779663 +} \ No newline at end of file diff --git 
a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_10000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..b63ed9cf5f69d6fdd4dadc413f374ceeeed45949 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_10000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.001088015385903418, + "total_l1_linf_norm": 9.034649848937988, + "total_spectral_norm": 0.0010880155023187399, + "embed_lm_head_update_fnorm": 0.000660622667055577, + "embed_lm_head_max_l1_linf_norm": 0.00017876236233860254, + "embed_lm_head_max_spectral_norm": 0.0001124303526012227, + "layer_1_update_fnorm": 0.00024876263341866434, + "layer_1_max_l1_linf_norm": 0.00020246111671440303, + "layer_1_max_spectral_norm": 6.039070740371244e-06, + "layer_2_update_fnorm": 0.00013222743291407824, + "layer_2_max_l1_linf_norm": 0.0002635781711433083, + "layer_2_max_spectral_norm": 6.019557531544706e-06, + "layer_3_update_fnorm": 0.00014247938815969974, + "layer_3_max_l1_linf_norm": 0.0003151031560264528, + "layer_3_max_spectral_norm": 7.058132723614108e-06, + "layer_4_update_fnorm": 0.00017612420197110623, + "layer_4_max_l1_linf_norm": 0.0003872811794281006, + "layer_4_max_spectral_norm": 9.479209438723046e-06, + "layer_5_update_fnorm": 0.0002041154948528856, + "layer_5_max_l1_linf_norm": 0.0003567128733266145, + "layer_5_max_spectral_norm": 9.514439625490922e-06, + "layer_6_update_fnorm": 0.0002626843343023211, + "layer_6_max_l1_linf_norm": 0.0003574589209165424, + "layer_6_max_spectral_norm": 7.830802132957615e-06, + "layer_7_update_fnorm": 0.0002842726244125515, + "layer_7_max_l1_linf_norm": 0.0003538333694450557, + "layer_7_max_spectral_norm": 7.891340828791726e-06, + "layer_8_update_fnorm": 0.0002895998186431825, + "layer_8_max_l1_linf_norm": 0.00036034610820934176, + "layer_8_max_spectral_norm": 7.923748853500001e-06, + "layer_9_update_fnorm": 0.00029149287729524076, + "layer_9_max_l1_linf_norm": 0.0003618074697442353, + "layer_9_max_spectral_norm": 8.027842341107316e-06, + "layer_10_update_fnorm": 0.000294562109047547, + "layer_10_max_l1_linf_norm": 0.00033864134456962347, + "layer_10_max_spectral_norm": 7.531617939093849e-06, + "layer_11_update_fnorm": 0.00028294429648667574, + "layer_11_max_l1_linf_norm": 0.0003195968456566334, + "layer_11_max_spectral_norm": 7.1171762101585045e-06, + "layer_12_update_fnorm": 0.0002992169465869665, + "layer_12_max_l1_linf_norm": 0.00031823740573599935, + "layer_12_max_spectral_norm": 7.115884727681987e-06, + "block0_q_update_fnorm": 0.00011581298895180225, + "block0_q_max_l1_linf_norm": 0.00010436940647196025, + "block0_q_max_spectral_norm": 6.035856131347828e-06, + "block0_k_update_fnorm": 0.00011361343058524653, + "block0_k_max_l1_linf_norm": 0.00010457133612362668, + "block0_k_max_spectral_norm": 6.039070740371244e-06, + "block0_v_update_fnorm": 8.950188930612057e-05, + "block0_v_max_l1_linf_norm": 9.202929504681379e-05, + "block0_v_max_spectral_norm": 6.017009582137689e-06, + "block0_o_update_fnorm": 9.648276318330318e-05, + "block0_o_max_l1_linf_norm": 8.737316238693893e-05, + "block0_o_max_spectral_norm": 6.016455699864309e-06, + "block0_mlp_win_update_fnorm": 9.809061157284304e-05, + "block0_mlp_win_max_l1_linf_norm": 9.414309170097113e-05, + "block0_mlp_win_max_spectral_norm": 6.027202289260458e-06, + 
"block0_mlp_wout_update_fnorm": 9.254858014173806e-05, + "block0_mlp_wout_max_l1_linf_norm": 0.0001469815179007128, + "block0_mlp_wout_max_spectral_norm": 6.02400723437313e-06, + "block3_q_update_fnorm": 1.886822428787127e-05, + "block3_q_max_l1_linf_norm": 3.7891033571213484e-05, + "block3_q_max_spectral_norm": 5.9201147450949065e-06, + "block3_k_update_fnorm": 2.052064701274503e-05, + "block3_k_max_l1_linf_norm": 4.4906610128236935e-05, + "block3_k_max_spectral_norm": 5.990366389596602e-06, + "block3_v_update_fnorm": 5.172913006390445e-05, + "block3_v_max_l1_linf_norm": 6.511797255370766e-05, + "block3_v_max_spectral_norm": 6.017301075189607e-06, + "block3_o_update_fnorm": 6.245735130505636e-05, + "block3_o_max_l1_linf_norm": 5.3246676543494686e-05, + "block3_o_max_spectral_norm": 6.021136414346984e-06, + "block3_mlp_win_update_fnorm": 8.618136052973568e-05, + "block3_mlp_win_max_l1_linf_norm": 9.266461711376905e-05, + "block3_mlp_win_max_spectral_norm": 6.027099516359158e-06, + "block3_mlp_wout_update_fnorm": 0.00012622952635865659, + "block3_mlp_wout_max_l1_linf_norm": 0.0002024041605181992, + "block3_mlp_wout_max_spectral_norm": 6.035873411747161e-06, + "block7_q_update_fnorm": 0.00011749553959816694, + "block7_q_max_l1_linf_norm": 0.00010436458978801966, + "block7_q_max_spectral_norm": 6.041600954631576e-06, + "block7_k_update_fnorm": 0.00012241268996149302, + "block7_k_max_l1_linf_norm": 0.00010365841444581747, + "block7_k_max_spectral_norm": 6.039313575456617e-06, + "block7_v_update_fnorm": 8.198736031772569e-05, + "block7_v_max_l1_linf_norm": 9.691205195849761e-05, + "block7_v_max_spectral_norm": 6.036113518348429e-06, + "block7_o_update_fnorm": 0.00012342078844085336, + "block7_o_max_l1_linf_norm": 0.0001039319031406194, + "block7_o_max_spectral_norm": 6.04021261096932e-06, + "block7_mlp_win_update_fnorm": 0.00013668162864632905, + "block7_mlp_win_max_l1_linf_norm": 8.628900104667991e-05, + "block7_mlp_win_max_spectral_norm": 6.044013389328029e-06, + "block7_mlp_wout_update_fnorm": 0.00011958312097704038, + "block7_mlp_wout_max_l1_linf_norm": 0.00019823104958049953, + "block7_mlp_wout_max_spectral_norm": 5.713327936973656e-06, + "block11_q_update_fnorm": 0.00012366010923869908, + "block11_q_max_l1_linf_norm": 0.00010884425137192011, + "block11_q_max_spectral_norm": 6.02580803388264e-06, + "block11_k_update_fnorm": 0.00012434863310772926, + "block11_k_max_l1_linf_norm": 0.00011166770127601922, + "block11_k_max_spectral_norm": 6.022305115038762e-06, + "block11_v_update_fnorm": 0.00012070316006429493, + "block11_v_max_l1_linf_norm": 0.00010266597382724285, + "block11_v_max_spectral_norm": 6.038823812559713e-06, + "block11_o_update_fnorm": 0.00012402335414662957, + "block11_o_max_l1_linf_norm": 0.00010380702588008717, + "block11_o_max_spectral_norm": 6.0296124502201565e-06, + "block11_mlp_win_update_fnorm": 0.0001173157143057324, + "block11_mlp_win_max_l1_linf_norm": 8.099983097054064e-05, + "block11_mlp_win_max_spectral_norm": 5.6862522797018755e-06, + "block11_mlp_wout_update_fnorm": 0.00012227478146087378, + "block11_mlp_wout_max_l1_linf_norm": 0.00022443577472586185, + "block11_mlp_wout_max_spectral_norm": 6.0100064729340374e-06, + "total_sharpness": 0.010202804580330849, + "block_total_sharpness": 0.016221659258008003, + "v_norm_block": 0.0008644972112961113, + "v_T_H_v_block": 1.2123344106385048e-08, + "v_norm": 0.0010880158515647054, + "ip_v_neg_g_hvp": 1.2906952179037035e-05, + "cos_v_neg_g_hvp": 0.025715691968798637, + "g_hvp_norm": 0.46130722761154175, + "ip_v_neg_g_t": 
1.783440529834479e-05, + "cos_v_neg_g_t": 0.02657085470855236, + "g_t_norm": 0.6169044375419617, + "g_norm": 0.46130722761154175, + "hv_norm": 0.0029491563327610493, + "cos_v_hv": 0.003764059627428651, + "hg_norm": 2478.93408203125, + "cos_g_hg": -0.029606202617287636, + "v_parallel_norm": 2.945024107248173e-06, + "v_perp_norm": 0.0010880123591050506, + "embed_lm_head_v_norm": 0.0006606234237551689, + "embed_lm_head_cos_v_neg_g": 0.05212100222706795, + "layer_1_v_norm": 0.00024876464158296585, + "layer_1_cos_v_neg_g": 0.009262265637516975, + "layer_2_v_norm": 0.0001322312164120376, + "layer_2_cos_v_neg_g": 0.016967525705695152, + "layer_3_v_norm": 0.00014248289517126977, + "layer_3_cos_v_neg_g": 0.024125976487994194, + "layer_4_v_norm": 0.00017612703959457576, + "layer_4_cos_v_neg_g": 0.022599948570132256, + "layer_5_v_norm": 0.00020411793957464397, + "layer_5_cos_v_neg_g": 0.022891957312822342, + "layer_6_v_norm": 0.0002626862551551312, + "layer_6_cos_v_neg_g": 0.023905431851744652, + "layer_7_v_norm": 0.0002842743997462094, + "layer_7_cos_v_neg_g": 0.024764589965343475, + "layer_8_v_norm": 0.00028960153576917946, + "layer_8_cos_v_neg_g": 0.024523694068193436, + "layer_9_v_norm": 0.0002914945944212377, + "layer_9_cos_v_neg_g": 0.026354921981692314, + "layer_10_v_norm": 0.00029456382617354393, + "layer_10_cos_v_neg_g": 0.029511556029319763, + "layer_11_v_norm": 0.00028294610092416406, + "layer_11_cos_v_neg_g": 0.037805598229169846, + "layer_12_v_norm": 0.000299218634609133, + "layer_12_cos_v_neg_g": 0.06677436083555222, + "block0_q_v_norm": 0.00011581731087062508, + "block0_q_cos_v_neg_g": 0.003469460643827915, + "block0_k_v_norm": 0.00011361783253960311, + "block0_k_cos_v_neg_g": 0.003445676062256098, + "block0_v_v_norm": 8.950747724156827e-05, + "block0_v_cos_v_neg_g": 0.007627957500517368, + "block0_o_v_norm": 9.648794366512448e-05, + "block0_o_cos_v_neg_g": 0.00919001828879118, + "block0_mlp_win_v_norm": 9.809571201913059e-05, + "block0_mlp_win_cos_v_neg_g": 0.015942974016070366, + "block0_mlp_wout_v_norm": 9.255397890228778e-05, + "block0_mlp_wout_cos_v_neg_g": 0.029105080291628838, + "block3_q_v_norm": 1.8894705135608092e-05, + "block3_q_cos_v_neg_g": -0.002419603057205677, + "block3_k_v_norm": 2.05449978238903e-05, + "block3_k_cos_v_neg_g": 0.05510314926505089, + "block3_v_v_norm": 5.173879617359489e-05, + "block3_v_cos_v_neg_g": 0.024460380896925926, + "block3_o_v_norm": 6.246535485843197e-05, + "block3_o_cos_v_neg_g": 0.021555671468377113, + "block3_mlp_win_v_norm": 8.618715946795419e-05, + "block3_mlp_win_cos_v_neg_g": 0.02388894557952881, + "block3_mlp_wout_v_norm": 0.0001262334844795987, + "block3_mlp_wout_cos_v_neg_g": 0.03917960450053215, + "block7_q_v_norm": 0.00011749979603337124, + "block7_q_cos_v_neg_g": 0.02981020137667656, + "block7_k_v_norm": 0.00012241677904967219, + "block7_k_cos_v_neg_g": 0.07104320824146271, + "block7_v_v_norm": 8.199345757020637e-05, + "block7_v_cos_v_neg_g": 0.019599048420786858, + "block7_o_v_norm": 0.00012342483387328684, + "block7_o_cos_v_neg_g": 0.07811260968446732, + "block7_mlp_win_v_norm": 0.00013668528117705137, + "block7_mlp_win_cos_v_neg_g": 0.026330778375267982, + "block7_mlp_wout_v_norm": 0.00011958729737671092, + "block7_mlp_wout_cos_v_neg_g": 0.10985865443944931, + "block11_q_v_norm": 0.00012366415467113256, + "block11_q_cos_v_neg_g": 0.0653134137392044, + "block11_k_v_norm": 0.00012435264943633229, + "block11_k_cos_v_neg_g": 0.08884192258119583, + "block11_v_v_norm": 0.00012070730736013502, + "block11_v_cos_v_neg_g": 
0.038264572620391846, + "block11_o_v_norm": 0.00012402738502714783, + "block11_o_cos_v_neg_g": 0.08185035735368729, + "block11_mlp_win_v_norm": 0.00011731997801689431, + "block11_mlp_win_cos_v_neg_g": 0.08655618131160736, + "block11_mlp_wout_v_norm": 0.00012227887054905295, + "block11_mlp_wout_cos_v_neg_g": 0.07967892289161682, + "embed_lm_head_sharpness": 0.00012921060260850936, + "layer_1_sharpness": 0.01018849853426218, + "layer_2_sharpness": 0.017307041212916374, + "layer_3_sharpness": 0.020725633949041367, + "layer_4_sharpness": 0.015529551543295383, + "layer_5_sharpness": 0.007348486687988043, + "layer_6_sharpness": 0.003514521522447467, + "layer_7_sharpness": 0.003309049643576145, + "layer_8_sharpness": 0.002366789383813739, + "layer_9_sharpness": 0.0018344823038205504, + "layer_10_sharpness": 0.0008132180664688349, + "layer_11_sharpness": 0.0007050490821711719, + "layer_12_sharpness": 0.0008082650019787252, + "block0_q_sharpness": -0.0008236616267822683, + "block0_k_sharpness": -0.0019157049246132374, + "block0_v_sharpness": 0.05159910023212433, + "block0_o_sharpness": 0.0028645910788327456, + "block0_mlp_win_sharpness": 0.0033143910113722086, + "block0_mlp_wout_sharpness": 0.006559945177286863, + "block3_q_sharpness": 6.346029113046825e-05, + "block3_k_sharpness": 0.012475087307393551, + "block3_v_sharpness": 0.015003159642219543, + "block3_o_sharpness": 0.0070779151283204556, + "block3_mlp_win_sharpness": 0.00840276200324297, + "block3_mlp_wout_sharpness": 0.0016134968027472496, + "block7_q_sharpness": 0.00013407404185272753, + "block7_k_sharpness": 0.00013393598783295602, + "block7_v_sharpness": 0.007897457107901573, + "block7_o_sharpness": 9.120763570535928e-05, + "block7_mlp_win_sharpness": 0.001188574475236237, + "block7_mlp_wout_sharpness": 0.0001281352451769635, + "block11_q_sharpness": 0.00010178105003433302, + "block11_k_sharpness": 6.180921627674252e-05, + "block11_v_sharpness": 0.00016077165491878986, + "block11_o_sharpness": 4.9516944272909313e-05, + "block11_mlp_win_sharpness": 0.0005713713471777737, + "block11_mlp_wout_sharpness": 0.0011453048791736364, + "sum_layer_numerators": 3.205457463741386e-09, + "block_diag_sharpness": 0.004289066944849232, + "cross_layer_sharpness": 0.011932592313158772 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..7866949c6437b6eecec2fd259a4d2ed9f35e471c --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_1500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.404176712036133, + "total_l1_linf_norm": 20566.9375, + "total_spectral_norm": 2.404176712036133, + "embed_lm_head_update_fnorm": 1.3272491693496704, + "embed_lm_head_max_l1_linf_norm": 0.33891117572784424, + "embed_lm_head_max_spectral_norm": 0.27564737200737, + "layer_1_update_fnorm": 0.5947783589363098, + "layer_1_max_l1_linf_norm": 0.4290948510169983, + "layer_1_max_spectral_norm": 0.012049337849020958, + "layer_2_update_fnorm": 0.5195087194442749, + "layer_2_max_l1_linf_norm": 0.40862488746643066, + "layer_2_max_spectral_norm": 0.012040439061820507, + "layer_3_update_fnorm": 0.5199149250984192, + "layer_3_max_l1_linf_norm": 0.4077576994895935, + "layer_3_max_spectral_norm": 
0.012046548537909985, + "layer_4_update_fnorm": 0.5464586019515991, + "layer_4_max_l1_linf_norm": 0.39501142501831055, + "layer_4_max_spectral_norm": 0.01204915065318346, + "layer_5_update_fnorm": 0.5798744559288025, + "layer_5_max_l1_linf_norm": 0.4068715572357178, + "layer_5_max_spectral_norm": 0.012047024443745613, + "layer_6_update_fnorm": 0.5836203694343567, + "layer_6_max_l1_linf_norm": 0.40821969509124756, + "layer_6_max_spectral_norm": 0.012062153778970242, + "layer_7_update_fnorm": 0.5941206812858582, + "layer_7_max_l1_linf_norm": 0.40988457202911377, + "layer_7_max_spectral_norm": 0.012043129652738571, + "layer_8_update_fnorm": 0.5959949493408203, + "layer_8_max_l1_linf_norm": 0.4123774766921997, + "layer_8_max_spectral_norm": 0.012046522460877895, + "layer_9_update_fnorm": 0.5984171032905579, + "layer_9_max_l1_linf_norm": 0.4110386371612549, + "layer_9_max_spectral_norm": 0.012043513357639313, + "layer_10_update_fnorm": 0.5997675657272339, + "layer_10_max_l1_linf_norm": 0.4104965329170227, + "layer_10_max_spectral_norm": 0.012042968533933163, + "layer_11_update_fnorm": 0.5998514294624329, + "layer_11_max_l1_linf_norm": 0.4030265212059021, + "layer_11_max_spectral_norm": 0.012042188085615635, + "layer_12_update_fnorm": 0.6026557087898254, + "layer_12_max_l1_linf_norm": 0.3977199196815491, + "layer_12_max_spectral_norm": 0.012043679133057594, + "block0_q_update_fnorm": 0.2465597242116928, + "block0_q_max_l1_linf_norm": 0.21101367473602295, + "block0_q_max_spectral_norm": 0.012044472619891167, + "block0_k_update_fnorm": 0.24695663154125214, + "block0_k_max_l1_linf_norm": 0.20713286101818085, + "block0_k_max_spectral_norm": 0.012041378766298294, + "block0_v_update_fnorm": 0.19680200517177582, + "block0_v_max_l1_linf_norm": 0.20897018909454346, + "block0_v_max_spectral_norm": 0.012029342353343964, + "block0_o_update_fnorm": 0.2338569611310959, + "block0_o_max_l1_linf_norm": 0.1964579075574875, + "block0_o_max_spectral_norm": 0.012040538713335991, + "block0_mlp_win_update_fnorm": 0.2651343047618866, + "block0_mlp_win_max_l1_linf_norm": 0.15306991338729858, + "block0_mlp_win_max_spectral_norm": 0.012044860050082207, + "block0_mlp_wout_update_fnorm": 0.2610522508621216, + "block0_mlp_wout_max_l1_linf_norm": 0.4290948510169983, + "block0_mlp_wout_max_spectral_norm": 0.012049337849020958, + "block3_q_update_fnorm": 0.20565494894981384, + "block3_q_max_l1_linf_norm": 0.21220895648002625, + "block3_q_max_spectral_norm": 0.01203786302357912, + "block3_k_update_fnorm": 0.18867816030979156, + "block3_k_max_l1_linf_norm": 0.20889247953891754, + "block3_k_max_spectral_norm": 0.012040204368531704, + "block3_v_update_fnorm": 0.19190089404582977, + "block3_v_max_l1_linf_norm": 0.20232990384101868, + "block3_v_max_spectral_norm": 0.012031912803649902, + "block3_o_update_fnorm": 0.23420079052448273, + "block3_o_max_l1_linf_norm": 0.1988789141178131, + "block3_o_max_spectral_norm": 0.01204040925949812, + "block3_mlp_win_update_fnorm": 0.266922265291214, + "block3_mlp_win_max_l1_linf_norm": 0.19254419207572937, + "block3_mlp_win_max_spectral_norm": 0.01204915065318346, + "block3_mlp_wout_update_fnorm": 0.2401140332221985, + "block3_mlp_wout_max_l1_linf_norm": 0.39501142501831055, + "block3_mlp_wout_max_spectral_norm": 0.011399143375456333, + "block7_q_update_fnorm": 0.2436571568250656, + "block7_q_max_l1_linf_norm": 0.2110472023487091, + "block7_q_max_spectral_norm": 0.012042930349707603, + "block7_k_update_fnorm": 0.24329519271850586, + "block7_k_max_l1_linf_norm": 0.21720978617668152, + 
"block7_k_max_spectral_norm": 0.012041337788105011, + "block7_v_update_fnorm": 0.23382915556430817, + "block7_v_max_l1_linf_norm": 0.20908507704734802, + "block7_v_max_spectral_norm": 0.012041650712490082, + "block7_o_update_fnorm": 0.24810820817947388, + "block7_o_max_l1_linf_norm": 0.20724040269851685, + "block7_o_max_spectral_norm": 0.012046522460877895, + "block7_mlp_win_update_fnorm": 0.24264614284038544, + "block7_mlp_win_max_l1_linf_norm": 0.1473357230424881, + "block7_mlp_win_max_spectral_norm": 0.01182837039232254, + "block7_mlp_wout_update_fnorm": 0.24784409999847412, + "block7_mlp_wout_max_l1_linf_norm": 0.4123774766921997, + "block7_mlp_wout_max_spectral_norm": 0.011385242454707623, + "block11_q_update_fnorm": 0.2484433948993683, + "block11_q_max_l1_linf_norm": 0.21107083559036255, + "block11_q_max_spectral_norm": 0.012040847912430763, + "block11_k_update_fnorm": 0.24921013414859772, + "block11_k_max_l1_linf_norm": 0.21223482489585876, + "block11_k_max_spectral_norm": 0.012042132206261158, + "block11_v_update_fnorm": 0.2464466094970703, + "block11_v_max_l1_linf_norm": 0.20621857047080994, + "block11_v_max_spectral_norm": 0.012042277492582798, + "block11_o_update_fnorm": 0.24919436872005463, + "block11_o_max_l1_linf_norm": 0.21059733629226685, + "block11_o_max_spectral_norm": 0.012043679133057594, + "block11_mlp_win_update_fnorm": 0.24334119260311127, + "block11_mlp_win_max_l1_linf_norm": 0.16474056243896484, + "block11_mlp_win_max_spectral_norm": 0.011368620209395885, + "block11_mlp_wout_update_fnorm": 0.23916210234165192, + "block11_mlp_wout_max_l1_linf_norm": 0.3977199196815491, + "block11_mlp_wout_max_spectral_norm": 0.011384894140064716, + "total_sharpness": 0.009492932818830013, + "block_total_sharpness": 0.012220649048686028, + "v_norm_block": 2.0046136379241943, + "v_T_H_v_block": 0.04910837858915329, + "v_norm": 2.404176712036133, + "ip_v_neg_g_hvp": 0.07420071959495544, + "cos_v_neg_g_hvp": 0.050184465944767, + "g_hvp_norm": 0.6149961948394775, + "ip_v_neg_g_t": 0.07462121546268463, + "cos_v_neg_g_t": 0.05510789155960083, + "g_t_norm": 0.563225269317627, + "g_norm": 0.6149961948394775, + "hv_norm": 0.9699245691299438, + "cos_v_hv": 0.023530373349785805, + "hg_norm": 30.95500373840332, + "cos_g_hg": 0.4499667286872864, + "v_parallel_norm": 0.006662032566964626, + "v_perp_norm": 2.404167413711548, + "embed_lm_head_v_norm": 1.3272491693496704, + "embed_lm_head_cos_v_neg_g": 0.10093697905540466, + "layer_1_v_norm": 0.5947783589363098, + "layer_1_cos_v_neg_g": 0.040372662246227264, + "layer_2_v_norm": 0.5195087194442749, + "layer_2_cos_v_neg_g": 0.04706532508134842, + "layer_3_v_norm": 0.5199149250984192, + "layer_3_cos_v_neg_g": 0.04265698045492172, + "layer_4_v_norm": 0.5464586019515991, + "layer_4_cos_v_neg_g": 0.046613819897174835, + "layer_5_v_norm": 0.5798744559288025, + "layer_5_cos_v_neg_g": 0.051804523915052414, + "layer_6_v_norm": 0.5836203694343567, + "layer_6_cos_v_neg_g": 0.052584122866392136, + "layer_7_v_norm": 0.5941206812858582, + "layer_7_cos_v_neg_g": 0.052412427961826324, + "layer_8_v_norm": 0.5959949493408203, + "layer_8_cos_v_neg_g": 0.052801817655563354, + "layer_9_v_norm": 0.5984171032905579, + "layer_9_cos_v_neg_g": 0.055898673832416534, + "layer_10_v_norm": 0.5997675657272339, + "layer_10_cos_v_neg_g": 0.0627235621213913, + "layer_11_v_norm": 0.5998514294624329, + "layer_11_cos_v_neg_g": 0.072148397564888, + "layer_12_v_norm": 0.6026557087898254, + "layer_12_cos_v_neg_g": 0.09640049934387207, + "block0_q_v_norm": 0.2465597242116928, + 
"block0_q_cos_v_neg_g": 0.10165169090032578, + "block0_k_v_norm": 0.24695663154125214, + "block0_k_cos_v_neg_g": 0.10941299051046371, + "block0_v_v_norm": 0.19680200517177582, + "block0_v_cos_v_neg_g": 0.03467781841754913, + "block0_o_v_norm": 0.2338569611310959, + "block0_o_cos_v_neg_g": 0.0634324923157692, + "block0_mlp_win_v_norm": 0.2651343047618866, + "block0_mlp_win_cos_v_neg_g": 0.07506114989519119, + "block0_mlp_wout_v_norm": 0.2610522508621216, + "block0_mlp_wout_cos_v_neg_g": 0.10162253677845001, + "block3_q_v_norm": 0.20565494894981384, + "block3_q_cos_v_neg_g": 0.0625138059258461, + "block3_k_v_norm": 0.18867816030979156, + "block3_k_cos_v_neg_g": 0.06875654309988022, + "block3_v_v_norm": 0.19190089404582977, + "block3_v_cos_v_neg_g": 0.04080302640795708, + "block3_o_v_norm": 0.23420079052448273, + "block3_o_cos_v_neg_g": 0.07215580344200134, + "block3_mlp_win_v_norm": 0.266922265291214, + "block3_mlp_win_cos_v_neg_g": 0.06241511553525925, + "block3_mlp_wout_v_norm": 0.2401140332221985, + "block3_mlp_wout_cos_v_neg_g": 0.10836340487003326, + "block7_q_v_norm": 0.2436571568250656, + "block7_q_cos_v_neg_g": 0.0698859840631485, + "block7_k_v_norm": 0.24329519271850586, + "block7_k_cos_v_neg_g": 0.08932751417160034, + "block7_v_v_norm": 0.23382915556430817, + "block7_v_cos_v_neg_g": 0.04170912131667137, + "block7_o_v_norm": 0.24810820817947388, + "block7_o_cos_v_neg_g": 0.07953336089849472, + "block7_mlp_win_v_norm": 0.24264614284038544, + "block7_mlp_win_cos_v_neg_g": 0.08518832921981812, + "block7_mlp_wout_v_norm": 0.24784409999847412, + "block7_mlp_wout_cos_v_neg_g": 0.14641578495502472, + "block11_q_v_norm": 0.2484433948993683, + "block11_q_cos_v_neg_g": 0.10445814579725266, + "block11_k_v_norm": 0.24921013414859772, + "block11_k_cos_v_neg_g": 0.1070055291056633, + "block11_v_v_norm": 0.2464466094970703, + "block11_v_cos_v_neg_g": 0.0750250592827797, + "block11_o_v_norm": 0.24919436872005463, + "block11_o_cos_v_neg_g": 0.10756808519363403, + "block11_mlp_win_v_norm": 0.24334119260311127, + "block11_mlp_win_cos_v_neg_g": 0.13631606101989746, + "block11_mlp_wout_v_norm": 0.23916210234165192, + "block11_mlp_wout_cos_v_neg_g": 0.11229289323091507, + "embed_lm_head_sharpness": 0.0005782688385806978, + "layer_1_sharpness": 0.01212252676486969, + "layer_2_sharpness": 0.004726111423224211, + "layer_3_sharpness": 0.002687735017389059, + "layer_4_sharpness": 0.001824838574975729, + "layer_5_sharpness": 0.0015700801741331816, + "layer_6_sharpness": 0.0019213989144191146, + "layer_7_sharpness": 0.0017767903627827764, + "layer_8_sharpness": 0.0013302412116900086, + "layer_9_sharpness": 0.0008754126029089093, + "layer_10_sharpness": 0.0005535500822588801, + "layer_11_sharpness": 0.0005748710827901959, + "layer_12_sharpness": 0.0005524351145140827, + "block0_q_sharpness": 0.0016391556710004807, + "block0_k_sharpness": 0.0013584032421931624, + "block0_v_sharpness": 0.015419090166687965, + "block0_o_sharpness": 0.0015607448294758797, + "block0_mlp_win_sharpness": 0.0023516553919762373, + "block0_mlp_wout_sharpness": 0.0022671043407171965, + "block3_q_sharpness": 0.000286459835479036, + "block3_k_sharpness": 0.002098771510645747, + "block3_v_sharpness": 0.003319665789604187, + "block3_o_sharpness": 0.00034211514866910875, + "block3_mlp_win_sharpness": 0.00019066508684772998, + "block3_mlp_wout_sharpness": 0.00012361542030703276, + "block7_q_sharpness": 0.0001488085399614647, + "block7_k_sharpness": 0.00021208054386079311, + "block7_v_sharpness": 0.002127759624272585, + "block7_o_sharpness": 
0.0001848968240665272, + "block7_mlp_win_sharpness": 0.000471900129923597, + "block7_mlp_wout_sharpness": 0.00017275837308261544, + "block11_q_sharpness": 5.189191870158538e-05, + "block11_k_sharpness": 6.130630936240777e-05, + "block11_v_sharpness": 0.00016266453894786537, + "block11_o_sharpness": 6.932493124622852e-05, + "block11_mlp_win_sharpness": 0.00041604888974688947, + "block11_mlp_wout_sharpness": 0.0006529017118737102, + "sum_layer_numerators": 0.010037649671549787, + "block_diag_sharpness": 0.0024978748355906446, + "cross_layer_sharpness": 0.009722774213095383 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..2e009b1f550828ebc8e28644d1769cc9ffe1b31b --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.396167278289795, + "total_l1_linf_norm": 20498.14453125, + "total_spectral_norm": 2.396167516708374, + "embed_lm_head_update_fnorm": 1.3202900886535645, + "embed_lm_head_max_l1_linf_norm": 0.3447968661785126, + "embed_lm_head_max_spectral_norm": 0.2470567226409912, + "layer_1_update_fnorm": 0.5884466767311096, + "layer_1_max_l1_linf_norm": 0.43535977602005005, + "layer_1_max_spectral_norm": 0.01205347292125225, + "layer_2_update_fnorm": 0.5116452574729919, + "layer_2_max_l1_linf_norm": 0.40476173162460327, + "layer_2_max_spectral_norm": 0.012048748321831226, + "layer_3_update_fnorm": 0.510837972164154, + "layer_3_max_l1_linf_norm": 0.3968157172203064, + "layer_3_max_spectral_norm": 0.012040392495691776, + "layer_4_update_fnorm": 0.5495195388793945, + "layer_4_max_l1_linf_norm": 0.397685170173645, + "layer_4_max_spectral_norm": 0.012045402079820633, + "layer_5_update_fnorm": 0.5815387964248657, + "layer_5_max_l1_linf_norm": 0.40909484028816223, + "layer_5_max_spectral_norm": 0.012044004164636135, + "layer_6_update_fnorm": 0.5845109224319458, + "layer_6_max_l1_linf_norm": 0.4123333692550659, + "layer_6_max_spectral_norm": 0.0120625589042902, + "layer_7_update_fnorm": 0.5968261361122131, + "layer_7_max_l1_linf_norm": 0.4144711196422577, + "layer_7_max_spectral_norm": 0.012067040428519249, + "layer_8_update_fnorm": 0.5966382026672363, + "layer_8_max_l1_linf_norm": 0.41288846731185913, + "layer_8_max_spectral_norm": 0.012058527208864689, + "layer_9_update_fnorm": 0.5972806215286255, + "layer_9_max_l1_linf_norm": 0.416190505027771, + "layer_9_max_spectral_norm": 0.012043662369251251, + "layer_10_update_fnorm": 0.5976732969284058, + "layer_10_max_l1_linf_norm": 0.4090890884399414, + "layer_10_max_spectral_norm": 0.012044291011989117, + "layer_11_update_fnorm": 0.597813606262207, + "layer_11_max_l1_linf_norm": 0.40620896220207214, + "layer_11_max_spectral_norm": 0.012045810930430889, + "layer_12_update_fnorm": 0.6034218668937683, + "layer_12_max_l1_linf_norm": 0.39944541454315186, + "layer_12_max_spectral_norm": 0.012049352750182152, + "block0_q_update_fnorm": 0.24508239328861237, + "block0_q_max_l1_linf_norm": 0.20977464318275452, + "block0_q_max_spectral_norm": 0.012044829316437244, + "block0_k_update_fnorm": 0.24539954960346222, + "block0_k_max_l1_linf_norm": 0.20675525069236755, + "block0_k_max_spectral_norm": 0.012040662579238415, + 
"block0_v_update_fnorm": 0.17429043352603912, + "block0_v_max_l1_linf_norm": 0.2105305790901184, + "block0_v_max_spectral_norm": 0.012031195685267448, + "block0_o_update_fnorm": 0.22874364256858826, + "block0_o_max_l1_linf_norm": 0.19216188788414001, + "block0_o_max_spectral_norm": 0.012047926895320415, + "block0_mlp_win_update_fnorm": 0.27118828892707825, + "block0_mlp_win_max_l1_linf_norm": 0.16318967938423157, + "block0_mlp_win_max_spectral_norm": 0.01205347292125225, + "block0_mlp_wout_update_fnorm": 0.26391151547431946, + "block0_mlp_wout_max_l1_linf_norm": 0.43535977602005005, + "block0_mlp_wout_max_spectral_norm": 0.0120443906635046, + "block3_q_update_fnorm": 0.20751892030239105, + "block3_q_max_l1_linf_norm": 0.21236008405685425, + "block3_q_max_spectral_norm": 0.012045402079820633, + "block3_k_update_fnorm": 0.19516023993492126, + "block3_k_max_l1_linf_norm": 0.21510857343673706, + "block3_k_max_spectral_norm": 0.012039680033922195, + "block3_v_update_fnorm": 0.1895708590745926, + "block3_v_max_l1_linf_norm": 0.2028961330652237, + "block3_v_max_spectral_norm": 0.012033645063638687, + "block3_o_update_fnorm": 0.23638032376766205, + "block3_o_max_l1_linf_norm": 0.20286408066749573, + "block3_o_max_spectral_norm": 0.012039379216730595, + "block3_mlp_win_update_fnorm": 0.26602834463119507, + "block3_mlp_win_max_l1_linf_norm": 0.183834046125412, + "block3_mlp_win_max_spectral_norm": 0.012044482864439487, + "block3_mlp_wout_update_fnorm": 0.24098080396652222, + "block3_mlp_wout_max_l1_linf_norm": 0.397685170173645, + "block3_mlp_wout_max_spectral_norm": 0.011397271417081356, + "block7_q_update_fnorm": 0.24293465912342072, + "block7_q_max_l1_linf_norm": 0.20921042561531067, + "block7_q_max_spectral_norm": 0.012044759467244148, + "block7_k_update_fnorm": 0.24518805742263794, + "block7_k_max_l1_linf_norm": 0.21687453985214233, + "block7_k_max_spectral_norm": 0.012038406915962696, + "block7_v_update_fnorm": 0.227500781416893, + "block7_v_max_l1_linf_norm": 0.2083244025707245, + "block7_v_max_spectral_norm": 0.012040870264172554, + "block7_o_update_fnorm": 0.24796098470687866, + "block7_o_max_l1_linf_norm": 0.20786146819591522, + "block7_o_max_spectral_norm": 0.0120431212708354, + "block7_mlp_win_update_fnorm": 0.24877063930034637, + "block7_mlp_win_max_l1_linf_norm": 0.14824488759040833, + "block7_mlp_win_max_spectral_norm": 0.012058527208864689, + "block7_mlp_wout_update_fnorm": 0.2481827437877655, + "block7_mlp_wout_max_l1_linf_norm": 0.41288846731185913, + "block7_mlp_wout_max_spectral_norm": 0.011391297914087772, + "block11_q_update_fnorm": 0.24940809607505798, + "block11_q_max_l1_linf_norm": 0.2093649059534073, + "block11_q_max_spectral_norm": 0.012042593210935593, + "block11_k_update_fnorm": 0.24970673024654388, + "block11_k_max_l1_linf_norm": 0.2098284363746643, + "block11_k_max_spectral_norm": 0.012037646025419235, + "block11_v_update_fnorm": 0.24674636125564575, + "block11_v_max_l1_linf_norm": 0.20759183168411255, + "block11_v_max_spectral_norm": 0.012036966159939766, + "block11_o_update_fnorm": 0.24898724257946014, + "block11_o_max_l1_linf_norm": 0.2076956033706665, + "block11_o_max_spectral_norm": 0.012049352750182152, + "block11_mlp_win_update_fnorm": 0.24367524683475494, + "block11_mlp_win_max_l1_linf_norm": 0.17323139309883118, + "block11_mlp_win_max_spectral_norm": 0.011368121020495892, + "block11_mlp_wout_update_fnorm": 0.239089235663414, + "block11_mlp_wout_max_l1_linf_norm": 0.39944541454315186, + "block11_mlp_wout_max_spectral_norm": 0.011641241610050201, + 
"total_sharpness": 0.00816325843334198, + "block_total_sharpness": 0.010536200366914272, + "v_norm_block": 1.9996135234832764, + "v_T_H_v_block": 0.042128514498472214, + "v_norm": 2.396167278289795, + "ip_v_neg_g_hvp": 0.062037162482738495, + "cos_v_neg_g_hvp": 0.040396105498075485, + "g_hvp_norm": 0.6409074068069458, + "ip_v_neg_g_t": 0.062378186732530594, + "cos_v_neg_g_t": 0.04484356939792633, + "g_t_norm": 0.5805176496505737, + "g_norm": 0.6409074068069458, + "hv_norm": 0.9188031554222107, + "cos_v_hv": 0.021289145573973656, + "hg_norm": 28.504566192626953, + "cos_g_hg": 0.6052826046943665, + "v_parallel_norm": 0.006323051638901234, + "v_perp_norm": 2.3961589336395264, + "embed_lm_head_v_norm": 1.3202900886535645, + "embed_lm_head_cos_v_neg_g": 0.07199116796255112, + "layer_1_v_norm": 0.5884466767311096, + "layer_1_cos_v_neg_g": 0.029077032580971718, + "layer_2_v_norm": 0.5116452574729919, + "layer_2_cos_v_neg_g": 0.04069242626428604, + "layer_3_v_norm": 0.510837972164154, + "layer_3_cos_v_neg_g": 0.0359453447163105, + "layer_4_v_norm": 0.5495195388793945, + "layer_4_cos_v_neg_g": 0.037740275263786316, + "layer_5_v_norm": 0.5815387964248657, + "layer_5_cos_v_neg_g": 0.04416332393884659, + "layer_6_v_norm": 0.5845109224319458, + "layer_6_cos_v_neg_g": 0.04503170773386955, + "layer_7_v_norm": 0.5968261361122131, + "layer_7_cos_v_neg_g": 0.04449835792183876, + "layer_8_v_norm": 0.5966382026672363, + "layer_8_cos_v_neg_g": 0.04509764909744263, + "layer_9_v_norm": 0.5972806215286255, + "layer_9_cos_v_neg_g": 0.04893798008561134, + "layer_10_v_norm": 0.5976732969284058, + "layer_10_cos_v_neg_g": 0.05298268795013428, + "layer_11_v_norm": 0.597813606262207, + "layer_11_cos_v_neg_g": 0.0607638955116272, + "layer_12_v_norm": 0.6034218668937683, + "layer_12_cos_v_neg_g": 0.08677932620048523, + "block0_q_v_norm": 0.24508239328861237, + "block0_q_cos_v_neg_g": 0.08534916490316391, + "block0_k_v_norm": 0.24539954960346222, + "block0_k_cos_v_neg_g": 0.08478745073080063, + "block0_v_v_norm": 0.17429043352603912, + "block0_v_cos_v_neg_g": 0.030283134430646896, + "block0_o_v_norm": 0.22874364256858826, + "block0_o_cos_v_neg_g": 0.05651683360338211, + "block0_mlp_win_v_norm": 0.27118828892707825, + "block0_mlp_win_cos_v_neg_g": 0.06283022463321686, + "block0_mlp_wout_v_norm": 0.26391151547431946, + "block0_mlp_wout_cos_v_neg_g": 0.07797862589359283, + "block3_q_v_norm": 0.20751892030239105, + "block3_q_cos_v_neg_g": 0.041585199534893036, + "block3_k_v_norm": 0.19516023993492126, + "block3_k_cos_v_neg_g": 0.05503986030817032, + "block3_v_v_norm": 0.1895708590745926, + "block3_v_cos_v_neg_g": 0.038147129118442535, + "block3_o_v_norm": 0.23638032376766205, + "block3_o_cos_v_neg_g": 0.0681215301156044, + "block3_mlp_win_v_norm": 0.26602834463119507, + "block3_mlp_win_cos_v_neg_g": 0.049491267651319504, + "block3_mlp_wout_v_norm": 0.24098080396652222, + "block3_mlp_wout_cos_v_neg_g": 0.10252926498651505, + "block7_q_v_norm": 0.24293465912342072, + "block7_q_cos_v_neg_g": 0.05734309181571007, + "block7_k_v_norm": 0.24518805742263794, + "block7_k_cos_v_neg_g": 0.08475713431835175, + "block7_v_v_norm": 0.227500781416893, + "block7_v_cos_v_neg_g": 0.04158809036016464, + "block7_o_v_norm": 0.24796098470687866, + "block7_o_cos_v_neg_g": 0.07829897105693817, + "block7_mlp_win_v_norm": 0.24877063930034637, + "block7_mlp_win_cos_v_neg_g": 0.07156062871217728, + "block7_mlp_wout_v_norm": 0.2481827437877655, + "block7_mlp_wout_cos_v_neg_g": 0.13323408365249634, + "block11_q_v_norm": 0.24940809607505798, + 
"block11_q_cos_v_neg_g": 0.09663964062929153, + "block11_k_v_norm": 0.24970673024654388, + "block11_k_cos_v_neg_g": 0.10340792685747147, + "block11_v_v_norm": 0.24674636125564575, + "block11_v_cos_v_neg_g": 0.07600625604391098, + "block11_o_v_norm": 0.24898724257946014, + "block11_o_cos_v_neg_g": 0.10278112441301346, + "block11_mlp_win_v_norm": 0.24367524683475494, + "block11_mlp_win_cos_v_neg_g": 0.11991474032402039, + "block11_mlp_wout_v_norm": 0.239089235663414, + "block11_mlp_wout_cos_v_neg_g": 0.09905853122472763, + "embed_lm_head_sharpness": 0.0005165440961718559, + "layer_1_sharpness": 0.009276346303522587, + "layer_2_sharpness": 0.0030768767464905977, + "layer_3_sharpness": 0.0023015213664621115, + "layer_4_sharpness": 0.0013351152883842587, + "layer_5_sharpness": 0.001375789986923337, + "layer_6_sharpness": 0.0018052567029371858, + "layer_7_sharpness": 0.0019522052025422454, + "layer_8_sharpness": 0.0017809951677918434, + "layer_9_sharpness": 0.0010590769816190004, + "layer_10_sharpness": 0.0006186729879118502, + "layer_11_sharpness": 0.000654999224934727, + "layer_12_sharpness": 0.0011463833507150412, + "block0_q_sharpness": 0.0016471869312226772, + "block0_k_sharpness": 0.0009781067492440343, + "block0_v_sharpness": 0.023785272613167763, + "block0_o_sharpness": 0.0012388875475153327, + "block0_mlp_win_sharpness": 0.0017207054188475013, + "block0_mlp_wout_sharpness": 0.0010965286055579782, + "block3_q_sharpness": 0.00010371702956035733, + "block3_k_sharpness": 0.000652568822260946, + "block3_v_sharpness": 0.0033047061879187822, + "block3_o_sharpness": 0.0003221530932933092, + "block3_mlp_win_sharpness": 0.00016130911535583436, + "block3_mlp_wout_sharpness": 9.060547017725185e-05, + "block7_q_sharpness": 0.0001213634095620364, + "block7_k_sharpness": 0.00015453995729330927, + "block7_v_sharpness": 0.003895337227731943, + "block7_o_sharpness": 0.00015024036110844463, + "block7_mlp_win_sharpness": 0.0006592809222638607, + "block7_mlp_wout_sharpness": 0.00013991372543387115, + "block11_q_sharpness": 3.891078813467175e-05, + "block11_k_sharpness": 4.4732430978911e-05, + "block11_v_sharpness": 0.00019374371913727373, + "block11_o_sharpness": 6.810419290559366e-05, + "block11_mlp_win_sharpness": 0.000646893517114222, + "block11_mlp_wout_sharpness": 0.002283174078911543, + "sum_layer_numerators": 0.008683083706195062, + "block_diag_sharpness": 0.0021716101217741637, + "cross_layer_sharpness": 0.008364590245140108 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..ab5340100eeef834b6098ed4c148d3865097e3be --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_2500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.398477792739868, + "total_l1_linf_norm": 20519.19140625, + "total_spectral_norm": 2.398477554321289, + "embed_lm_head_update_fnorm": 1.3401840925216675, + "embed_lm_head_max_l1_linf_norm": 0.35589152574539185, + "embed_lm_head_max_spectral_norm": 0.2213902622461319, + "layer_1_update_fnorm": 0.5864013433456421, + "layer_1_max_l1_linf_norm": 0.44299495220184326, + "layer_1_max_spectral_norm": 0.012051028199493885, + "layer_2_update_fnorm": 0.49333158135414124, + "layer_2_max_l1_linf_norm": 
0.4139810800552368, + "layer_2_max_spectral_norm": 0.012069799937307835, + "layer_3_update_fnorm": 0.4787745475769043, + "layer_3_max_l1_linf_norm": 0.39314255118370056, + "layer_3_max_spectral_norm": 0.01522454060614109, + "layer_4_update_fnorm": 0.5502219796180725, + "layer_4_max_l1_linf_norm": 0.4001847803592682, + "layer_4_max_spectral_norm": 0.012046626769006252, + "layer_5_update_fnorm": 0.5848572254180908, + "layer_5_max_l1_linf_norm": 0.4070858359336853, + "layer_5_max_spectral_norm": 0.012045511044561863, + "layer_6_update_fnorm": 0.5872504711151123, + "layer_6_max_l1_linf_norm": 0.4101170003414154, + "layer_6_max_spectral_norm": 0.012048461474478245, + "layer_7_update_fnorm": 0.5982955098152161, + "layer_7_max_l1_linf_norm": 0.4087429642677307, + "layer_7_max_spectral_norm": 0.012068921700119972, + "layer_8_update_fnorm": 0.5977628827095032, + "layer_8_max_l1_linf_norm": 0.4132162928581238, + "layer_8_max_spectral_norm": 0.012055646628141403, + "layer_9_update_fnorm": 0.5978825688362122, + "layer_9_max_l1_linf_norm": 0.4133129119873047, + "layer_9_max_spectral_norm": 0.012044219300150871, + "layer_10_update_fnorm": 0.5976496934890747, + "layer_10_max_l1_linf_norm": 0.4109601378440857, + "layer_10_max_spectral_norm": 0.01204411219805479, + "layer_11_update_fnorm": 0.5969665050506592, + "layer_11_max_l1_linf_norm": 0.40678393840789795, + "layer_11_max_spectral_norm": 0.01204822026193142, + "layer_12_update_fnorm": 0.6034663915634155, + "layer_12_max_l1_linf_norm": 0.3958076238632202, + "layer_12_max_spectral_norm": 0.012043307535350323, + "block0_q_update_fnorm": 0.2454487383365631, + "block0_q_max_l1_linf_norm": 0.22073468565940857, + "block0_q_max_spectral_norm": 0.012039878405630589, + "block0_k_update_fnorm": 0.24493667483329773, + "block0_k_max_l1_linf_norm": 0.21510300040245056, + "block0_k_max_spectral_norm": 0.012043021619319916, + "block0_v_update_fnorm": 0.16115473210811615, + "block0_v_max_l1_linf_norm": 0.20128455758094788, + "block0_v_max_spectral_norm": 0.012029758654534817, + "block0_o_update_fnorm": 0.22520875930786133, + "block0_o_max_l1_linf_norm": 0.19287563860416412, + "block0_o_max_spectral_norm": 0.012042909860610962, + "block0_mlp_win_update_fnorm": 0.2741796374320984, + "block0_mlp_win_max_l1_linf_norm": 0.1662103831768036, + "block0_mlp_win_max_spectral_norm": 0.012051028199493885, + "block0_mlp_wout_update_fnorm": 0.2676747739315033, + "block0_mlp_wout_max_l1_linf_norm": 0.44299495220184326, + "block0_mlp_wout_max_spectral_norm": 0.012047134339809418, + "block3_q_update_fnorm": 0.2064283937215805, + "block3_q_max_l1_linf_norm": 0.21159258484840393, + "block3_q_max_spectral_norm": 0.012040269561111927, + "block3_k_update_fnorm": 0.19687627255916595, + "block3_k_max_l1_linf_norm": 0.21249565482139587, + "block3_k_max_spectral_norm": 0.012038758024573326, + "block3_v_update_fnorm": 0.1887294352054596, + "block3_v_max_l1_linf_norm": 0.20000869035720825, + "block3_v_max_spectral_norm": 0.012034294195473194, + "block3_o_update_fnorm": 0.23742777109146118, + "block3_o_max_l1_linf_norm": 0.1989845633506775, + "block3_o_max_spectral_norm": 0.0120400944724679, + "block3_mlp_win_update_fnorm": 0.26604679226875305, + "block3_mlp_win_max_l1_linf_norm": 0.183086097240448, + "block3_mlp_win_max_spectral_norm": 0.012046626769006252, + "block3_mlp_wout_update_fnorm": 0.24173516035079956, + "block3_mlp_wout_max_l1_linf_norm": 0.4001847803592682, + "block3_mlp_wout_max_spectral_norm": 0.011390985921025276, + "block7_q_update_fnorm": 0.24258647859096527, + 
"block7_q_max_l1_linf_norm": 0.20645107328891754, + "block7_q_max_spectral_norm": 0.012042715214192867, + "block7_k_update_fnorm": 0.2454080879688263, + "block7_k_max_l1_linf_norm": 0.21142619848251343, + "block7_k_max_spectral_norm": 0.012040385976433754, + "block7_v_update_fnorm": 0.22806431353092194, + "block7_v_max_l1_linf_norm": 0.2088702917098999, + "block7_v_max_spectral_norm": 0.01203698106110096, + "block7_o_update_fnorm": 0.247970849275589, + "block7_o_max_l1_linf_norm": 0.2079852819442749, + "block7_o_max_spectral_norm": 0.012046102434396744, + "block7_mlp_win_update_fnorm": 0.2506806254386902, + "block7_mlp_win_max_l1_linf_norm": 0.1520671546459198, + "block7_mlp_win_max_spectral_norm": 0.012055646628141403, + "block7_mlp_wout_update_fnorm": 0.2485750913619995, + "block7_mlp_wout_max_l1_linf_norm": 0.4132162928581238, + "block7_mlp_wout_max_spectral_norm": 0.011388488113880157, + "block11_q_update_fnorm": 0.24897131323814392, + "block11_q_max_l1_linf_norm": 0.2095324695110321, + "block11_q_max_spectral_norm": 0.012036759406328201, + "block11_k_update_fnorm": 0.2496328204870224, + "block11_k_max_l1_linf_norm": 0.21243320405483246, + "block11_k_max_spectral_norm": 0.012043307535350323, + "block11_v_update_fnorm": 0.24651195108890533, + "block11_v_max_l1_linf_norm": 0.2071218192577362, + "block11_v_max_spectral_norm": 0.0120391258969903, + "block11_o_update_fnorm": 0.24893292784690857, + "block11_o_max_l1_linf_norm": 0.2077639400959015, + "block11_o_max_spectral_norm": 0.012038183398544788, + "block11_mlp_win_update_fnorm": 0.24547839164733887, + "block11_mlp_win_max_l1_linf_norm": 0.16826468706130981, + "block11_mlp_win_max_spectral_norm": 0.011374758556485176, + "block11_mlp_wout_update_fnorm": 0.2381732165813446, + "block11_mlp_wout_max_l1_linf_norm": 0.3958076238632202, + "block11_mlp_wout_max_spectral_norm": 0.01138617005199194, + "total_sharpness": 0.006639120168983936, + "block_total_sharpness": 0.00865782517939806, + "v_norm_block": 1.9891204833984375, + "v_T_H_v_block": 0.03425555303692818, + "v_norm": 2.398477792739868, + "ip_v_neg_g_hvp": 0.05726982653141022, + "cos_v_neg_g_hvp": 0.038090892136096954, + "g_hvp_norm": 0.6268578171730042, + "ip_v_neg_g_t": 0.05783175304532051, + "cos_v_neg_g_t": 0.04295934736728668, + "g_t_norm": 0.5612714886665344, + "g_norm": 0.6268578171730042, + "hv_norm": 0.8496294617652893, + "cos_v_hv": 0.0187420304864645, + "hg_norm": 31.973106384277344, + "cos_g_hg": 0.547687292098999, + "v_parallel_norm": 0.006247909273952246, + "v_perp_norm": 2.3984696865081787, + "embed_lm_head_v_norm": 1.3401840925216675, + "embed_lm_head_cos_v_neg_g": 0.0750366598367691, + "layer_1_v_norm": 0.5864013433456421, + "layer_1_cos_v_neg_g": 0.029897402971982956, + "layer_2_v_norm": 0.49333158135414124, + "layer_2_cos_v_neg_g": 0.03945867344737053, + "layer_3_v_norm": 0.4787745177745819, + "layer_3_cos_v_neg_g": 0.03381085395812988, + "layer_4_v_norm": 0.5502219796180725, + "layer_4_cos_v_neg_g": 0.03406454995274544, + "layer_5_v_norm": 0.5848572254180908, + "layer_5_cos_v_neg_g": 0.0383748859167099, + "layer_6_v_norm": 0.5872504711151123, + "layer_6_cos_v_neg_g": 0.03915161266922951, + "layer_7_v_norm": 0.5982955098152161, + "layer_7_cos_v_neg_g": 0.04012781009078026, + "layer_8_v_norm": 0.5977628827095032, + "layer_8_cos_v_neg_g": 0.04051178693771362, + "layer_9_v_norm": 0.5978825688362122, + "layer_9_cos_v_neg_g": 0.042038436979055405, + "layer_10_v_norm": 0.5976496934890747, + "layer_10_cos_v_neg_g": 0.049176327884197235, + "layer_11_v_norm": 
0.5969664454460144, + "layer_11_cos_v_neg_g": 0.05828635394573212, + "layer_12_v_norm": 0.6034663915634155, + "layer_12_cos_v_neg_g": 0.08709774166345596, + "block0_q_v_norm": 0.2454487383365631, + "block0_q_cos_v_neg_g": 0.08635369688272476, + "block0_k_v_norm": 0.24493667483329773, + "block0_k_cos_v_neg_g": 0.10123366117477417, + "block0_v_v_norm": 0.16115473210811615, + "block0_v_cos_v_neg_g": 0.036650728434324265, + "block0_o_v_norm": 0.22520875930786133, + "block0_o_cos_v_neg_g": 0.058880604803562164, + "block0_mlp_win_v_norm": 0.2741796374320984, + "block0_mlp_win_cos_v_neg_g": 0.053157705813646317, + "block0_mlp_wout_v_norm": 0.2676747739315033, + "block0_mlp_wout_cos_v_neg_g": 0.07626786828041077, + "block3_q_v_norm": 0.2064283937215805, + "block3_q_cos_v_neg_g": 0.03728882968425751, + "block3_k_v_norm": 0.19687627255916595, + "block3_k_cos_v_neg_g": 0.058553919196128845, + "block3_v_v_norm": 0.1887294352054596, + "block3_v_cos_v_neg_g": 0.03328011929988861, + "block3_o_v_norm": 0.23742777109146118, + "block3_o_cos_v_neg_g": 0.06513679772615433, + "block3_mlp_win_v_norm": 0.26604679226875305, + "block3_mlp_win_cos_v_neg_g": 0.0417165644466877, + "block3_mlp_wout_v_norm": 0.24173516035079956, + "block3_mlp_wout_cos_v_neg_g": 0.10218881070613861, + "block7_q_v_norm": 0.24258647859096527, + "block7_q_cos_v_neg_g": 0.04722020402550697, + "block7_k_v_norm": 0.2454080879688263, + "block7_k_cos_v_neg_g": 0.08509974926710129, + "block7_v_v_norm": 0.22806431353092194, + "block7_v_cos_v_neg_g": 0.03461311757564545, + "block7_o_v_norm": 0.247970849275589, + "block7_o_cos_v_neg_g": 0.07712599635124207, + "block7_mlp_win_v_norm": 0.2506806254386902, + "block7_mlp_win_cos_v_neg_g": 0.059153828769922256, + "block7_mlp_wout_v_norm": 0.2485750913619995, + "block7_mlp_wout_cos_v_neg_g": 0.1320916712284088, + "block11_q_v_norm": 0.24897131323814392, + "block11_q_cos_v_neg_g": 0.09773647040128708, + "block11_k_v_norm": 0.2496328204870224, + "block11_k_cos_v_neg_g": 0.10549093782901764, + "block11_v_v_norm": 0.24651195108890533, + "block11_v_cos_v_neg_g": 0.07240801304578781, + "block11_o_v_norm": 0.24893292784690857, + "block11_o_cos_v_neg_g": 0.10352382808923721, + "block11_mlp_win_v_norm": 0.24547839164733887, + "block11_mlp_win_cos_v_neg_g": 0.11689486354589462, + "block11_mlp_wout_v_norm": 0.2381732165813446, + "block11_mlp_wout_cos_v_neg_g": 0.09439072757959366, + "embed_lm_head_sharpness": 0.0004258153203409165, + "layer_1_sharpness": 0.01033696997910738, + "layer_2_sharpness": 0.004190447740256786, + "layer_3_sharpness": 0.002973361872136593, + "layer_4_sharpness": 0.0012736220378428698, + "layer_5_sharpness": 0.0010987049899995327, + "layer_6_sharpness": 0.0014652301324531436, + "layer_7_sharpness": 0.0015931619564071298, + "layer_8_sharpness": 0.0012784989085048437, + "layer_9_sharpness": 0.0008240524912253022, + "layer_10_sharpness": 0.0005172879318706691, + "layer_11_sharpness": 0.0004518904024735093, + "layer_12_sharpness": 0.0003539249300956726, + "block0_q_sharpness": 0.0010560021037235856, + "block0_k_sharpness": 0.0012496180133894086, + "block0_v_sharpness": 0.03345657140016556, + "block0_o_sharpness": 0.00048578463611193, + "block0_mlp_win_sharpness": 0.0013828701339662075, + "block0_mlp_wout_sharpness": 0.0016127604758366942, + "block3_q_sharpness": 7.31521868146956e-05, + "block3_k_sharpness": 0.001389788230881095, + "block3_v_sharpness": 0.0032712032552808523, + "block3_o_sharpness": 0.00021164253121241927, + "block3_mlp_win_sharpness": 0.00011948131577810273, + 
"block3_mlp_wout_sharpness": 7.057658513076603e-05, + "block7_q_sharpness": 0.0001787857909221202, + "block7_k_sharpness": 0.00016968222917057574, + "block7_v_sharpness": 0.0029025189578533173, + "block7_o_sharpness": 0.00010533810564083979, + "block7_mlp_win_sharpness": 0.00036600494058802724, + "block7_mlp_wout_sharpness": 0.00010483751248102635, + "block11_q_sharpness": 4.008960968349129e-05, + "block11_k_sharpness": 4.128173168282956e-05, + "block11_v_sharpness": 0.00016175402561202645, + "block11_o_sharpness": 5.0088903662981465e-05, + "block11_mlp_win_sharpness": 0.00024409899197053164, + "block11_mlp_wout_sharpness": 0.0004203837306704372, + "sum_layer_numerators": 0.008319051586091352, + "block_diag_sharpness": 0.0021025756863537275, + "cross_layer_sharpness": 0.006555249493044332 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..20f748ab813b624bd639be47137e015b842a700b --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3852689266204834, + "total_l1_linf_norm": 20389.685546875, + "total_spectral_norm": 2.3852686882019043, + "embed_lm_head_update_fnorm": 1.340356469154358, + "embed_lm_head_max_l1_linf_norm": 0.34064149856567383, + "embed_lm_head_max_spectral_norm": 0.21683913469314575, + "layer_1_update_fnorm": 0.5770775675773621, + "layer_1_max_l1_linf_norm": 0.4413537383079529, + "layer_1_max_spectral_norm": 0.012050571851432323, + "layer_2_update_fnorm": 0.47114670276641846, + "layer_2_max_l1_linf_norm": 0.41911470890045166, + "layer_2_max_spectral_norm": 0.012061954475939274, + "layer_3_update_fnorm": 0.4442155957221985, + "layer_3_max_l1_linf_norm": 0.4105856418609619, + "layer_3_max_spectral_norm": 0.015686972066760063, + "layer_4_update_fnorm": 0.5514392852783203, + "layer_4_max_l1_linf_norm": 0.40124136209487915, + "layer_4_max_spectral_norm": 0.012041333131492138, + "layer_5_update_fnorm": 0.5846719145774841, + "layer_5_max_l1_linf_norm": 0.4085673689842224, + "layer_5_max_spectral_norm": 0.012047662399709225, + "layer_6_update_fnorm": 0.5892316699028015, + "layer_6_max_l1_linf_norm": 0.4098547697067261, + "layer_6_max_spectral_norm": 0.012043640948832035, + "layer_7_update_fnorm": 0.5986748933792114, + "layer_7_max_l1_linf_norm": 0.41072946786880493, + "layer_7_max_spectral_norm": 0.01205459889024496, + "layer_8_update_fnorm": 0.5994632244110107, + "layer_8_max_l1_linf_norm": 0.4141135513782501, + "layer_8_max_spectral_norm": 0.01206604391336441, + "layer_9_update_fnorm": 0.5976564288139343, + "layer_9_max_l1_linf_norm": 0.4149824380874634, + "layer_9_max_spectral_norm": 0.012047112919390202, + "layer_10_update_fnorm": 0.5976598858833313, + "layer_10_max_l1_linf_norm": 0.4109655022621155, + "layer_10_max_spectral_norm": 0.012043459340929985, + "layer_11_update_fnorm": 0.5940834879875183, + "layer_11_max_l1_linf_norm": 0.40795448422431946, + "layer_11_max_spectral_norm": 0.012046481482684612, + "layer_12_update_fnorm": 0.6020066738128662, + "layer_12_max_l1_linf_norm": 0.3941143751144409, + "layer_12_max_spectral_norm": 0.012043784372508526, + "block0_q_update_fnorm": 0.24196937680244446, + "block0_q_max_l1_linf_norm": 
0.2177281230688095, + "block0_q_max_spectral_norm": 0.012045305222272873, + "block0_k_update_fnorm": 0.2408914566040039, + "block0_k_max_l1_linf_norm": 0.2114042341709137, + "block0_k_max_spectral_norm": 0.01204102486371994, + "block0_v_update_fnorm": 0.13768590986728668, + "block0_v_max_l1_linf_norm": 0.19123855233192444, + "block0_v_max_spectral_norm": 0.012025267817080021, + "block0_o_update_fnorm": 0.22426265478134155, + "block0_o_max_l1_linf_norm": 0.1926659494638443, + "block0_o_max_spectral_norm": 0.012043344788253307, + "block0_mlp_win_update_fnorm": 0.273743599653244, + "block0_mlp_win_max_l1_linf_norm": 0.16822075843811035, + "block0_mlp_win_max_spectral_norm": 0.012047668918967247, + "block0_mlp_wout_update_fnorm": 0.2685757577419281, + "block0_mlp_wout_max_l1_linf_norm": 0.4413537383079529, + "block0_mlp_wout_max_spectral_norm": 0.012050571851432323, + "block3_q_update_fnorm": 0.2069067656993866, + "block3_q_max_l1_linf_norm": 0.21325400471687317, + "block3_q_max_spectral_norm": 0.01203817967325449, + "block3_k_update_fnorm": 0.1989373117685318, + "block3_k_max_l1_linf_norm": 0.21356046199798584, + "block3_k_max_spectral_norm": 0.01203653309494257, + "block3_v_update_fnorm": 0.1873670518398285, + "block3_v_max_l1_linf_norm": 0.20275361835956573, + "block3_v_max_spectral_norm": 0.012033184990286827, + "block3_o_update_fnorm": 0.23815812170505524, + "block3_o_max_l1_linf_norm": 0.19984200596809387, + "block3_o_max_spectral_norm": 0.012035784311592579, + "block3_mlp_win_update_fnorm": 0.2660077214241028, + "block3_mlp_win_max_l1_linf_norm": 0.17755842208862305, + "block3_mlp_win_max_spectral_norm": 0.012041333131492138, + "block3_mlp_wout_update_fnorm": 0.24280285835266113, + "block3_mlp_wout_max_l1_linf_norm": 0.40124136209487915, + "block3_mlp_wout_max_spectral_norm": 0.011391779407858849, + "block7_q_update_fnorm": 0.24263602495193481, + "block7_q_max_l1_linf_norm": 0.20713981986045837, + "block7_q_max_spectral_norm": 0.012047030963003635, + "block7_k_update_fnorm": 0.24605602025985718, + "block7_k_max_l1_linf_norm": 0.2115974873304367, + "block7_k_max_spectral_norm": 0.0120407585054636, + "block7_v_update_fnorm": 0.22608794271945953, + "block7_v_max_l1_linf_norm": 0.21007055044174194, + "block7_v_max_spectral_norm": 0.012038784101605415, + "block7_o_update_fnorm": 0.24808034300804138, + "block7_o_max_l1_linf_norm": 0.20684736967086792, + "block7_o_max_spectral_norm": 0.01204517763108015, + "block7_mlp_win_update_fnorm": 0.25634753704071045, + "block7_mlp_win_max_l1_linf_norm": 0.1527434140443802, + "block7_mlp_win_max_spectral_norm": 0.01206604391336441, + "block7_mlp_wout_update_fnorm": 0.24788853526115417, + "block7_mlp_wout_max_l1_linf_norm": 0.4141135513782501, + "block7_mlp_wout_max_spectral_norm": 0.011384010314941406, + "block11_q_update_fnorm": 0.24848246574401855, + "block11_q_max_l1_linf_norm": 0.21027207374572754, + "block11_q_max_spectral_norm": 0.012042207643389702, + "block11_k_update_fnorm": 0.2494414895772934, + "block11_k_max_l1_linf_norm": 0.2126205861568451, + "block11_k_max_spectral_norm": 0.012039288878440857, + "block11_v_update_fnorm": 0.24651770293712616, + "block11_v_max_l1_linf_norm": 0.20634937286376953, + "block11_v_max_spectral_norm": 0.012043784372508526, + "block11_o_update_fnorm": 0.2488507181406021, + "block11_o_max_l1_linf_norm": 0.20789498090744019, + "block11_o_max_spectral_norm": 0.012043287977576256, + "block11_mlp_win_update_fnorm": 0.24335257709026337, + "block11_mlp_win_max_l1_linf_norm": 0.179067924618721, + 
"block11_mlp_win_max_spectral_norm": 0.011370730586349964, + "block11_mlp_wout_update_fnorm": 0.23745602369308472, + "block11_mlp_wout_max_l1_linf_norm": 0.3941143751144409, + "block11_mlp_wout_max_spectral_norm": 0.011369260028004646, + "total_sharpness": 0.008962444961071014, + "block_total_sharpness": 0.01102939248085022, + "v_norm_block": 1.9730563163757324, + "v_T_H_v_block": 0.04293688386678696, + "v_norm": 2.3852689266204834, + "ip_v_neg_g_hvp": 0.058609869331121445, + "cos_v_neg_g_hvp": 0.028201255947351456, + "g_hvp_norm": 0.8712944984436035, + "ip_v_neg_g_t": 0.05778893828392029, + "cos_v_neg_g_t": 0.03181483969092369, + "g_t_norm": 0.7615135312080383, + "g_norm": 0.8712944984436035, + "hv_norm": 1.4234119653701782, + "cos_v_hv": 0.015018732286989689, + "hg_norm": 91.54182434082031, + "cos_g_hg": 0.7167571783065796, + "v_parallel_norm": 0.006655807141214609, + "v_perp_norm": 2.3852596282958984, + "embed_lm_head_v_norm": 1.340356469154358, + "embed_lm_head_cos_v_neg_g": 0.041476089507341385, + "layer_1_v_norm": 0.5770775675773621, + "layer_1_cos_v_neg_g": 0.027237024158239365, + "layer_2_v_norm": 0.47114670276641846, + "layer_2_cos_v_neg_g": 0.03499672934412956, + "layer_3_v_norm": 0.4442155659198761, + "layer_3_cos_v_neg_g": 0.02781316079199314, + "layer_4_v_norm": 0.5514392852783203, + "layer_4_cos_v_neg_g": 0.03241507336497307, + "layer_5_v_norm": 0.5846719145774841, + "layer_5_cos_v_neg_g": 0.03513188660144806, + "layer_6_v_norm": 0.5892316699028015, + "layer_6_cos_v_neg_g": 0.03450668975710869, + "layer_7_v_norm": 0.5986748933792114, + "layer_7_cos_v_neg_g": 0.0343497134745121, + "layer_8_v_norm": 0.5994632244110107, + "layer_8_cos_v_neg_g": 0.03442300856113434, + "layer_9_v_norm": 0.5976564288139343, + "layer_9_cos_v_neg_g": 0.03625880926847458, + "layer_10_v_norm": 0.5976598858833313, + "layer_10_cos_v_neg_g": 0.03822321444749832, + "layer_11_v_norm": 0.5940834879875183, + "layer_11_cos_v_neg_g": 0.045805446803569794, + "layer_12_v_norm": 0.6020066738128662, + "layer_12_cos_v_neg_g": 0.07759566605091095, + "block0_q_v_norm": 0.24196937680244446, + "block0_q_cos_v_neg_g": 0.10549445450305939, + "block0_k_v_norm": 0.2408914566040039, + "block0_k_cos_v_neg_g": 0.10891228914260864, + "block0_v_v_norm": 0.13768590986728668, + "block0_v_cos_v_neg_g": 0.053311631083488464, + "block0_o_v_norm": 0.22426265478134155, + "block0_o_cos_v_neg_g": 0.06148684024810791, + "block0_mlp_win_v_norm": 0.273743599653244, + "block0_mlp_win_cos_v_neg_g": 0.043794263154268265, + "block0_mlp_wout_v_norm": 0.2685757577419281, + "block0_mlp_wout_cos_v_neg_g": 0.07010045647621155, + "block3_q_v_norm": 0.2069067656993866, + "block3_q_cos_v_neg_g": 0.03470272943377495, + "block3_k_v_norm": 0.1989373117685318, + "block3_k_cos_v_neg_g": 0.048327550292015076, + "block3_v_v_norm": 0.1873670518398285, + "block3_v_cos_v_neg_g": 0.033554807305336, + "block3_o_v_norm": 0.23815812170505524, + "block3_o_cos_v_neg_g": 0.06650138646364212, + "block3_mlp_win_v_norm": 0.2660077214241028, + "block3_mlp_win_cos_v_neg_g": 0.037241652607917786, + "block3_mlp_wout_v_norm": 0.24280285835266113, + "block3_mlp_wout_cos_v_neg_g": 0.10523776710033417, + "block7_q_v_norm": 0.24263602495193481, + "block7_q_cos_v_neg_g": 0.04221363365650177, + "block7_k_v_norm": 0.24605602025985718, + "block7_k_cos_v_neg_g": 0.07934468239545822, + "block7_v_v_norm": 0.22608794271945953, + "block7_v_cos_v_neg_g": 0.03218260407447815, + "block7_o_v_norm": 0.24808034300804138, + "block7_o_cos_v_neg_g": 0.06980378925800323, + "block7_mlp_win_v_norm": 
0.25634753704071045, + "block7_mlp_win_cos_v_neg_g": 0.047696929425001144, + "block7_mlp_wout_v_norm": 0.24788853526115417, + "block7_mlp_wout_cos_v_neg_g": 0.12245399504899979, + "block11_q_v_norm": 0.24848246574401855, + "block11_q_cos_v_neg_g": 0.08370684832334518, + "block11_k_v_norm": 0.2494414895772934, + "block11_k_cos_v_neg_g": 0.09902781248092651, + "block11_v_v_norm": 0.24651770293712616, + "block11_v_cos_v_neg_g": 0.05560735613107681, + "block11_o_v_norm": 0.2488507181406021, + "block11_o_cos_v_neg_g": 0.09297984093427658, + "block11_mlp_win_v_norm": 0.24335257709026337, + "block11_mlp_win_cos_v_neg_g": 0.10804881155490875, + "block11_mlp_wout_v_norm": 0.23745602369308472, + "block11_mlp_wout_cos_v_neg_g": 0.08782574534416199, + "embed_lm_head_sharpness": 0.0006332214688882232, + "layer_1_sharpness": 0.026817891746759415, + "layer_2_sharpness": 0.009076928719878197, + "layer_3_sharpness": 0.0019178532529622316, + "layer_4_sharpness": 0.0012047764612361789, + "layer_5_sharpness": 0.0011265170760452747, + "layer_6_sharpness": 0.0014937605010345578, + "layer_7_sharpness": 0.0013397332513704896, + "layer_8_sharpness": 0.0011517074890434742, + "layer_9_sharpness": 0.0008328010444529355, + "layer_10_sharpness": 0.0004848799726460129, + "layer_11_sharpness": 0.0004818691231776029, + "layer_12_sharpness": 0.0004199254617560655, + "block0_q_sharpness": 0.004015455953776836, + "block0_k_sharpness": 0.0034635637421160936, + "block0_v_sharpness": 0.14341169595718384, + "block0_o_sharpness": 0.0016263171564787626, + "block0_mlp_win_sharpness": 0.0015918693970888853, + "block0_mlp_wout_sharpness": 0.0021990167442709208, + "block3_q_sharpness": 0.00013368006329983473, + "block3_k_sharpness": 0.0009590281406417489, + "block3_v_sharpness": 0.0037400920409709215, + "block3_o_sharpness": 0.0002018262748606503, + "block3_mlp_win_sharpness": 0.00011800818174378946, + "block3_mlp_wout_sharpness": 8.017253276193514e-05, + "block7_q_sharpness": 6.838946865173057e-05, + "block7_k_sharpness": 0.00010626961739035323, + "block7_v_sharpness": 0.002919161692261696, + "block7_o_sharpness": 0.000102548576251138, + "block7_mlp_win_sharpness": 0.00032855229801498353, + "block7_mlp_wout_sharpness": 9.555012366035953e-05, + "block11_q_sharpness": 3.498702426441014e-05, + "block11_k_sharpness": 5.048251478001475e-05, + "block11_v_sharpness": 0.0001686080649960786, + "block11_o_sharpness": 4.410600013216026e-05, + "block11_mlp_win_sharpness": 0.0003144074871670455, + "block11_mlp_wout_sharpness": 0.0005045608268119395, + "sum_layer_numerators": 0.014281231136281769, + "block_diag_sharpness": 0.003668484473955811, + "cross_layer_sharpness": 0.007360908006894409 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..6f0bba3c83b10c816b6c71ed2edf0e040bb1eaf0 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_3500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3783669471740723, + "total_l1_linf_norm": 20329.2265625, + "total_spectral_norm": 2.3783669471740723, + "embed_lm_head_update_fnorm": 1.3322020769119263, + "embed_lm_head_max_l1_linf_norm": 0.357219934463501, + "embed_lm_head_max_spectral_norm": 0.20471079647541046, + 
"layer_1_update_fnorm": 0.5717149972915649, + "layer_1_max_l1_linf_norm": 0.4455358386039734, + "layer_1_max_spectral_norm": 0.012053369544446468, + "layer_2_update_fnorm": 0.47738292813301086, + "layer_2_max_l1_linf_norm": 0.42698097229003906, + "layer_2_max_spectral_norm": 0.012051979079842567, + "layer_3_update_fnorm": 0.42614325881004333, + "layer_3_max_l1_linf_norm": 0.5030791759490967, + "layer_3_max_spectral_norm": 0.019951479509472847, + "layer_4_update_fnorm": 0.5543665885925293, + "layer_4_max_l1_linf_norm": 0.4057074189186096, + "layer_4_max_spectral_norm": 0.012044846080243587, + "layer_5_update_fnorm": 0.5850269198417664, + "layer_5_max_l1_linf_norm": 0.4072495996952057, + "layer_5_max_spectral_norm": 0.01205280888825655, + "layer_6_update_fnorm": 0.5895346999168396, + "layer_6_max_l1_linf_norm": 0.41223418712615967, + "layer_6_max_spectral_norm": 0.01204465702176094, + "layer_7_update_fnorm": 0.60000079870224, + "layer_7_max_l1_linf_norm": 0.41517728567123413, + "layer_7_max_spectral_norm": 0.012057210318744183, + "layer_8_update_fnorm": 0.6004412174224854, + "layer_8_max_l1_linf_norm": 0.41637468338012695, + "layer_8_max_spectral_norm": 0.012049409560859203, + "layer_9_update_fnorm": 0.5987502932548523, + "layer_9_max_l1_linf_norm": 0.4112504720687866, + "layer_9_max_spectral_norm": 0.012068009935319424, + "layer_10_update_fnorm": 0.5971854329109192, + "layer_10_max_l1_linf_norm": 0.41590172052383423, + "layer_10_max_spectral_norm": 0.01204598881304264, + "layer_11_update_fnorm": 0.5921174883842468, + "layer_11_max_l1_linf_norm": 0.4098063111305237, + "layer_11_max_spectral_norm": 0.0120469955727458, + "layer_12_update_fnorm": 0.6017645597457886, + "layer_12_max_l1_linf_norm": 0.3961770236492157, + "layer_12_max_spectral_norm": 0.012044653296470642, + "block0_q_update_fnorm": 0.23634953796863556, + "block0_q_max_l1_linf_norm": 0.21188366413116455, + "block0_q_max_spectral_norm": 0.012041805312037468, + "block0_k_update_fnorm": 0.22806578874588013, + "block0_k_max_l1_linf_norm": 0.21406054496765137, + "block0_k_max_spectral_norm": 0.01203745137900114, + "block0_v_update_fnorm": 0.15101121366024017, + "block0_v_max_l1_linf_norm": 0.2023838758468628, + "block0_v_max_spectral_norm": 0.01203239243477583, + "block0_o_update_fnorm": 0.22235454618930817, + "block0_o_max_l1_linf_norm": 0.19265826046466827, + "block0_o_max_spectral_norm": 0.012038305401802063, + "block0_mlp_win_update_fnorm": 0.2733736038208008, + "block0_mlp_win_max_l1_linf_norm": 0.17262515425682068, + "block0_mlp_win_max_spectral_norm": 0.01204647496342659, + "block0_mlp_wout_update_fnorm": 0.2681211829185486, + "block0_mlp_wout_max_l1_linf_norm": 0.4455358386039734, + "block0_mlp_wout_max_spectral_norm": 0.012053369544446468, + "block3_q_update_fnorm": 0.20942890644073486, + "block3_q_max_l1_linf_norm": 0.21167679131031036, + "block3_q_max_spectral_norm": 0.0120379738509655, + "block3_k_update_fnorm": 0.20413938164710999, + "block3_k_max_l1_linf_norm": 0.2135513722896576, + "block3_k_max_spectral_norm": 0.012038216926157475, + "block3_v_update_fnorm": 0.1875927597284317, + "block3_v_max_l1_linf_norm": 0.2036588490009308, + "block3_v_max_spectral_norm": 0.012031654827296734, + "block3_o_update_fnorm": 0.2384939193725586, + "block3_o_max_l1_linf_norm": 0.2009028196334839, + "block3_o_max_spectral_norm": 0.012037057429552078, + "block3_mlp_win_update_fnorm": 0.26609086990356445, + "block3_mlp_win_max_l1_linf_norm": 0.18891486525535583, + "block3_mlp_win_max_spectral_norm": 0.012044846080243587, + 
"block3_mlp_wout_update_fnorm": 0.2423810213804245, + "block3_mlp_wout_max_l1_linf_norm": 0.40020063519477844, + "block3_mlp_wout_max_spectral_norm": 0.011396312154829502, + "block7_q_update_fnorm": 0.2424864023923874, + "block7_q_max_l1_linf_norm": 0.20660188794136047, + "block7_q_max_spectral_norm": 0.012042902410030365, + "block7_k_update_fnorm": 0.24697816371917725, + "block7_k_max_l1_linf_norm": 0.21064534783363342, + "block7_k_max_spectral_norm": 0.012035515159368515, + "block7_v_update_fnorm": 0.22317799925804138, + "block7_v_max_l1_linf_norm": 0.20943902432918549, + "block7_v_max_spectral_norm": 0.01203703973442316, + "block7_o_update_fnorm": 0.24818290770053864, + "block7_o_max_l1_linf_norm": 0.206935316324234, + "block7_o_max_spectral_norm": 0.012041810899972916, + "block7_mlp_win_update_fnorm": 0.26033884286880493, + "block7_mlp_win_max_l1_linf_norm": 0.1608043909072876, + "block7_mlp_win_max_spectral_norm": 0.012049409560859203, + "block7_mlp_wout_update_fnorm": 0.24784916639328003, + "block7_mlp_wout_max_l1_linf_norm": 0.41637468338012695, + "block7_mlp_wout_max_spectral_norm": 0.011377732269465923, + "block11_q_update_fnorm": 0.24810540676116943, + "block11_q_max_l1_linf_norm": 0.214752197265625, + "block11_q_max_spectral_norm": 0.012040561996400356, + "block11_k_update_fnorm": 0.24932454526424408, + "block11_k_max_l1_linf_norm": 0.21499940752983093, + "block11_k_max_spectral_norm": 0.012039011344313622, + "block11_v_update_fnorm": 0.24612711369991302, + "block11_v_max_l1_linf_norm": 0.2056717872619629, + "block11_v_max_spectral_norm": 0.012044653296470642, + "block11_o_update_fnorm": 0.2488916665315628, + "block11_o_max_l1_linf_norm": 0.20768558979034424, + "block11_o_max_spectral_norm": 0.012039528228342533, + "block11_mlp_win_update_fnorm": 0.2435946762561798, + "block11_mlp_win_max_l1_linf_norm": 0.1706688404083252, + "block11_mlp_win_max_spectral_norm": 0.011375327594578266, + "block11_mlp_wout_update_fnorm": 0.23743578791618347, + "block11_mlp_wout_max_l1_linf_norm": 0.3961770236492157, + "block11_mlp_wout_max_spectral_norm": 0.011364975944161415, + "total_sharpness": 0.005859576165676117, + "block_total_sharpness": 0.007340069394558668, + "v_norm_block": 1.9702454805374146, + "v_T_H_v_block": 0.02849317528307438, + "v_norm": 2.3783669471740723, + "ip_v_neg_g_hvp": 0.04915792495012283, + "cos_v_neg_g_hvp": 0.032695427536964417, + "g_hvp_norm": 0.6321609616279602, + "ip_v_neg_g_t": 0.04935974255204201, + "cos_v_neg_g_t": 0.03783012554049492, + "g_t_norm": 0.5486005544662476, + "g_norm": 0.6321609616279602, + "hv_norm": 1.0122119188308716, + "cos_v_hv": 0.013768087141215801, + "hg_norm": 56.289512634277344, + "cos_g_hg": 0.5153041481971741, + "v_parallel_norm": 0.006374965887516737, + "v_perp_norm": 2.3783583641052246, + "embed_lm_head_v_norm": 1.3322020769119263, + "embed_lm_head_cos_v_neg_g": 0.05875484645366669, + "layer_1_v_norm": 0.5717149972915649, + "layer_1_cos_v_neg_g": 0.02637113817036152, + "layer_2_v_norm": 0.47738292813301086, + "layer_2_cos_v_neg_g": 0.033454135060310364, + "layer_3_v_norm": 0.42614322900772095, + "layer_3_cos_v_neg_g": 0.028330104425549507, + "layer_4_v_norm": 0.5543665885925293, + "layer_4_cos_v_neg_g": 0.028918784111738205, + "layer_5_v_norm": 0.5850269198417664, + "layer_5_cos_v_neg_g": 0.03371110558509827, + "layer_6_v_norm": 0.5895346999168396, + "layer_6_cos_v_neg_g": 0.03361774608492851, + "layer_7_v_norm": 0.60000079870224, + "layer_7_cos_v_neg_g": 0.03232013061642647, + "layer_8_v_norm": 0.6004412174224854, + "layer_8_cos_v_neg_g": 
0.03339286148548126, + "layer_9_v_norm": 0.5987502932548523, + "layer_9_cos_v_neg_g": 0.03294571116566658, + "layer_10_v_norm": 0.5971854329109192, + "layer_10_cos_v_neg_g": 0.03769275173544884, + "layer_11_v_norm": 0.5921174883842468, + "layer_11_cos_v_neg_g": 0.04502395540475845, + "layer_12_v_norm": 0.6017645597457886, + "layer_12_cos_v_neg_g": 0.07243576645851135, + "block0_q_v_norm": 0.23634953796863556, + "block0_q_cos_v_neg_g": 0.05792305991053581, + "block0_k_v_norm": 0.22806578874588013, + "block0_k_cos_v_neg_g": 0.06997338682413101, + "block0_v_v_norm": 0.15101121366024017, + "block0_v_cos_v_neg_g": 0.038782574236392975, + "block0_o_v_norm": 0.22235454618930817, + "block0_o_cos_v_neg_g": 0.05090736597776413, + "block0_mlp_win_v_norm": 0.2733736038208008, + "block0_mlp_win_cos_v_neg_g": 0.04235316067934036, + "block0_mlp_wout_v_norm": 0.2681211829185486, + "block0_mlp_wout_cos_v_neg_g": 0.06488367915153503, + "block3_q_v_norm": 0.20942890644073486, + "block3_q_cos_v_neg_g": 0.028619591146707535, + "block3_k_v_norm": 0.20413938164710999, + "block3_k_cos_v_neg_g": 0.04030594974756241, + "block3_v_v_norm": 0.1875927597284317, + "block3_v_cos_v_neg_g": 0.027840537950396538, + "block3_o_v_norm": 0.2384939193725586, + "block3_o_cos_v_neg_g": 0.06455040723085403, + "block3_mlp_win_v_norm": 0.26609086990356445, + "block3_mlp_win_cos_v_neg_g": 0.03635517135262489, + "block3_mlp_wout_v_norm": 0.2423810213804245, + "block3_mlp_wout_cos_v_neg_g": 0.10149551182985306, + "block7_q_v_norm": 0.2424864023923874, + "block7_q_cos_v_neg_g": 0.03865302354097366, + "block7_k_v_norm": 0.24697816371917725, + "block7_k_cos_v_neg_g": 0.07564988732337952, + "block7_v_v_norm": 0.22317799925804138, + "block7_v_cos_v_neg_g": 0.030997592955827713, + "block7_o_v_norm": 0.24818290770053864, + "block7_o_cos_v_neg_g": 0.07345114648342133, + "block7_mlp_win_v_norm": 0.26033884286880493, + "block7_mlp_win_cos_v_neg_g": 0.04396592825651169, + "block7_mlp_wout_v_norm": 0.24784916639328003, + "block7_mlp_wout_cos_v_neg_g": 0.12285932153463364, + "block11_q_v_norm": 0.24810540676116943, + "block11_q_cos_v_neg_g": 0.07969103753566742, + "block11_k_v_norm": 0.24932454526424408, + "block11_k_cos_v_neg_g": 0.09635505080223083, + "block11_v_v_norm": 0.24612711369991302, + "block11_v_cos_v_neg_g": 0.05735956132411957, + "block11_o_v_norm": 0.2488916665315628, + "block11_o_cos_v_neg_g": 0.09014005959033966, + "block11_mlp_win_v_norm": 0.2435946762561798, + "block11_mlp_win_cos_v_neg_g": 0.1038408949971199, + "block11_mlp_wout_v_norm": 0.23743578791618347, + "block11_mlp_wout_cos_v_neg_g": 0.07515967637300491, + "embed_lm_head_sharpness": 0.0004657529934775084, + "layer_1_sharpness": 0.012045607902109623, + "layer_2_sharpness": 0.0035329454112797976, + "layer_3_sharpness": 0.0022351141087710857, + "layer_4_sharpness": 0.0010759405558928847, + "layer_5_sharpness": 0.0010405005887150764, + "layer_6_sharpness": 0.0012512708781287074, + "layer_7_sharpness": 0.00115744408685714, + "layer_8_sharpness": 0.000975963135715574, + "layer_9_sharpness": 0.0006436265539377928, + "layer_10_sharpness": 0.00043580628698691726, + "layer_11_sharpness": 0.00046148072578944266, + "layer_12_sharpness": 0.00039326134719885886, + "block0_q_sharpness": 0.0009361312841065228, + "block0_k_sharpness": 0.0009493993711657822, + "block0_v_sharpness": 0.05996125563979149, + "block0_o_sharpness": 0.0011424497934058309, + "block0_mlp_win_sharpness": 0.0010974061442539096, + "block0_mlp_wout_sharpness": 0.0012150286929681897, + "block3_q_sharpness": 
5.4761720093665645e-05, + "block3_k_sharpness": 0.0005946309538558125, + "block3_v_sharpness": 0.003516026306897402, + "block3_o_sharpness": 0.00022577974596060812, + "block3_mlp_win_sharpness": 0.00012231012806296349, + "block3_mlp_wout_sharpness": 5.544541636481881e-05, + "block7_q_sharpness": 5.430267992778681e-05, + "block7_k_sharpness": 7.033784640952945e-05, + "block7_v_sharpness": 0.0027269453275948763, + "block7_o_sharpness": 9.168797987513244e-05, + "block7_mlp_win_sharpness": 0.00028969591949135065, + "block7_mlp_wout_sharpness": 8.533561049262062e-05, + "block11_q_sharpness": 3.2828087569214404e-05, + "block11_k_sharpness": 4.527833152678795e-05, + "block11_v_sharpness": 0.00016896569286473095, + "block11_o_sharpness": 4.4333261030260473e-05, + "block11_mlp_win_sharpness": 0.0002512666105758399, + "block11_mlp_wout_sharpness": 0.000501974718645215, + "sum_layer_numerators": 0.007728806482988401, + "block_diag_sharpness": 0.001991002262085181, + "cross_layer_sharpness": 0.005349067132473487 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..f1c284e887547b6a5539920f47b2f78de5d2e4ed --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3709793090820312, + "total_l1_linf_norm": 20240.94140625, + "total_spectral_norm": 2.3709795475006104, + "embed_lm_head_update_fnorm": 1.3386107683181763, + "embed_lm_head_max_l1_linf_norm": 0.36332058906555176, + "embed_lm_head_max_spectral_norm": 0.2049541175365448, + "layer_1_update_fnorm": 0.5685282349586487, + "layer_1_max_l1_linf_norm": 0.4366875886917114, + "layer_1_max_spectral_norm": 0.012045011855661869, + "layer_2_update_fnorm": 0.4604824483394623, + "layer_2_max_l1_linf_norm": 0.4402814507484436, + "layer_2_max_spectral_norm": 0.012040235102176666, + "layer_3_update_fnorm": 0.40746718645095825, + "layer_3_max_l1_linf_norm": 0.6108834743499756, + "layer_3_max_spectral_norm": 0.02256355993449688, + "layer_4_update_fnorm": 0.5461910367012024, + "layer_4_max_l1_linf_norm": 0.42184314131736755, + "layer_4_max_spectral_norm": 0.012043311260640621, + "layer_5_update_fnorm": 0.5853157043457031, + "layer_5_max_l1_linf_norm": 0.4058167338371277, + "layer_5_max_spectral_norm": 0.012046667747199535, + "layer_6_update_fnorm": 0.589257001876831, + "layer_6_max_l1_linf_norm": 0.4121590852737427, + "layer_6_max_spectral_norm": 0.012053197249770164, + "layer_7_update_fnorm": 0.5994583964347839, + "layer_7_max_l1_linf_norm": 0.4105184078216553, + "layer_7_max_spectral_norm": 0.01204642467200756, + "layer_8_update_fnorm": 0.6008884310722351, + "layer_8_max_l1_linf_norm": 0.4135693907737732, + "layer_8_max_spectral_norm": 0.012048342265188694, + "layer_9_update_fnorm": 0.5974819660186768, + "layer_9_max_l1_linf_norm": 0.40914827585220337, + "layer_9_max_spectral_norm": 0.012051373720169067, + "layer_10_update_fnorm": 0.5970580577850342, + "layer_10_max_l1_linf_norm": 0.4129597246646881, + "layer_10_max_spectral_norm": 0.01205163449048996, + "layer_11_update_fnorm": 0.5875459909439087, + "layer_11_max_l1_linf_norm": 0.40888091921806335, + "layer_11_max_spectral_norm": 0.012040817178785801, + "layer_12_update_fnorm": 
0.6009435057640076, + "layer_12_max_l1_linf_norm": 0.4051668345928192, + "layer_12_max_spectral_norm": 0.01204534713178873, + "block0_q_update_fnorm": 0.23310145735740662, + "block0_q_max_l1_linf_norm": 0.21160607039928436, + "block0_q_max_spectral_norm": 0.012039807625114918, + "block0_k_update_fnorm": 0.2232716679573059, + "block0_k_max_l1_linf_norm": 0.21705353260040283, + "block0_k_max_spectral_norm": 0.012037709355354309, + "block0_v_update_fnorm": 0.16760914027690887, + "block0_v_max_l1_linf_norm": 0.20227740705013275, + "block0_v_max_spectral_norm": 0.012032059952616692, + "block0_o_update_fnorm": 0.22182655334472656, + "block0_o_max_l1_linf_norm": 0.19167166948318481, + "block0_o_max_spectral_norm": 0.012035599909722805, + "block0_mlp_win_update_fnorm": 0.26591354608535767, + "block0_mlp_win_max_l1_linf_norm": 0.1742306351661682, + "block0_mlp_win_max_spectral_norm": 0.012040962465107441, + "block0_mlp_wout_update_fnorm": 0.2661920189857483, + "block0_mlp_wout_max_l1_linf_norm": 0.4366875886917114, + "block0_mlp_wout_max_spectral_norm": 0.012045011855661869, + "block3_q_update_fnorm": 0.20874683558940887, + "block3_q_max_l1_linf_norm": 0.2111869752407074, + "block3_q_max_spectral_norm": 0.012042236514389515, + "block3_k_update_fnorm": 0.20382961630821228, + "block3_k_max_l1_linf_norm": 0.21235373616218567, + "block3_k_max_spectral_norm": 0.012040188536047935, + "block3_v_update_fnorm": 0.17580805718898773, + "block3_v_max_l1_linf_norm": 0.1910034418106079, + "block3_v_max_spectral_norm": 0.012027540244162083, + "block3_o_update_fnorm": 0.23119376599788666, + "block3_o_max_l1_linf_norm": 0.1946558803319931, + "block3_o_max_spectral_norm": 0.012038212269544601, + "block3_mlp_win_update_fnorm": 0.2649744749069214, + "block3_mlp_win_max_l1_linf_norm": 0.17588874697685242, + "block3_mlp_win_max_spectral_norm": 0.012043311260640621, + "block3_mlp_wout_update_fnorm": 0.2416536509990692, + "block3_mlp_wout_max_l1_linf_norm": 0.40010565519332886, + "block3_mlp_wout_max_spectral_norm": 0.01140262559056282, + "block7_q_update_fnorm": 0.24258297681808472, + "block7_q_max_l1_linf_norm": 0.20970982313156128, + "block7_q_max_spectral_norm": 0.012043270282447338, + "block7_k_update_fnorm": 0.24679435789585114, + "block7_k_max_l1_linf_norm": 0.21253028512001038, + "block7_k_max_spectral_norm": 0.012039807625114918, + "block7_v_update_fnorm": 0.21771548688411713, + "block7_v_max_l1_linf_norm": 0.20736537873744965, + "block7_v_max_spectral_norm": 0.012040314264595509, + "block7_o_update_fnorm": 0.24831198155879974, + "block7_o_max_l1_linf_norm": 0.21241061389446259, + "block7_o_max_spectral_norm": 0.012048342265188694, + "block7_mlp_win_update_fnorm": 0.26654088497161865, + "block7_mlp_win_max_l1_linf_norm": 0.1600589156150818, + "block7_mlp_win_max_spectral_norm": 0.012046050280332565, + "block7_mlp_wout_update_fnorm": 0.24711869657039642, + "block7_mlp_wout_max_l1_linf_norm": 0.4135693907737732, + "block7_mlp_wout_max_spectral_norm": 0.011376811191439629, + "block11_q_update_fnorm": 0.24831010401248932, + "block11_q_max_l1_linf_norm": 0.2148638665676117, + "block11_q_max_spectral_norm": 0.012043544091284275, + "block11_k_update_fnorm": 0.24932120740413666, + "block11_k_max_l1_linf_norm": 0.21448183059692383, + "block11_k_max_spectral_norm": 0.01203879900276661, + "block11_v_update_fnorm": 0.24616506695747375, + "block11_v_max_l1_linf_norm": 0.2059468924999237, + "block11_v_max_spectral_norm": 0.01204534713178873, + "block11_o_update_fnorm": 0.24889683723449707, + "block11_o_max_l1_linf_norm": 
0.20961540937423706, + "block11_o_max_spectral_norm": 0.01203909981995821, + "block11_mlp_win_update_fnorm": 0.24189667403697968, + "block11_mlp_win_max_l1_linf_norm": 0.17257101833820343, + "block11_mlp_win_max_spectral_norm": 0.011393113993108273, + "block11_mlp_wout_update_fnorm": 0.2368234246969223, + "block11_mlp_wout_max_l1_linf_norm": 0.3941464424133301, + "block11_mlp_wout_max_spectral_norm": 0.011345155537128448, + "total_sharpness": 0.00547356903553009, + "block_total_sharpness": 0.007190933916717768, + "v_norm_block": 1.9569530487060547, + "v_T_H_v_block": 0.02753886952996254, + "v_norm": 2.3709793090820312, + "ip_v_neg_g_hvp": 0.04736507311463356, + "cos_v_neg_g_hvp": 0.03036215715110302, + "g_hvp_norm": 0.6579574346542358, + "ip_v_neg_g_t": 0.052579957991838455, + "cos_v_neg_g_t": 0.035323213785886765, + "g_t_norm": 0.627815842628479, + "g_norm": 0.6579574346542358, + "hv_norm": 0.7458423376083374, + "cos_v_hv": 0.017400085926055908, + "hg_norm": 34.05084228515625, + "cos_g_hg": 0.6104370355606079, + "v_parallel_norm": 0.005895886104553938, + "v_perp_norm": 2.370972156524658, + "embed_lm_head_v_norm": 1.3386107683181763, + "embed_lm_head_cos_v_neg_g": 0.04824243485927582, + "layer_1_v_norm": 0.5685282349586487, + "layer_1_cos_v_neg_g": 0.01691846176981926, + "layer_2_v_norm": 0.4604824483394623, + "layer_2_cos_v_neg_g": 0.026029201224446297, + "layer_3_v_norm": 0.40746718645095825, + "layer_3_cos_v_neg_g": 0.027058715000748634, + "layer_4_v_norm": 0.5461910367012024, + "layer_4_cos_v_neg_g": 0.029693057760596275, + "layer_5_v_norm": 0.5853157043457031, + "layer_5_cos_v_neg_g": 0.03361472487449646, + "layer_6_v_norm": 0.589257001876831, + "layer_6_cos_v_neg_g": 0.0335950031876564, + "layer_7_v_norm": 0.5994583964347839, + "layer_7_cos_v_neg_g": 0.03367406874895096, + "layer_8_v_norm": 0.6008884310722351, + "layer_8_cos_v_neg_g": 0.03308729827404022, + "layer_9_v_norm": 0.5974819660186768, + "layer_9_cos_v_neg_g": 0.03316006809473038, + "layer_10_v_norm": 0.5970580577850342, + "layer_10_cos_v_neg_g": 0.036538489162921906, + "layer_11_v_norm": 0.5875459313392639, + "layer_11_cos_v_neg_g": 0.04604576155543327, + "layer_12_v_norm": 0.6009435057640076, + "layer_12_cos_v_neg_g": 0.07867898046970367, + "block0_q_v_norm": 0.23310145735740662, + "block0_q_cos_v_neg_g": 0.02186989225447178, + "block0_k_v_norm": 0.2232716679573059, + "block0_k_cos_v_neg_g": 0.01289454847574234, + "block0_v_v_norm": 0.16760914027690887, + "block0_v_cos_v_neg_g": 0.017461402341723442, + "block0_o_v_norm": 0.22182655334472656, + "block0_o_cos_v_neg_g": 0.03881533443927765, + "block0_mlp_win_v_norm": 0.26591354608535767, + "block0_mlp_win_cos_v_neg_g": 0.03463489189743996, + "block0_mlp_wout_v_norm": 0.2661920189857483, + "block0_mlp_wout_cos_v_neg_g": 0.054115116596221924, + "block3_q_v_norm": 0.20874683558940887, + "block3_q_cos_v_neg_g": 0.0288058090955019, + "block3_k_v_norm": 0.20382961630821228, + "block3_k_cos_v_neg_g": 0.05504998937249184, + "block3_v_v_norm": 0.17580805718898773, + "block3_v_cos_v_neg_g": 0.03193780034780502, + "block3_o_v_norm": 0.23119376599788666, + "block3_o_cos_v_neg_g": 0.052105341106653214, + "block3_mlp_win_v_norm": 0.2649744749069214, + "block3_mlp_win_cos_v_neg_g": 0.03540141135454178, + "block3_mlp_wout_v_norm": 0.2416536509990692, + "block3_mlp_wout_cos_v_neg_g": 0.10783951729536057, + "block7_q_v_norm": 0.24258297681808472, + "block7_q_cos_v_neg_g": 0.04243745654821396, + "block7_k_v_norm": 0.24679435789585114, + "block7_k_cos_v_neg_g": 0.08411436527967453, + 
"block7_v_v_norm": 0.21771548688411713, + "block7_v_cos_v_neg_g": 0.032924991101026535, + "block7_o_v_norm": 0.24831198155879974, + "block7_o_cos_v_neg_g": 0.07822367548942566, + "block7_mlp_win_v_norm": 0.26654088497161865, + "block7_mlp_win_cos_v_neg_g": 0.04190412908792496, + "block7_mlp_wout_v_norm": 0.24711869657039642, + "block7_mlp_wout_cos_v_neg_g": 0.1275082230567932, + "block11_q_v_norm": 0.24831010401248932, + "block11_q_cos_v_neg_g": 0.07510805130004883, + "block11_k_v_norm": 0.24932120740413666, + "block11_k_cos_v_neg_g": 0.09415596723556519, + "block11_v_v_norm": 0.24616506695747375, + "block11_v_cos_v_neg_g": 0.05712464824318886, + "block11_o_v_norm": 0.24889683723449707, + "block11_o_cos_v_neg_g": 0.09033068269491196, + "block11_mlp_win_v_norm": 0.24189667403697968, + "block11_mlp_win_cos_v_neg_g": 0.10715366899967194, + "block11_mlp_wout_v_norm": 0.2368234246969223, + "block11_mlp_wout_cos_v_neg_g": 0.09313558042049408, + "embed_lm_head_sharpness": 0.000368914712453261, + "layer_1_sharpness": 0.006148649845272303, + "layer_2_sharpness": 0.001677398569881916, + "layer_3_sharpness": 0.0033678486943244934, + "layer_4_sharpness": 0.0016084671951830387, + "layer_5_sharpness": 0.001108331955038011, + "layer_6_sharpness": 0.0015002754516899586, + "layer_7_sharpness": 0.0015237020561471581, + "layer_8_sharpness": 0.0013087410479784012, + "layer_9_sharpness": 0.0008675839635543525, + "layer_10_sharpness": 0.0005088203470222652, + "layer_11_sharpness": 0.0005733264260925353, + "layer_12_sharpness": 0.00046771191409789026, + "block0_q_sharpness": 0.0001600032701389864, + "block0_k_sharpness": 0.0001285864709643647, + "block0_v_sharpness": 0.02162213809788227, + "block0_o_sharpness": 0.0015790816396474838, + "block0_mlp_win_sharpness": 0.001067533390596509, + "block0_mlp_wout_sharpness": 0.0009950785897672176, + "block3_q_sharpness": 6.336912338156253e-05, + "block3_k_sharpness": 0.001175608835183084, + "block3_v_sharpness": 0.004468389321118593, + "block3_o_sharpness": 0.0003629992133937776, + "block3_mlp_win_sharpness": 0.00014887146244291216, + "block3_mlp_wout_sharpness": 6.346739246509969e-05, + "block7_q_sharpness": 6.173281144583598e-05, + "block7_k_sharpness": 8.752895519137383e-05, + "block7_v_sharpness": 0.003226344706490636, + "block7_o_sharpness": 0.00010781676974147558, + "block7_mlp_win_sharpness": 0.000447644415544346, + "block7_mlp_wout_sharpness": 9.847588808042929e-05, + "block11_q_sharpness": 3.969591489294544e-05, + "block11_k_sharpness": 3.824480882030912e-05, + "block11_v_sharpness": 0.00020178848353680223, + "block11_o_sharpness": 4.6905461204005405e-05, + "block11_mlp_win_sharpness": 0.0002217097789980471, + "block11_mlp_wout_sharpness": 0.0007131047896109521, + "sum_layer_numerators": 0.0061607312574021664, + "block_diag_sharpness": 0.0016086866291485872, + "cross_layer_sharpness": 0.0055822472875691805 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..2e48efd7f2a3116c7bc1b237d3f5a0582b4f908d --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_4500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3326401710510254, + "total_l1_linf_norm": 19791.595703125, + 
"total_spectral_norm": 2.3326401710510254, + "embed_lm_head_update_fnorm": 1.3365681171417236, + "embed_lm_head_max_l1_linf_norm": 0.3383038640022278, + "embed_lm_head_max_spectral_norm": 0.20581671595573425, + "layer_1_update_fnorm": 0.551188588142395, + "layer_1_max_l1_linf_norm": 0.42368268966674805, + "layer_1_max_spectral_norm": 0.012039858847856522, + "layer_2_update_fnorm": 0.35869067907333374, + "layer_2_max_l1_linf_norm": 0.421293705701828, + "layer_2_max_spectral_norm": 0.013772976584732533, + "layer_3_update_fnorm": 0.3630863130092621, + "layer_3_max_l1_linf_norm": 0.45767518877983093, + "layer_3_max_spectral_norm": 0.01881822571158409, + "layer_4_update_fnorm": 0.521769106388092, + "layer_4_max_l1_linf_norm": 0.4108601212501526, + "layer_4_max_spectral_norm": 0.012039441615343094, + "layer_5_update_fnorm": 0.5825273990631104, + "layer_5_max_l1_linf_norm": 0.40250009298324585, + "layer_5_max_spectral_norm": 0.012048257514834404, + "layer_6_update_fnorm": 0.5894091725349426, + "layer_6_max_l1_linf_norm": 0.4066470265388489, + "layer_6_max_spectral_norm": 0.012046963907778263, + "layer_7_update_fnorm": 0.5988823175430298, + "layer_7_max_l1_linf_norm": 0.40844789147377014, + "layer_7_max_spectral_norm": 0.012054127641022205, + "layer_8_update_fnorm": 0.6011049747467041, + "layer_8_max_l1_linf_norm": 0.40728577971458435, + "layer_8_max_spectral_norm": 0.012055695988237858, + "layer_9_update_fnorm": 0.5966216325759888, + "layer_9_max_l1_linf_norm": 0.4068279266357422, + "layer_9_max_spectral_norm": 0.012048009783029556, + "layer_10_update_fnorm": 0.5961884260177612, + "layer_10_max_l1_linf_norm": 0.41249021887779236, + "layer_10_max_spectral_norm": 0.012070836499333382, + "layer_11_update_fnorm": 0.5831321477890015, + "layer_11_max_l1_linf_norm": 0.4120422601699829, + "layer_11_max_spectral_norm": 0.012044815346598625, + "layer_12_update_fnorm": 0.6000690460205078, + "layer_12_max_l1_linf_norm": 0.3988398313522339, + "layer_12_max_spectral_norm": 0.012043697759509087, + "block0_q_update_fnorm": 0.23841547966003418, + "block0_q_max_l1_linf_norm": 0.2109011858701706, + "block0_q_max_spectral_norm": 0.012038923799991608, + "block0_k_update_fnorm": 0.22456564009189606, + "block0_k_max_l1_linf_norm": 0.21021199226379395, + "block0_k_max_spectral_norm": 0.012035270221531391, + "block0_v_update_fnorm": 0.13024432957172394, + "block0_v_max_l1_linf_norm": 0.15510031580924988, + "block0_v_max_spectral_norm": 0.01202484779059887, + "block0_o_update_fnorm": 0.21168111264705658, + "block0_o_max_l1_linf_norm": 0.1882920116186142, + "block0_o_max_spectral_norm": 0.012034891173243523, + "block0_mlp_win_update_fnorm": 0.2618072032928467, + "block0_mlp_win_max_l1_linf_norm": 0.18417206406593323, + "block0_mlp_win_max_spectral_norm": 0.012039858847856522, + "block0_mlp_wout_update_fnorm": 0.2570408880710602, + "block0_mlp_wout_max_l1_linf_norm": 0.42368268966674805, + "block0_mlp_wout_max_spectral_norm": 0.012039365246891975, + "block3_q_update_fnorm": 0.18169865012168884, + "block3_q_max_l1_linf_norm": 0.206577330827713, + "block3_q_max_spectral_norm": 0.012030956335365772, + "block3_k_update_fnorm": 0.17402249574661255, + "block3_k_max_l1_linf_norm": 0.20131124556064606, + "block3_k_max_spectral_norm": 0.012031033635139465, + "block3_v_update_fnorm": 0.17850875854492188, + "block3_v_max_l1_linf_norm": 0.18747973442077637, + "block3_v_max_spectral_norm": 0.012030906975269318, + "block3_o_update_fnorm": 0.22479936480522156, + "block3_o_max_l1_linf_norm": 0.19196689128875732, + 
"block3_o_max_spectral_norm": 0.012035243213176727, + "block3_mlp_win_update_fnorm": 0.26423224806785583, + "block3_mlp_win_max_l1_linf_norm": 0.17218759655952454, + "block3_mlp_win_max_spectral_norm": 0.012039441615343094, + "block3_mlp_wout_update_fnorm": 0.23759663105010986, + "block3_mlp_wout_max_l1_linf_norm": 0.3907557725906372, + "block3_mlp_wout_max_spectral_norm": 0.011393602937459946, + "block7_q_update_fnorm": 0.2426474690437317, + "block7_q_max_l1_linf_norm": 0.20681631565093994, + "block7_q_max_spectral_norm": 0.012039657682180405, + "block7_k_update_fnorm": 0.24700433015823364, + "block7_k_max_l1_linf_norm": 0.20957505702972412, + "block7_k_max_spectral_norm": 0.012038175947964191, + "block7_v_update_fnorm": 0.21258988976478577, + "block7_v_max_l1_linf_norm": 0.21496140956878662, + "block7_v_max_spectral_norm": 0.012035494670271873, + "block7_o_update_fnorm": 0.24808774888515472, + "block7_o_max_l1_linf_norm": 0.20911459624767303, + "block7_o_max_spectral_norm": 0.01204678788781166, + "block7_mlp_win_update_fnorm": 0.27240198850631714, + "block7_mlp_win_max_l1_linf_norm": 0.15768378973007202, + "block7_mlp_win_max_spectral_norm": 0.012055695988237858, + "block7_mlp_wout_update_fnorm": 0.24568423628807068, + "block7_mlp_wout_max_l1_linf_norm": 0.40728577971458435, + "block7_mlp_wout_max_spectral_norm": 0.011365311220288277, + "block11_q_update_fnorm": 0.24762220680713654, + "block11_q_max_l1_linf_norm": 0.21385151147842407, + "block11_q_max_spectral_norm": 0.012042471207678318, + "block11_k_update_fnorm": 0.24907554686069489, + "block11_k_max_l1_linf_norm": 0.21603244543075562, + "block11_k_max_spectral_norm": 0.012043697759509087, + "block11_v_update_fnorm": 0.2457425892353058, + "block11_v_max_l1_linf_norm": 0.2088647186756134, + "block11_v_max_spectral_norm": 0.012040268629789352, + "block11_o_update_fnorm": 0.2488124668598175, + "block11_o_max_l1_linf_norm": 0.2086770236492157, + "block11_o_max_spectral_norm": 0.012040828354656696, + "block11_mlp_win_update_fnorm": 0.2412443608045578, + "block11_mlp_win_max_l1_linf_norm": 0.16462135314941406, + "block11_mlp_win_max_spectral_norm": 0.01138890627771616, + "block11_mlp_wout_update_fnorm": 0.23679304122924805, + "block11_mlp_wout_max_l1_linf_norm": 0.3916589915752411, + "block11_mlp_wout_max_spectral_norm": 0.01135654654353857, + "total_sharpness": 0.018458686769008636, + "block_total_sharpness": 0.02534155547618866, + "v_norm_block": 1.911751627922058, + "v_T_H_v_block": 0.0926181748509407, + "v_norm": 2.3326401710510254, + "ip_v_neg_g_hvp": 0.06688199192285538, + "cos_v_neg_g_hvp": 0.02493705227971077, + "g_hvp_norm": 1.1497842073440552, + "ip_v_neg_g_t": 0.0735660120844841, + "cos_v_neg_g_t": 0.026639776304364204, + "g_t_norm": 1.1838560104370117, + "g_norm": 1.1497842073440552, + "hv_norm": 3.2009222507476807, + "cos_v_hv": 0.013451583683490753, + "hg_norm": 588.1919555664062, + "cos_g_hg": 0.2696572244167328, + "v_parallel_norm": 0.0055525475181639194, + "v_perp_norm": 2.3326334953308105, + "embed_lm_head_v_norm": 1.3365681171417236, + "embed_lm_head_cos_v_neg_g": 0.021002473309636116, + "layer_1_v_norm": 0.551188588142395, + "layer_1_cos_v_neg_g": 0.02017032541334629, + "layer_2_v_norm": 0.35869067907333374, + "layer_2_cos_v_neg_g": 0.05182664096355438, + "layer_3_v_norm": 0.3630863130092621, + "layer_3_cos_v_neg_g": 0.04755866527557373, + "layer_4_v_norm": 0.521769106388092, + "layer_4_cos_v_neg_g": 0.03499186038970947, + "layer_5_v_norm": 0.5825273990631104, + "layer_5_cos_v_neg_g": 0.032322272658348083, + 
"layer_6_v_norm": 0.5894091129302979, + "layer_6_cos_v_neg_g": 0.032546188682317734, + "layer_7_v_norm": 0.5988823175430298, + "layer_7_cos_v_neg_g": 0.031286340206861496, + "layer_8_v_norm": 0.6011050343513489, + "layer_8_cos_v_neg_g": 0.030477996915578842, + "layer_9_v_norm": 0.5966216325759888, + "layer_9_cos_v_neg_g": 0.03133062273263931, + "layer_10_v_norm": 0.5961884260177612, + "layer_10_cos_v_neg_g": 0.03379824012517929, + "layer_11_v_norm": 0.5831321477890015, + "layer_11_cos_v_neg_g": 0.04335416480898857, + "layer_12_v_norm": 0.6000690460205078, + "layer_12_cos_v_neg_g": 0.07812508940696716, + "block0_q_v_norm": 0.23841547966003418, + "block0_q_cos_v_neg_g": 0.025283083319664, + "block0_k_v_norm": 0.22456564009189606, + "block0_k_cos_v_neg_g": 0.012697186321020126, + "block0_v_v_norm": 0.13024432957172394, + "block0_v_cos_v_neg_g": 0.039353929460048676, + "block0_o_v_norm": 0.21168111264705658, + "block0_o_cos_v_neg_g": 0.04596857354044914, + "block0_mlp_win_v_norm": 0.2618072032928467, + "block0_mlp_win_cos_v_neg_g": 0.035600002855062485, + "block0_mlp_wout_v_norm": 0.2570408880710602, + "block0_mlp_wout_cos_v_neg_g": 0.057631898671388626, + "block3_q_v_norm": 0.18169865012168884, + "block3_q_cos_v_neg_g": 0.02951875887811184, + "block3_k_v_norm": 0.17402249574661255, + "block3_k_cos_v_neg_g": 0.058622799813747406, + "block3_v_v_norm": 0.17850875854492188, + "block3_v_cos_v_neg_g": 0.034609440714120865, + "block3_o_v_norm": 0.22479936480522156, + "block3_o_cos_v_neg_g": 0.04931569844484329, + "block3_mlp_win_v_norm": 0.26423224806785583, + "block3_mlp_win_cos_v_neg_g": 0.03501877188682556, + "block3_mlp_wout_v_norm": 0.23759663105010986, + "block3_mlp_wout_cos_v_neg_g": 0.10782089829444885, + "block7_q_v_norm": 0.2426474690437317, + "block7_q_cos_v_neg_g": 0.03294682130217552, + "block7_k_v_norm": 0.24700433015823364, + "block7_k_cos_v_neg_g": 0.07158737629652023, + "block7_v_v_norm": 0.21258988976478577, + "block7_v_cos_v_neg_g": 0.03484639152884483, + "block7_o_v_norm": 0.24808774888515472, + "block7_o_cos_v_neg_g": 0.07705511897802353, + "block7_mlp_win_v_norm": 0.27240198850631714, + "block7_mlp_win_cos_v_neg_g": 0.03751275688409805, + "block7_mlp_wout_v_norm": 0.24568423628807068, + "block7_mlp_wout_cos_v_neg_g": 0.12499386817216873, + "block11_q_v_norm": 0.24762220680713654, + "block11_q_cos_v_neg_g": 0.07832350581884384, + "block11_k_v_norm": 0.24907554686069489, + "block11_k_cos_v_neg_g": 0.10036836564540863, + "block11_v_v_norm": 0.2457425892353058, + "block11_v_cos_v_neg_g": 0.05816595256328583, + "block11_o_v_norm": 0.2488124668598175, + "block11_o_cos_v_neg_g": 0.09387410432100296, + "block11_mlp_win_v_norm": 0.2412443608045578, + "block11_mlp_win_cos_v_neg_g": 0.10170590132474899, + "block11_mlp_wout_v_norm": 0.23679304122924805, + "block11_mlp_wout_cos_v_neg_g": 0.08939504623413086, + "embed_lm_head_sharpness": 0.0005360162467695773, + "layer_1_sharpness": 0.01943119429051876, + "layer_2_sharpness": 0.081960029900074, + "layer_3_sharpness": 0.057704661041498184, + "layer_4_sharpness": 0.0030879583209753036, + "layer_5_sharpness": 0.0014336917083710432, + "layer_6_sharpness": 0.0017027819994837046, + "layer_7_sharpness": 0.001628661761060357, + "layer_8_sharpness": 0.0014335442101582885, + "layer_9_sharpness": 0.0009084899793379009, + "layer_10_sharpness": 0.0005029861931689084, + "layer_11_sharpness": 0.0005737539613619447, + "layer_12_sharpness": 0.00042931840289384127, + "block0_q_sharpness": 0.0006136717856861651, + "block0_k_sharpness": 0.0032257852144539356, + 
"block0_v_sharpness": 0.1442422717809677, + "block0_o_sharpness": 0.002179364673793316, + "block0_mlp_win_sharpness": 0.0013445373624563217, + "block0_mlp_wout_sharpness": 0.003420947352424264, + "block3_q_sharpness": 0.0002102657308569178, + "block3_k_sharpness": 0.005519694648683071, + "block3_v_sharpness": 0.005194406025111675, + "block3_o_sharpness": 0.0006839580019004643, + "block3_mlp_win_sharpness": 0.00021817210654262453, + "block3_mlp_wout_sharpness": 0.00010531828593229875, + "block7_q_sharpness": 5.432443867903203e-05, + "block7_k_sharpness": 6.190376734593883e-05, + "block7_v_sharpness": 0.003947535064071417, + "block7_o_sharpness": 0.00010609978926368058, + "block7_mlp_win_sharpness": 0.0005085947923362255, + "block7_mlp_wout_sharpness": 9.441070142202079e-05, + "block11_q_sharpness": 0.00017583328008186072, + "block11_k_sharpness": 6.539698370033875e-05, + "block11_v_sharpness": 0.000222300281166099, + "block11_o_sharpness": 4.485485624172725e-05, + "block11_mlp_win_sharpness": 0.00016211246838793159, + "block11_mlp_wout_sharpness": 0.0003907726495526731, + "sum_layer_numerators": 0.027928270911322196, + "block_diag_sharpness": 0.007641543878874911, + "cross_layer_sharpness": 0.01770001159731375 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..a060eee4a7b561adda3c8f0062923c34d6aff0f7 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.6866806745529175, + "total_l1_linf_norm": 14660.7490234375, + "total_spectral_norm": 1.6866804361343384, + "embed_lm_head_update_fnorm": 0.9752926230430603, + "embed_lm_head_max_l1_linf_norm": 0.25318339467048645, + "embed_lm_head_max_spectral_norm": 0.22993071377277374, + "layer_1_update_fnorm": 0.4245752692222595, + "layer_1_max_l1_linf_norm": 0.3086971640586853, + "layer_1_max_spectral_norm": 0.008606836199760437, + "layer_2_update_fnorm": 0.38607004284858704, + "layer_2_max_l1_linf_norm": 0.3024179935455322, + "layer_2_max_spectral_norm": 0.00859927013516426, + "layer_3_update_fnorm": 0.3655983805656433, + "layer_3_max_l1_linf_norm": 0.3002278804779053, + "layer_3_max_spectral_norm": 0.008604813367128372, + "layer_4_update_fnorm": 0.3636350631713867, + "layer_4_max_l1_linf_norm": 0.30706071853637695, + "layer_4_max_spectral_norm": 0.00860435888171196, + "layer_5_update_fnorm": 0.393616259098053, + "layer_5_max_l1_linf_norm": 0.3131999671459198, + "layer_5_max_spectral_norm": 0.008601613342761993, + "layer_6_update_fnorm": 0.3849884569644928, + "layer_6_max_l1_linf_norm": 0.3179851174354553, + "layer_6_max_spectral_norm": 0.008602776564657688, + "layer_7_update_fnorm": 0.40765807032585144, + "layer_7_max_l1_linf_norm": 0.3163653016090393, + "layer_7_max_spectral_norm": 0.00860220193862915, + "layer_8_update_fnorm": 0.3954113721847534, + "layer_8_max_l1_linf_norm": 0.31764137744903564, + "layer_8_max_spectral_norm": 0.008601999841630459, + "layer_9_update_fnorm": 0.40935850143432617, + "layer_9_max_l1_linf_norm": 0.32136163115501404, + "layer_9_max_spectral_norm": 0.008604156784713268, + "layer_10_update_fnorm": 0.40740102529525757, + "layer_10_max_l1_linf_norm": 0.3193710446357727, + 
"layer_10_max_spectral_norm": 0.008601422421634197, + "layer_11_update_fnorm": 0.4131288528442383, + "layer_11_max_l1_linf_norm": 0.3211837708950043, + "layer_11_max_spectral_norm": 0.008603386580944061, + "layer_12_update_fnorm": 0.41058221459388733, + "layer_12_max_l1_linf_norm": 0.32646045088768005, + "layer_12_max_spectral_norm": 0.008601553738117218, + "block0_q_update_fnorm": 0.17581897974014282, + "block0_q_max_l1_linf_norm": 0.14814144372940063, + "block0_q_max_spectral_norm": 0.008600710891187191, + "block0_k_update_fnorm": 0.175540030002594, + "block0_k_max_l1_linf_norm": 0.14784207940101624, + "block0_k_max_spectral_norm": 0.008599954657256603, + "block0_v_update_fnorm": 0.1484147608280182, + "block0_v_max_l1_linf_norm": 0.13730277121067047, + "block0_v_max_spectral_norm": 0.008596387691795826, + "block0_o_update_fnorm": 0.15301872789859772, + "block0_o_max_l1_linf_norm": 0.13035421073436737, + "block0_o_max_spectral_norm": 0.008596544153988361, + "block0_mlp_win_update_fnorm": 0.193588525056839, + "block0_mlp_win_max_l1_linf_norm": 0.09750872850418091, + "block0_mlp_win_max_spectral_norm": 0.008606836199760437, + "block0_mlp_wout_update_fnorm": 0.188668355345726, + "block0_mlp_wout_max_l1_linf_norm": 0.3086971640586853, + "block0_mlp_wout_max_spectral_norm": 0.008595590479671955, + "block3_q_update_fnorm": 0.13487356901168823, + "block3_q_max_l1_linf_norm": 0.15209323167800903, + "block3_q_max_spectral_norm": 0.00859644915908575, + "block3_k_update_fnorm": 0.12076684832572937, + "block3_k_max_l1_linf_norm": 0.15599966049194336, + "block3_k_max_spectral_norm": 0.00859464704990387, + "block3_v_update_fnorm": 0.12310253828763962, + "block3_v_max_l1_linf_norm": 0.13796326518058777, + "block3_v_max_spectral_norm": 0.008593197911977768, + "block3_o_update_fnorm": 0.12949225306510925, + "block3_o_max_l1_linf_norm": 0.10954950749874115, + "block3_o_max_spectral_norm": 0.008593409322202206, + "block3_mlp_win_update_fnorm": 0.17962028086185455, + "block3_mlp_win_max_l1_linf_norm": 0.10504093766212463, + "block3_mlp_win_max_spectral_norm": 0.00860435888171196, + "block3_mlp_wout_update_fnorm": 0.18765556812286377, + "block3_mlp_wout_max_l1_linf_norm": 0.30706071853637695, + "block3_mlp_wout_max_spectral_norm": 0.008601034991443157, + "block7_q_update_fnorm": 0.14153480529785156, + "block7_q_max_l1_linf_norm": 0.15358629822731018, + "block7_q_max_spectral_norm": 0.00859528873115778, + "block7_k_update_fnorm": 0.13893334567546844, + "block7_k_max_l1_linf_norm": 0.151719331741333, + "block7_k_max_spectral_norm": 0.00859412644058466, + "block7_v_update_fnorm": 0.14387501776218414, + "block7_v_max_l1_linf_norm": 0.149733304977417, + "block7_v_max_spectral_norm": 0.008594757877290249, + "block7_o_update_fnorm": 0.1491888463497162, + "block7_o_max_l1_linf_norm": 0.12588343024253845, + "block7_o_max_spectral_norm": 0.008595248684287071, + "block7_mlp_win_update_fnorm": 0.1912657916545868, + "block7_mlp_win_max_l1_linf_norm": 0.11308740079402924, + "block7_mlp_win_max_spectral_norm": 0.008601175621151924, + "block7_mlp_wout_update_fnorm": 0.1934993714094162, + "block7_mlp_wout_max_l1_linf_norm": 0.31764137744903564, + "block7_mlp_wout_max_spectral_norm": 0.008601999841630459, + "block11_q_update_fnorm": 0.15912246704101562, + "block11_q_max_l1_linf_norm": 0.14815030992031097, + "block11_q_max_spectral_norm": 0.008599165827035904, + "block11_k_update_fnorm": 0.15913759171962738, + "block11_k_max_l1_linf_norm": 0.14569544792175293, + "block11_k_max_spectral_norm": 0.008597573265433311, + 
"block11_v_update_fnorm": 0.14362044632434845, + "block11_v_max_l1_linf_norm": 0.1456383913755417, + "block11_v_max_spectral_norm": 0.008594579063355923, + "block11_o_update_fnorm": 0.1504366397857666, + "block11_o_max_l1_linf_norm": 0.1282789558172226, + "block11_o_max_spectral_norm": 0.008594353683292866, + "block11_mlp_win_update_fnorm": 0.1885214000940323, + "block11_mlp_win_max_l1_linf_norm": 0.10374876111745834, + "block11_mlp_win_max_spectral_norm": 0.008599042892456055, + "block11_mlp_wout_update_fnorm": 0.19772237539291382, + "block11_mlp_wout_max_l1_linf_norm": 0.32646045088768005, + "block11_mlp_wout_max_spectral_norm": 0.008601553738117218, + "total_sharpness": 0.03759905695915222, + "block_total_sharpness": 0.052991319447755814, + "v_norm_block": 1.3761159181594849, + "v_T_H_v_block": 0.10034940391778946, + "v_norm": 1.6866806745529175, + "ip_v_neg_g_hvp": 0.09713508188724518, + "cos_v_neg_g_hvp": 0.05861964076757431, + "g_hvp_norm": 0.9824265837669373, + "ip_v_neg_g_t": 0.09759698063135147, + "cos_v_neg_g_t": 0.06257890909910202, + "g_t_norm": 0.9246460795402527, + "g_norm": 0.9824265837669373, + "hv_norm": 1.6159712076187134, + "cos_v_hv": 0.039244260638952255, + "hg_norm": 49.301353454589844, + "cos_g_hg": 0.7457922697067261, + "v_parallel_norm": 0.0041900756768882275, + "v_perp_norm": 1.6866754293441772, + "embed_lm_head_v_norm": 0.9752926230430603, + "embed_lm_head_cos_v_neg_g": 0.09257175773382187, + "layer_1_v_norm": 0.4245752692222595, + "layer_1_cos_v_neg_g": 0.06028647720813751, + "layer_2_v_norm": 0.38607004284858704, + "layer_2_cos_v_neg_g": 0.06441844254732132, + "layer_3_v_norm": 0.3655983805656433, + "layer_3_cos_v_neg_g": 0.057656534016132355, + "layer_4_v_norm": 0.3636350631713867, + "layer_4_cos_v_neg_g": 0.05998535826802254, + "layer_5_v_norm": 0.393616259098053, + "layer_5_cos_v_neg_g": 0.06628836691379547, + "layer_6_v_norm": 0.3849884867668152, + "layer_6_cos_v_neg_g": 0.0776422843337059, + "layer_7_v_norm": 0.40765807032585144, + "layer_7_cos_v_neg_g": 0.08110567927360535, + "layer_8_v_norm": 0.3954113721847534, + "layer_8_cos_v_neg_g": 0.08469084650278091, + "layer_9_v_norm": 0.40935850143432617, + "layer_9_cos_v_neg_g": 0.08084622770547867, + "layer_10_v_norm": 0.40740102529525757, + "layer_10_cos_v_neg_g": 0.08148879557847977, + "layer_11_v_norm": 0.4131288528442383, + "layer_11_cos_v_neg_g": 0.07616684585809708, + "layer_12_v_norm": 0.41058221459388733, + "layer_12_cos_v_neg_g": 0.0660557672381401, + "block0_q_v_norm": 0.17581897974014282, + "block0_q_cos_v_neg_g": 0.10441546887159348, + "block0_k_v_norm": 0.175540030002594, + "block0_k_cos_v_neg_g": 0.09560776501893997, + "block0_v_v_norm": 0.1484147608280182, + "block0_v_cos_v_neg_g": 0.045481253415346146, + "block0_o_v_norm": 0.15301872789859772, + "block0_o_cos_v_neg_g": 0.0823483094573021, + "block0_mlp_win_v_norm": 0.193588525056839, + "block0_mlp_win_cos_v_neg_g": 0.09584075957536697, + "block0_mlp_wout_v_norm": 0.188668355345726, + "block0_mlp_wout_cos_v_neg_g": 0.09493184834718704, + "block3_q_v_norm": 0.13487356901168823, + "block3_q_cos_v_neg_g": 0.06348461657762527, + "block3_k_v_norm": 0.12076684832572937, + "block3_k_cos_v_neg_g": 0.06890271604061127, + "block3_v_v_norm": 0.12310253828763962, + "block3_v_cos_v_neg_g": 0.052171118557453156, + "block3_o_v_norm": 0.12949225306510925, + "block3_o_cos_v_neg_g": 0.06577602028846741, + "block3_mlp_win_v_norm": 0.17962028086185455, + "block3_mlp_win_cos_v_neg_g": 0.07204004377126694, + "block3_mlp_wout_v_norm": 0.18765556812286377, + 
"block3_mlp_wout_cos_v_neg_g": 0.08242723345756531, + "block7_q_v_norm": 0.14153480529785156, + "block7_q_cos_v_neg_g": 0.08980730921030045, + "block7_k_v_norm": 0.13893334567546844, + "block7_k_cos_v_neg_g": 0.09267669916152954, + "block7_v_v_norm": 0.14387501776218414, + "block7_v_cos_v_neg_g": 0.07683909684419632, + "block7_o_v_norm": 0.1491888463497162, + "block7_o_cos_v_neg_g": 0.08355976641178131, + "block7_mlp_win_v_norm": 0.1912657916545868, + "block7_mlp_win_cos_v_neg_g": 0.09812894463539124, + "block7_mlp_wout_v_norm": 0.1934993714094162, + "block7_mlp_wout_cos_v_neg_g": 0.09829927980899811, + "block11_q_v_norm": 0.15912246704101562, + "block11_q_cos_v_neg_g": 0.10001823306083679, + "block11_k_v_norm": 0.15913759171962738, + "block11_k_cos_v_neg_g": 0.10189532488584518, + "block11_v_v_norm": 0.14362044632434845, + "block11_v_cos_v_neg_g": 0.08959558606147766, + "block11_o_v_norm": 0.1504366397857666, + "block11_o_cos_v_neg_g": 0.0854596346616745, + "block11_mlp_win_v_norm": 0.1885214000940323, + "block11_mlp_win_cos_v_neg_g": 0.08398821204900742, + "block11_mlp_wout_v_norm": 0.19772237539291382, + "block11_mlp_wout_cos_v_neg_g": 0.07331568002700806, + "embed_lm_head_sharpness": 0.0007947832345962524, + "layer_1_sharpness": 0.027383940294384956, + "layer_2_sharpness": 0.01556574460119009, + "layer_3_sharpness": 0.014752116985619068, + "layer_4_sharpness": 0.009895951487123966, + "layer_5_sharpness": 0.008368514478206635, + "layer_6_sharpness": 0.006700989790260792, + "layer_7_sharpness": 0.00493606785312295, + "layer_8_sharpness": 0.0034294864162802696, + "layer_9_sharpness": 0.0024552312679588795, + "layer_10_sharpness": 0.0020415037870407104, + "layer_11_sharpness": 0.0018808129243552685, + "layer_12_sharpness": 0.0018433124059811234, + "block0_q_sharpness": 0.0013558752834796906, + "block0_k_sharpness": 0.001711169839836657, + "block0_v_sharpness": 0.0069901710376143456, + "block0_o_sharpness": 0.00875786878168583, + "block0_mlp_win_sharpness": 0.005250770598649979, + "block0_mlp_wout_sharpness": 0.0207535233348608, + "block3_q_sharpness": 0.00028102457872591913, + "block3_k_sharpness": 0.0073773059993982315, + "block3_v_sharpness": 0.004508539102971554, + "block3_o_sharpness": 0.006547422613948584, + "block3_mlp_win_sharpness": 0.0011106153251603246, + "block3_mlp_wout_sharpness": 0.002914529526606202, + "block7_q_sharpness": 0.0003447450289968401, + "block7_k_sharpness": 0.0006613811710849404, + "block7_v_sharpness": 0.001669186633080244, + "block7_o_sharpness": 0.0012495493283495307, + "block7_mlp_win_sharpness": 0.0005563167505897582, + "block7_mlp_wout_sharpness": 0.0020152167417109013, + "block11_q_sharpness": 7.117052155081183e-05, + "block11_k_sharpness": 9.398017573403195e-05, + "block11_v_sharpness": 0.0006586051313206553, + "block11_o_sharpness": 0.00018428120529279113, + "block11_mlp_win_sharpness": 0.0003668077406473458, + "block11_mlp_wout_sharpness": 0.0019267960451543331, + "sum_layer_numerators": 0.015565050296508518, + "block_diag_sharpness": 0.008219407101132179, + "cross_layer_sharpness": 0.04477191234662364 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..eae75814733529eba0185cc86266e8cecffd0ca9 --- /dev/null +++ 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3542675971984863, + "total_l1_linf_norm": 20059.51171875, + "total_spectral_norm": 2.3542675971984863, + "embed_lm_head_update_fnorm": 1.3419034481048584, + "embed_lm_head_max_l1_linf_norm": 0.33799487352371216, + "embed_lm_head_max_spectral_norm": 0.20054101943969727, + "layer_1_update_fnorm": 0.5512250065803528, + "layer_1_max_l1_linf_norm": 0.42758235335350037, + "layer_1_max_spectral_norm": 0.012043178081512451, + "layer_2_update_fnorm": 0.40839147567749023, + "layer_2_max_l1_linf_norm": 0.37644293904304504, + "layer_2_max_spectral_norm": 0.012033438310027122, + "layer_3_update_fnorm": 0.42177319526672363, + "layer_3_max_l1_linf_norm": 0.46360528469085693, + "layer_3_max_spectral_norm": 0.019071808084845543, + "layer_4_update_fnorm": 0.5298452973365784, + "layer_4_max_l1_linf_norm": 0.4008173942565918, + "layer_4_max_spectral_norm": 0.012042566202580929, + "layer_5_update_fnorm": 0.5823493003845215, + "layer_5_max_l1_linf_norm": 0.404258668422699, + "layer_5_max_spectral_norm": 0.012043873779475689, + "layer_6_update_fnorm": 0.5900379419326782, + "layer_6_max_l1_linf_norm": 0.40701788663864136, + "layer_6_max_spectral_norm": 0.0120475422590971, + "layer_7_update_fnorm": 0.5995032787322998, + "layer_7_max_l1_linf_norm": 0.40689176321029663, + "layer_7_max_spectral_norm": 0.012050827965140343, + "layer_8_update_fnorm": 0.5998950600624084, + "layer_8_max_l1_linf_norm": 0.4054872989654541, + "layer_8_max_spectral_norm": 0.012056203559041023, + "layer_9_update_fnorm": 0.5959482192993164, + "layer_9_max_l1_linf_norm": 0.40565305948257446, + "layer_9_max_spectral_norm": 0.012047315947711468, + "layer_10_update_fnorm": 0.5961004495620728, + "layer_10_max_l1_linf_norm": 0.40953975915908813, + "layer_10_max_spectral_norm": 0.012057645246386528, + "layer_11_update_fnorm": 0.579879105091095, + "layer_11_max_l1_linf_norm": 0.41737812757492065, + "layer_11_max_spectral_norm": 0.012044842354953289, + "layer_12_update_fnorm": 0.5994218587875366, + "layer_12_max_l1_linf_norm": 0.41740432381629944, + "layer_12_max_spectral_norm": 0.012044384144246578, + "block0_q_update_fnorm": 0.24305716156959534, + "block0_q_max_l1_linf_norm": 0.20910581946372986, + "block0_q_max_spectral_norm": 0.012043178081512451, + "block0_k_update_fnorm": 0.23493565618991852, + "block0_k_max_l1_linf_norm": 0.2059105783700943, + "block0_k_max_spectral_norm": 0.01203971728682518, + "block0_v_update_fnorm": 0.12725284695625305, + "block0_v_max_l1_linf_norm": 0.14143911004066467, + "block0_v_max_spectral_norm": 0.012024822644889355, + "block0_o_update_fnorm": 0.20163966715335846, + "block0_o_max_l1_linf_norm": 0.17726555466651917, + "block0_o_max_spectral_norm": 0.012030736543238163, + "block0_mlp_win_update_fnorm": 0.2548616826534271, + "block0_mlp_win_max_l1_linf_norm": 0.18318209052085876, + "block0_mlp_win_max_spectral_norm": 0.01203713659197092, + "block0_mlp_wout_update_fnorm": 0.2600823640823364, + "block0_mlp_wout_max_l1_linf_norm": 0.42758235335350037, + "block0_mlp_wout_max_spectral_norm": 0.0120386416092515, + "block3_q_update_fnorm": 0.1935018002986908, + "block3_q_max_l1_linf_norm": 0.20569491386413574, + "block3_q_max_spectral_norm": 0.01203621830791235, + "block3_k_update_fnorm": 0.19771717488765717, + "block3_k_max_l1_linf_norm": 0.2092239260673523, + "block3_k_max_spectral_norm": 0.01203857734799385, + "block3_v_update_fnorm": 0.16881762444972992, 
+ "block3_v_max_l1_linf_norm": 0.18032673001289368, + "block3_v_max_spectral_norm": 0.012028155848383904, + "block3_o_update_fnorm": 0.22068022191524506, + "block3_o_max_l1_linf_norm": 0.18652023375034332, + "block3_o_max_spectral_norm": 0.01203675102442503, + "block3_mlp_win_update_fnorm": 0.2630237340927124, + "block3_mlp_win_max_l1_linf_norm": 0.17122013866901398, + "block3_mlp_win_max_spectral_norm": 0.012042566202580929, + "block3_mlp_wout_update_fnorm": 0.23991313576698303, + "block3_mlp_wout_max_l1_linf_norm": 0.3989992141723633, + "block3_mlp_wout_max_spectral_norm": 0.0114009203389287, + "block7_q_update_fnorm": 0.24249570071697235, + "block7_q_max_l1_linf_norm": 0.20659571886062622, + "block7_q_max_spectral_norm": 0.012043234892189503, + "block7_k_update_fnorm": 0.2469981163740158, + "block7_k_max_l1_linf_norm": 0.21055081486701965, + "block7_k_max_spectral_norm": 0.012042353861033916, + "block7_v_update_fnorm": 0.20815080404281616, + "block7_v_max_l1_linf_norm": 0.21108631789684296, + "block7_v_max_spectral_norm": 0.012035508640110493, + "block7_o_update_fnorm": 0.24820445477962494, + "block7_o_max_l1_linf_norm": 0.20656536519527435, + "block7_o_max_spectral_norm": 0.012043997645378113, + "block7_mlp_win_update_fnorm": 0.27380478382110596, + "block7_mlp_win_max_l1_linf_norm": 0.16018737852573395, + "block7_mlp_win_max_spectral_norm": 0.012056203559041023, + "block7_mlp_wout_update_fnorm": 0.24502110481262207, + "block7_mlp_wout_max_l1_linf_norm": 0.4054872989654541, + "block7_mlp_wout_max_spectral_norm": 0.011371767148375511, + "block11_q_update_fnorm": 0.24756135046482086, + "block11_q_max_l1_linf_norm": 0.2138729989528656, + "block11_q_max_spectral_norm": 0.01203969307243824, + "block11_k_update_fnorm": 0.24989855289459229, + "block11_k_max_l1_linf_norm": 0.21434354782104492, + "block11_k_max_spectral_norm": 0.012036077678203583, + "block11_v_update_fnorm": 0.2457025796175003, + "block11_v_max_l1_linf_norm": 0.20745855569839478, + "block11_v_max_spectral_norm": 0.012044384144246578, + "block11_o_update_fnorm": 0.2483680248260498, + "block11_o_max_l1_linf_norm": 0.21033644676208496, + "block11_o_max_spectral_norm": 0.012041409499943256, + "block11_mlp_win_update_fnorm": 0.23905262351036072, + "block11_mlp_win_max_l1_linf_norm": 0.16534465551376343, + "block11_mlp_win_max_spectral_norm": 0.011394031345844269, + "block11_mlp_wout_update_fnorm": 0.2370615452528, + "block11_mlp_wout_max_l1_linf_norm": 0.39211517572402954, + "block11_mlp_wout_max_spectral_norm": 0.011363240890204906, + "total_sharpness": 0.0077293552458286285, + "block_total_sharpness": 0.010315334424376488, + "v_norm_block": 1.934391736984253, + "v_T_H_v_block": 0.03859865292906761, + "v_norm": 2.3542675971984863, + "ip_v_neg_g_hvp": 0.048289746046066284, + "cos_v_neg_g_hvp": 0.025974499061703682, + "g_hvp_norm": 0.7896813750267029, + "ip_v_neg_g_t": 0.05615834891796112, + "cos_v_neg_g_t": 0.03024226240813732, + "g_t_norm": 0.7887588143348694, + "g_norm": 0.7896813750267029, + "hv_norm": 1.7265108823776245, + "cos_v_hv": 0.010539737530052662, + "hg_norm": 666.4644775390625, + "cos_g_hg": 0.09701836109161377, + "v_parallel_norm": 0.006752686109393835, + "v_perp_norm": 2.3542580604553223, + "embed_lm_head_v_norm": 1.3419034481048584, + "embed_lm_head_cos_v_neg_g": 0.03220697119832039, + "layer_1_v_norm": 0.5512250065803528, + "layer_1_cos_v_neg_g": 0.01549351867288351, + "layer_2_v_norm": 0.40839147567749023, + "layer_2_cos_v_neg_g": 0.02712395042181015, + "layer_3_v_norm": 0.421773225069046, + "layer_3_cos_v_neg_g": 
0.0368024967610836, + "layer_4_v_norm": 0.5298452973365784, + "layer_4_cos_v_neg_g": 0.028238091617822647, + "layer_5_v_norm": 0.5823493003845215, + "layer_5_cos_v_neg_g": 0.028838710859417915, + "layer_6_v_norm": 0.5900379419326782, + "layer_6_cos_v_neg_g": 0.031064629554748535, + "layer_7_v_norm": 0.5995032787322998, + "layer_7_cos_v_neg_g": 0.029503239318728447, + "layer_8_v_norm": 0.5998950600624084, + "layer_8_cos_v_neg_g": 0.030752597376704216, + "layer_9_v_norm": 0.5959482192993164, + "layer_9_cos_v_neg_g": 0.030889013782143593, + "layer_10_v_norm": 0.5961004495620728, + "layer_10_cos_v_neg_g": 0.03453659638762474, + "layer_11_v_norm": 0.579879105091095, + "layer_11_cos_v_neg_g": 0.04360722005367279, + "layer_12_v_norm": 0.5994218587875366, + "layer_12_cos_v_neg_g": 0.0735057145357132, + "block0_q_v_norm": 0.24305716156959534, + "block0_q_cos_v_neg_g": 0.046465836465358734, + "block0_k_v_norm": 0.23493565618991852, + "block0_k_cos_v_neg_g": 0.03762800991535187, + "block0_v_v_norm": 0.12725284695625305, + "block0_v_cos_v_neg_g": 0.028277108445763588, + "block0_o_v_norm": 0.20163966715335846, + "block0_o_cos_v_neg_g": 0.03944767266511917, + "block0_mlp_win_v_norm": 0.2548616826534271, + "block0_mlp_win_cos_v_neg_g": 0.031645312905311584, + "block0_mlp_wout_v_norm": 0.2600823640823364, + "block0_mlp_wout_cos_v_neg_g": 0.05076367035508156, + "block3_q_v_norm": 0.1935018002986908, + "block3_q_cos_v_neg_g": 0.025708181783556938, + "block3_k_v_norm": 0.19771717488765717, + "block3_k_cos_v_neg_g": 0.06307411938905716, + "block3_v_v_norm": 0.16881762444972992, + "block3_v_cos_v_neg_g": 0.029523378238081932, + "block3_o_v_norm": 0.22068022191524506, + "block3_o_cos_v_neg_g": 0.03935116529464722, + "block3_mlp_win_v_norm": 0.2630237340927124, + "block3_mlp_win_cos_v_neg_g": 0.030801065266132355, + "block3_mlp_wout_v_norm": 0.23991313576698303, + "block3_mlp_wout_cos_v_neg_g": 0.10588410496711731, + "block7_q_v_norm": 0.24249570071697235, + "block7_q_cos_v_neg_g": 0.033181723207235336, + "block7_k_v_norm": 0.2469981163740158, + "block7_k_cos_v_neg_g": 0.07416441291570663, + "block7_v_v_norm": 0.20815080404281616, + "block7_v_cos_v_neg_g": 0.03127492219209671, + "block7_o_v_norm": 0.24820445477962494, + "block7_o_cos_v_neg_g": 0.08141446858644485, + "block7_mlp_win_v_norm": 0.27380478382110596, + "block7_mlp_win_cos_v_neg_g": 0.03868890553712845, + "block7_mlp_wout_v_norm": 0.24502110481262207, + "block7_mlp_wout_cos_v_neg_g": 0.12779027223587036, + "block11_q_v_norm": 0.24756135046482086, + "block11_q_cos_v_neg_g": 0.08039586991071701, + "block11_k_v_norm": 0.24989855289459229, + "block11_k_cos_v_neg_g": 0.10201992839574814, + "block11_v_v_norm": 0.2457025796175003, + "block11_v_cos_v_neg_g": 0.05720391497015953, + "block11_o_v_norm": 0.2483680248260498, + "block11_o_cos_v_neg_g": 0.0875595360994339, + "block11_mlp_win_v_norm": 0.23905262351036072, + "block11_mlp_win_cos_v_neg_g": 0.1002015471458435, + "block11_mlp_wout_v_norm": 0.2370615452528, + "block11_mlp_wout_cos_v_neg_g": 0.08061648905277252, + "embed_lm_head_sharpness": 0.00045552305527962744, + "layer_1_sharpness": 0.011692606844007969, + "layer_2_sharpness": 0.012497407384216785, + "layer_3_sharpness": 0.009901466779410839, + "layer_4_sharpness": 0.001756052952259779, + "layer_5_sharpness": 0.0009429446654394269, + "layer_6_sharpness": 0.0012049791403114796, + "layer_7_sharpness": 0.0012729204026982188, + "layer_8_sharpness": 0.001153741148300469, + "layer_9_sharpness": 0.0008297818130813539, + "layer_10_sharpness": 
0.00047086464473977685, + "layer_11_sharpness": 0.0005690424586646259, + "layer_12_sharpness": 0.0004375312419142574, + "block0_q_sharpness": 0.00039457998354919255, + "block0_k_sharpness": 0.00022711220663040876, + "block0_v_sharpness": 0.049211956560611725, + "block0_o_sharpness": 0.014709225855767727, + "block0_mlp_win_sharpness": 0.0013046720996499062, + "block0_mlp_wout_sharpness": 0.001852242974564433, + "block3_q_sharpness": 7.500288484152406e-05, + "block3_k_sharpness": 0.0021901275031268597, + "block3_v_sharpness": 0.004950013943016529, + "block3_o_sharpness": 0.00043508916860446334, + "block3_mlp_win_sharpness": 0.000127714840346016, + "block3_mlp_wout_sharpness": 6.633520388277248e-05, + "block7_q_sharpness": 6.978134479140863e-05, + "block7_k_sharpness": 7.052025466691703e-05, + "block7_v_sharpness": 0.0032075163908302784, + "block7_o_sharpness": 9.805901208892465e-05, + "block7_mlp_win_sharpness": 0.00039030847256071866, + "block7_mlp_wout_sharpness": 8.761320350458845e-05, + "block11_q_sharpness": 5.448602314572781e-05, + "block11_k_sharpness": 6.914397818036377e-05, + "block11_v_sharpness": 0.00019256102677900344, + "block11_o_sharpness": 3.6819761589867994e-05, + "block11_mlp_win_sharpness": 0.00023878684442024678, + "block11_mlp_wout_sharpness": 0.0005743905203416944, + "sum_layer_numerators": 0.010314087772482513, + "block_diag_sharpness": 0.0027563982541522806, + "cross_layer_sharpness": 0.007558936170224207 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..6b9b54ba0de1c4594c539c044342625f304cacc2 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_5500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3187530040740967, + "total_l1_linf_norm": 19675.123046875, + "total_spectral_norm": 2.3187530040740967, + "embed_lm_head_update_fnorm": 1.3461315631866455, + "embed_lm_head_max_l1_linf_norm": 0.379180371761322, + "embed_lm_head_max_spectral_norm": 0.20230288803577423, + "layer_1_update_fnorm": 0.5203242301940918, + "layer_1_max_l1_linf_norm": 0.39067453145980835, + "layer_1_max_spectral_norm": 0.012041403912007809, + "layer_2_update_fnorm": 0.35199597477912903, + "layer_2_max_l1_linf_norm": 0.4385974407196045, + "layer_2_max_spectral_norm": 0.01775587722659111, + "layer_3_update_fnorm": 0.4003979563713074, + "layer_3_max_l1_linf_norm": 0.43052828311920166, + "layer_3_max_spectral_norm": 0.015195559710264206, + "layer_4_update_fnorm": 0.4867278039455414, + "layer_4_max_l1_linf_norm": 0.42790332436561584, + "layer_4_max_spectral_norm": 0.016280503943562508, + "layer_5_update_fnorm": 0.573208212852478, + "layer_5_max_l1_linf_norm": 0.4013664722442627, + "layer_5_max_spectral_norm": 0.012042186222970486, + "layer_6_update_fnorm": 0.5705405473709106, + "layer_6_max_l1_linf_norm": 0.40520697832107544, + "layer_6_max_spectral_norm": 0.012045792303979397, + "layer_7_update_fnorm": 0.5959937572479248, + "layer_7_max_l1_linf_norm": 0.40548306703567505, + "layer_7_max_spectral_norm": 0.012045582756400108, + "layer_8_update_fnorm": 0.5996410846710205, + "layer_8_max_l1_linf_norm": 0.40636342763900757, + "layer_8_max_spectral_norm": 0.012051206082105637, + "layer_9_update_fnorm": 0.5961945056915283, 
+ "layer_9_max_l1_linf_norm": 0.4074001610279083, + "layer_9_max_spectral_norm": 0.01205352135002613, + "layer_10_update_fnorm": 0.5958397388458252, + "layer_10_max_l1_linf_norm": 0.413336843252182, + "layer_10_max_spectral_norm": 0.012052981182932854, + "layer_11_update_fnorm": 0.5783216953277588, + "layer_11_max_l1_linf_norm": 0.4164164364337921, + "layer_11_max_spectral_norm": 0.01204605307430029, + "layer_12_update_fnorm": 0.5990771055221558, + "layer_12_max_l1_linf_norm": 0.4194600284099579, + "layer_12_max_spectral_norm": 0.012046552263200283, + "block0_q_update_fnorm": 0.2452743500471115, + "block0_q_max_l1_linf_norm": 0.2082553207874298, + "block0_q_max_spectral_norm": 0.01203952357172966, + "block0_k_update_fnorm": 0.24345873296260834, + "block0_k_max_l1_linf_norm": 0.20880632102489471, + "block0_k_max_spectral_norm": 0.012041403912007809, + "block0_v_update_fnorm": 0.10854385048151016, + "block0_v_max_l1_linf_norm": 0.129233255982399, + "block0_v_max_spectral_norm": 0.012026340700685978, + "block0_o_update_fnorm": 0.17819052934646606, + "block0_o_max_l1_linf_norm": 0.16826340556144714, + "block0_o_max_spectral_norm": 0.01203041523694992, + "block0_mlp_win_update_fnorm": 0.22420534491539001, + "block0_mlp_win_max_l1_linf_norm": 0.17660382390022278, + "block0_mlp_win_max_spectral_norm": 0.012031594291329384, + "block0_mlp_wout_update_fnorm": 0.2394041270017624, + "block0_mlp_wout_max_l1_linf_norm": 0.39067453145980835, + "block0_mlp_wout_max_spectral_norm": 0.012033256702125072, + "block3_q_update_fnorm": 0.14236919581890106, + "block3_q_max_l1_linf_norm": 0.18379738926887512, + "block3_q_max_spectral_norm": 0.01202580239623785, + "block3_k_update_fnorm": 0.15866319835186005, + "block3_k_max_l1_linf_norm": 0.19458623230457306, + "block3_k_max_spectral_norm": 0.012028810568153858, + "block3_v_update_fnorm": 0.1532628983259201, + "block3_v_max_l1_linf_norm": 0.16691817343235016, + "block3_v_max_spectral_norm": 0.012026840820908546, + "block3_o_update_fnorm": 0.21058841049671173, + "block3_o_max_l1_linf_norm": 0.17741000652313232, + "block3_o_max_spectral_norm": 0.012033636681735516, + "block3_mlp_win_update_fnorm": 0.25849276781082153, + "block3_mlp_win_max_l1_linf_norm": 0.17350620031356812, + "block3_mlp_win_max_spectral_norm": 0.01204276829957962, + "block3_mlp_wout_update_fnorm": 0.23733434081077576, + "block3_mlp_wout_max_l1_linf_norm": 0.3933477997779846, + "block3_mlp_wout_max_spectral_norm": 0.011370647698640823, + "block7_q_update_fnorm": 0.24069282412528992, + "block7_q_max_l1_linf_norm": 0.20574289560317993, + "block7_q_max_spectral_norm": 0.01204562745988369, + "block7_k_update_fnorm": 0.24661873281002045, + "block7_k_max_l1_linf_norm": 0.212936669588089, + "block7_k_max_spectral_norm": 0.012039809487760067, + "block7_v_update_fnorm": 0.20409823954105377, + "block7_v_max_l1_linf_norm": 0.21175210177898407, + "block7_v_max_spectral_norm": 0.012034759856760502, + "block7_o_update_fnorm": 0.24848738312721252, + "block7_o_max_l1_linf_norm": 0.20855405926704407, + "block7_o_max_spectral_norm": 0.012051206082105637, + "block7_mlp_win_update_fnorm": 0.27804574370384216, + "block7_mlp_win_max_l1_linf_norm": 0.16250059008598328, + "block7_mlp_win_max_spectral_norm": 0.012048297561705112, + "block7_mlp_wout_update_fnorm": 0.24487638473510742, + "block7_mlp_wout_max_l1_linf_norm": 0.40636342763900757, + "block7_mlp_wout_max_spectral_norm": 0.011359404772520065, + "block11_q_update_fnorm": 0.24731889367103577, + "block11_q_max_l1_linf_norm": 0.21505458652973175, + 
"block11_q_max_spectral_norm": 0.012040333822369576, + "block11_k_update_fnorm": 0.2488982081413269, + "block11_k_max_l1_linf_norm": 0.21335116028785706, + "block11_k_max_spectral_norm": 0.012036053463816643, + "block11_v_update_fnorm": 0.24545465409755707, + "block11_v_max_l1_linf_norm": 0.20660966634750366, + "block11_v_max_spectral_norm": 0.012045372277498245, + "block11_o_update_fnorm": 0.24818481504917145, + "block11_o_max_l1_linf_norm": 0.21031364798545837, + "block11_o_max_spectral_norm": 0.012046552263200283, + "block11_mlp_win_update_fnorm": 0.2341965287923813, + "block11_mlp_win_max_l1_linf_norm": 0.17774604260921478, + "block11_mlp_win_max_spectral_norm": 0.011351215653121471, + "block11_mlp_wout_update_fnorm": 0.24269990622997284, + "block11_mlp_wout_max_l1_linf_norm": 0.41617465019226074, + "block11_mlp_wout_max_spectral_norm": 0.011977559886872768, + "total_sharpness": 0.010599653236567974, + "block_total_sharpness": 0.013432052917778492, + "v_norm_block": 1.888000249862671, + "v_T_H_v_block": 0.04787915572524071, + "v_norm": 2.3187530040740967, + "ip_v_neg_g_hvp": 0.0512721948325634, + "cos_v_neg_g_hvp": 0.019415978342294693, + "g_hvp_norm": 1.1388541460037231, + "ip_v_neg_g_t": 0.05918869376182556, + "cos_v_neg_g_t": 0.01977350190281868, + "g_t_norm": 1.2909239530563354, + "g_norm": 1.1388541460037231, + "hv_norm": 2.808229446411133, + "cos_v_hv": 0.008752125315368176, + "hg_norm": 2142.00244140625, + "cos_g_hg": 0.1854015439748764, + "v_parallel_norm": 0.007976164110004902, + "v_perp_norm": 2.318739414215088, + "embed_lm_head_v_norm": 1.3461315631866455, + "embed_lm_head_cos_v_neg_g": 0.01358799822628498, + "layer_1_v_norm": 0.5203242301940918, + "layer_1_cos_v_neg_g": 0.020721321925520897, + "layer_2_v_norm": 0.35199597477912903, + "layer_2_cos_v_neg_g": 0.033416811376810074, + "layer_3_v_norm": 0.40039798617362976, + "layer_3_cos_v_neg_g": 0.030987322330474854, + "layer_4_v_norm": 0.4867278039455414, + "layer_4_cos_v_neg_g": 0.02717588283121586, + "layer_5_v_norm": 0.573208212852478, + "layer_5_cos_v_neg_g": 0.028707079589366913, + "layer_6_v_norm": 0.5705404877662659, + "layer_6_cos_v_neg_g": 0.02874397300183773, + "layer_7_v_norm": 0.5959937572479248, + "layer_7_cos_v_neg_g": 0.028034502640366554, + "layer_8_v_norm": 0.5996410846710205, + "layer_8_cos_v_neg_g": 0.02574995532631874, + "layer_9_v_norm": 0.5961945056915283, + "layer_9_cos_v_neg_g": 0.027408065274357796, + "layer_10_v_norm": 0.5958397388458252, + "layer_10_cos_v_neg_g": 0.031335219740867615, + "layer_11_v_norm": 0.5783216953277588, + "layer_11_cos_v_neg_g": 0.040776822715997696, + "layer_12_v_norm": 0.5990771055221558, + "layer_12_cos_v_neg_g": 0.075995072722435, + "block0_q_v_norm": 0.2452743500471115, + "block0_q_cos_v_neg_g": 0.04555423930287361, + "block0_k_v_norm": 0.24345873296260834, + "block0_k_cos_v_neg_g": 0.03641018643975258, + "block0_v_v_norm": 0.10854385048151016, + "block0_v_cos_v_neg_g": 0.05479969084262848, + "block0_o_v_norm": 0.17819052934646606, + "block0_o_cos_v_neg_g": 0.02789740078151226, + "block0_mlp_win_v_norm": 0.22420534491539001, + "block0_mlp_win_cos_v_neg_g": 0.03525838255882263, + "block0_mlp_wout_v_norm": 0.2394041270017624, + "block0_mlp_wout_cos_v_neg_g": 0.052403006702661514, + "block3_q_v_norm": 0.14236919581890106, + "block3_q_cos_v_neg_g": 0.032802991569042206, + "block3_k_v_norm": 0.15866319835186005, + "block3_k_cos_v_neg_g": 0.05908917635679245, + "block3_v_v_norm": 0.1532628983259201, + "block3_v_cos_v_neg_g": 0.03020378015935421, + "block3_o_v_norm": 
0.21058841049671173, + "block3_o_cos_v_neg_g": 0.03429101034998894, + "block3_mlp_win_v_norm": 0.25849276781082153, + "block3_mlp_win_cos_v_neg_g": 0.02970670908689499, + "block3_mlp_wout_v_norm": 0.23733434081077576, + "block3_mlp_wout_cos_v_neg_g": 0.09950457513332367, + "block7_q_v_norm": 0.24069282412528992, + "block7_q_cos_v_neg_g": 0.035363659262657166, + "block7_k_v_norm": 0.24661873281002045, + "block7_k_cos_v_neg_g": 0.08167549222707748, + "block7_v_v_norm": 0.20409823954105377, + "block7_v_cos_v_neg_g": 0.023652663454413414, + "block7_o_v_norm": 0.24848738312721252, + "block7_o_cos_v_neg_g": 0.07491140812635422, + "block7_mlp_win_v_norm": 0.27804574370384216, + "block7_mlp_win_cos_v_neg_g": 0.03328925743699074, + "block7_mlp_wout_v_norm": 0.24487638473510742, + "block7_mlp_wout_cos_v_neg_g": 0.12107468396425247, + "block11_q_v_norm": 0.24731889367103577, + "block11_q_cos_v_neg_g": 0.07988375425338745, + "block11_k_v_norm": 0.2488982081413269, + "block11_k_cos_v_neg_g": 0.105892114341259, + "block11_v_v_norm": 0.24545465409755707, + "block11_v_cos_v_neg_g": 0.04604926332831383, + "block11_o_v_norm": 0.24818481504917145, + "block11_o_cos_v_neg_g": 0.08359576016664505, + "block11_mlp_win_v_norm": 0.2341965287923813, + "block11_mlp_win_cos_v_neg_g": 0.10318823903799057, + "block11_mlp_wout_v_norm": 0.24269990622997284, + "block11_mlp_wout_cos_v_neg_g": 0.09211550652980804, + "embed_lm_head_sharpness": 0.0007063964731059968, + "layer_1_sharpness": 0.038752373307943344, + "layer_2_sharpness": 0.020059989765286446, + "layer_3_sharpness": 0.0032116170041263103, + "layer_4_sharpness": 0.0016985631082206964, + "layer_5_sharpness": 0.0010487454710528255, + "layer_6_sharpness": 0.0016599985538050532, + "layer_7_sharpness": 0.0014888844452798367, + "layer_8_sharpness": 0.001344128861092031, + "layer_9_sharpness": 0.0009335668291896582, + "layer_10_sharpness": 0.0005007963045500219, + "layer_11_sharpness": 0.0006396103417500854, + "layer_12_sharpness": 0.0019323457963764668, + "block0_q_sharpness": 0.00019535241881385446, + "block0_k_sharpness": 0.00021107208158355206, + "block0_v_sharpness": 0.2581653296947479, + "block0_o_sharpness": 0.007771414238959551, + "block0_mlp_win_sharpness": 0.008366892114281654, + "block0_mlp_wout_sharpness": 0.009688453748822212, + "block3_q_sharpness": 0.000135189518914558, + "block3_k_sharpness": 0.0012986741494387388, + "block3_v_sharpness": 0.0065474663861095905, + "block3_o_sharpness": 0.00040334262303076684, + "block3_mlp_win_sharpness": 0.00013828507508151233, + "block3_mlp_wout_sharpness": 4.618649836629629e-05, + "block7_q_sharpness": 0.00010092710726894438, + "block7_k_sharpness": 8.648309449199587e-05, + "block7_v_sharpness": 0.003451828844845295, + "block7_o_sharpness": 9.746975410962477e-05, + "block7_mlp_win_sharpness": 0.0005426632123999298, + "block7_mlp_wout_sharpness": 8.705903019290417e-05, + "block11_q_sharpness": 0.00017094527720473707, + "block11_k_sharpness": 8.775713649811223e-05, + "block11_v_sharpness": 0.00015924872423056513, + "block11_o_sharpness": 5.6223747378680855e-05, + "block11_mlp_win_sharpness": 0.001430866657756269, + "block11_mlp_wout_sharpness": 0.0031392662785947323, + "sum_layer_numerators": 0.017208617312388045, + "block_diag_sharpness": 0.004827717867285553, + "cross_layer_sharpness": 0.008604335050492939 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6000.json 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..31bc5e64f6c28115a1c347326c7fc1ae08c6e31e --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.291933298110962, + "total_l1_linf_norm": 19413.52734375, + "total_spectral_norm": 2.291933536529541, + "embed_lm_head_update_fnorm": 1.3419607877731323, + "embed_lm_head_max_l1_linf_norm": 0.3637460172176361, + "embed_lm_head_max_spectral_norm": 0.24388127028942108, + "layer_1_update_fnorm": 0.5267164707183838, + "layer_1_max_l1_linf_norm": 0.37884581089019775, + "layer_1_max_spectral_norm": 0.01204751618206501, + "layer_2_update_fnorm": 0.3382170796394348, + "layer_2_max_l1_linf_norm": 0.36194366216659546, + "layer_2_max_spectral_norm": 0.012942024506628513, + "layer_3_update_fnorm": 0.37591812014579773, + "layer_3_max_l1_linf_norm": 0.38899677991867065, + "layer_3_max_spectral_norm": 0.013546055182814598, + "layer_4_update_fnorm": 0.48267844319343567, + "layer_4_max_l1_linf_norm": 0.4337499737739563, + "layer_4_max_spectral_norm": 0.017300425097346306, + "layer_5_update_fnorm": 0.5208013653755188, + "layer_5_max_l1_linf_norm": 0.39784425497055054, + "layer_5_max_spectral_norm": 0.0120469955727458, + "layer_6_update_fnorm": 0.5535247921943665, + "layer_6_max_l1_linf_norm": 0.40233930945396423, + "layer_6_max_spectral_norm": 0.01204062718898058, + "layer_7_update_fnorm": 0.5945506691932678, + "layer_7_max_l1_linf_norm": 0.4006049633026123, + "layer_7_max_spectral_norm": 0.012050162069499493, + "layer_8_update_fnorm": 0.5979211330413818, + "layer_8_max_l1_linf_norm": 0.40835532546043396, + "layer_8_max_spectral_norm": 0.01204665657132864, + "layer_9_update_fnorm": 0.5954034328460693, + "layer_9_max_l1_linf_norm": 0.4010636508464813, + "layer_9_max_spectral_norm": 0.012055487371981144, + "layer_10_update_fnorm": 0.5946884155273438, + "layer_10_max_l1_linf_norm": 0.40821635723114014, + "layer_10_max_spectral_norm": 0.012056333012878895, + "layer_11_update_fnorm": 0.5751575231552124, + "layer_11_max_l1_linf_norm": 0.4117025136947632, + "layer_11_max_spectral_norm": 0.012042297050356865, + "layer_12_update_fnorm": 0.598662257194519, + "layer_12_max_l1_linf_norm": 0.4143753945827484, + "layer_12_max_spectral_norm": 0.012046555057168007, + "block0_q_update_fnorm": 0.2460336834192276, + "block0_q_max_l1_linf_norm": 0.20717918872833252, + "block0_q_max_spectral_norm": 0.01204751618206501, + "block0_k_update_fnorm": 0.24124376475811005, + "block0_k_max_l1_linf_norm": 0.20675304532051086, + "block0_k_max_spectral_norm": 0.012042838148772717, + "block0_v_update_fnorm": 0.125016987323761, + "block0_v_max_l1_linf_norm": 0.14589685201644897, + "block0_v_max_spectral_norm": 0.01202358864247799, + "block0_o_update_fnorm": 0.18822142481803894, + "block0_o_max_l1_linf_norm": 0.17201291024684906, + "block0_o_max_spectral_norm": 0.012032197788357735, + "block0_mlp_win_update_fnorm": 0.2339816838502884, + "block0_mlp_win_max_l1_linf_norm": 0.18954211473464966, + "block0_mlp_win_max_spectral_norm": 0.012032623402774334, + "block0_mlp_wout_update_fnorm": 0.229753315448761, + "block0_mlp_wout_max_l1_linf_norm": 0.37884581089019775, + "block0_mlp_wout_max_spectral_norm": 0.012031137943267822, + "block3_q_update_fnorm": 0.14561644196510315, + "block3_q_max_l1_linf_norm": 0.18165776133537292, + 
"block3_q_max_spectral_norm": 0.012027672491967678, + "block3_k_update_fnorm": 0.15413160622119904, + "block3_k_max_l1_linf_norm": 0.19382095336914062, + "block3_k_max_spectral_norm": 0.012027764692902565, + "block3_v_update_fnorm": 0.15241935849189758, + "block3_v_max_l1_linf_norm": 0.17041829228401184, + "block3_v_max_spectral_norm": 0.012026170268654823, + "block3_o_update_fnorm": 0.20369845628738403, + "block3_o_max_l1_linf_norm": 0.17181280255317688, + "block3_o_max_spectral_norm": 0.01203469280153513, + "block3_mlp_win_update_fnorm": 0.25674694776535034, + "block3_mlp_win_max_l1_linf_norm": 0.17806406319141388, + "block3_mlp_win_max_spectral_norm": 0.012034558691084385, + "block3_mlp_wout_update_fnorm": 0.23845098912715912, + "block3_mlp_wout_max_l1_linf_norm": 0.3932758867740631, + "block3_mlp_wout_max_spectral_norm": 0.01136207114905119, + "block7_q_update_fnorm": 0.240904301404953, + "block7_q_max_l1_linf_norm": 0.20639656484127045, + "block7_q_max_spectral_norm": 0.012041419744491577, + "block7_k_update_fnorm": 0.24593190848827362, + "block7_k_max_l1_linf_norm": 0.20655140280723572, + "block7_k_max_spectral_norm": 0.012043904513120651, + "block7_v_update_fnorm": 0.20172953605651855, + "block7_v_max_l1_linf_norm": 0.20740070939064026, + "block7_v_max_spectral_norm": 0.012028793804347515, + "block7_o_update_fnorm": 0.24768568575382233, + "block7_o_max_l1_linf_norm": 0.20743322372436523, + "block7_o_max_spectral_norm": 0.012043732218444347, + "block7_mlp_win_update_fnorm": 0.27887099981307983, + "block7_mlp_win_max_l1_linf_norm": 0.16398689150810242, + "block7_mlp_win_max_spectral_norm": 0.01204665657132864, + "block7_mlp_wout_update_fnorm": 0.2429882287979126, + "block7_mlp_wout_max_l1_linf_norm": 0.40835532546043396, + "block7_mlp_wout_max_spectral_norm": 0.011373507790267467, + "block11_q_update_fnorm": 0.24770094454288483, + "block11_q_max_l1_linf_norm": 0.2146383821964264, + "block11_q_max_spectral_norm": 0.012042565271258354, + "block11_k_update_fnorm": 0.2493986338376999, + "block11_k_max_l1_linf_norm": 0.21438747644424438, + "block11_k_max_spectral_norm": 0.012037932872772217, + "block11_v_update_fnorm": 0.2453404814004898, + "block11_v_max_l1_linf_norm": 0.20569631457328796, + "block11_v_max_spectral_norm": 0.012044714763760567, + "block11_o_update_fnorm": 0.24783949553966522, + "block11_o_max_l1_linf_norm": 0.2068163901567459, + "block11_o_max_spectral_norm": 0.012046555057168007, + "block11_mlp_win_update_fnorm": 0.23603424429893494, + "block11_mlp_win_max_l1_linf_norm": 0.16194838285446167, + "block11_mlp_win_max_spectral_norm": 0.011377434246242046, + "block11_mlp_wout_update_fnorm": 0.23945461213588715, + "block11_mlp_wout_max_l1_linf_norm": 0.409639447927475, + "block11_mlp_wout_max_spectral_norm": 0.01185304019600153, + "total_sharpness": 0.006033833138644695, + "block_total_sharpness": 0.008046749979257584, + "v_norm_block": 1.8579827547073364, + "v_T_H_v_block": 0.027778184041380882, + "v_norm": 2.291933298110962, + "ip_v_neg_g_hvp": 0.042577944695949554, + "cos_v_neg_g_hvp": 0.022534674033522606, + "g_hvp_norm": 0.8243876099586487, + "ip_v_neg_g_t": 0.047054700553417206, + "cos_v_neg_g_t": 0.02511942759156227, + "g_t_norm": 0.8173184394836426, + "g_norm": 0.8243876099586487, + "hv_norm": 1.6635574102401733, + "cos_v_hv": 0.008312994614243507, + "hg_norm": 898.302734375, + "cos_g_hg": 0.05578236281871796, + "v_parallel_norm": 0.004662226885557175, + "v_perp_norm": 2.29192852973938, + "embed_lm_head_v_norm": 1.3419607877731323, + "embed_lm_head_cos_v_neg_g": 
0.032011400908231735, + "layer_1_v_norm": 0.5267164707183838, + "layer_1_cos_v_neg_g": 0.010518830269575119, + "layer_2_v_norm": 0.3382170796394348, + "layer_2_cos_v_neg_g": 0.031866274774074554, + "layer_3_v_norm": 0.37591812014579773, + "layer_3_cos_v_neg_g": 0.024941258132457733, + "layer_4_v_norm": 0.48267844319343567, + "layer_4_cos_v_neg_g": 0.023696783930063248, + "layer_5_v_norm": 0.5208013653755188, + "layer_5_cos_v_neg_g": 0.027708984911441803, + "layer_6_v_norm": 0.5535247921943665, + "layer_6_cos_v_neg_g": 0.0268191359937191, + "layer_7_v_norm": 0.5945506691932678, + "layer_7_cos_v_neg_g": 0.026771673932671547, + "layer_8_v_norm": 0.5979211926460266, + "layer_8_cos_v_neg_g": 0.02772986888885498, + "layer_9_v_norm": 0.5954034328460693, + "layer_9_cos_v_neg_g": 0.02795582078397274, + "layer_10_v_norm": 0.5946884155273438, + "layer_10_cos_v_neg_g": 0.029957829043269157, + "layer_11_v_norm": 0.5751574635505676, + "layer_11_cos_v_neg_g": 0.039670150727033615, + "layer_12_v_norm": 0.598662257194519, + "layer_12_cos_v_neg_g": 0.07200619578361511, + "block0_q_v_norm": 0.2460336834192276, + "block0_q_cos_v_neg_g": 0.035098206251859665, + "block0_k_v_norm": 0.24124376475811005, + "block0_k_cos_v_neg_g": 0.03464614972472191, + "block0_v_v_norm": 0.125016987323761, + "block0_v_cos_v_neg_g": 0.005607126280665398, + "block0_o_v_norm": 0.18822142481803894, + "block0_o_cos_v_neg_g": 0.023245709016919136, + "block0_mlp_win_v_norm": 0.2339816838502884, + "block0_mlp_win_cos_v_neg_g": 0.02071455493569374, + "block0_mlp_wout_v_norm": 0.229753315448761, + "block0_mlp_wout_cos_v_neg_g": 0.04348496347665787, + "block3_q_v_norm": 0.14561644196510315, + "block3_q_cos_v_neg_g": 0.0245404914021492, + "block3_k_v_norm": 0.15413160622119904, + "block3_k_cos_v_neg_g": 0.04086016118526459, + "block3_v_v_norm": 0.15241935849189758, + "block3_v_cos_v_neg_g": 0.024445906281471252, + "block3_o_v_norm": 0.20369845628738403, + "block3_o_cos_v_neg_g": 0.02971261367201805, + "block3_mlp_win_v_norm": 0.25674694776535034, + "block3_mlp_win_cos_v_neg_g": 0.0262275543063879, + "block3_mlp_wout_v_norm": 0.23845098912715912, + "block3_mlp_wout_cos_v_neg_g": 0.09327957034111023, + "block7_q_v_norm": 0.240904301404953, + "block7_q_cos_v_neg_g": 0.027178185060620308, + "block7_k_v_norm": 0.24593190848827362, + "block7_k_cos_v_neg_g": 0.07015807926654816, + "block7_v_v_norm": 0.20172953605651855, + "block7_v_cos_v_neg_g": 0.030586427077651024, + "block7_o_v_norm": 0.24768568575382233, + "block7_o_cos_v_neg_g": 0.07596318423748016, + "block7_mlp_win_v_norm": 0.27887099981307983, + "block7_mlp_win_cos_v_neg_g": 0.03493896499276161, + "block7_mlp_wout_v_norm": 0.2429882287979126, + "block7_mlp_wout_cos_v_neg_g": 0.12154495716094971, + "block11_q_v_norm": 0.24770094454288483, + "block11_q_cos_v_neg_g": 0.05999850854277611, + "block11_k_v_norm": 0.2493986338376999, + "block11_k_cos_v_neg_g": 0.09067137539386749, + "block11_v_v_norm": 0.2453404814004898, + "block11_v_cos_v_neg_g": 0.04737650603055954, + "block11_o_v_norm": 0.24783949553966522, + "block11_o_cos_v_neg_g": 0.07719895988702774, + "block11_mlp_win_v_norm": 0.23603424429893494, + "block11_mlp_win_cos_v_neg_g": 0.09914672374725342, + "block11_mlp_wout_v_norm": 0.23945461213588715, + "block11_mlp_wout_cos_v_neg_g": 0.08884231001138687, + "embed_lm_head_sharpness": 0.0004260184650775045, + "layer_1_sharpness": 0.010714268311858177, + "layer_2_sharpness": 0.0154205821454525, + "layer_3_sharpness": 0.003918676171451807, + "layer_4_sharpness": 0.0013962514931336045, + 
"layer_5_sharpness": 0.0011080470867455006, + "layer_6_sharpness": 0.0014752669958397746, + "layer_7_sharpness": 0.0012976868310943246, + "layer_8_sharpness": 0.0013696692185476422, + "layer_9_sharpness": 0.0009585070074535906, + "layer_10_sharpness": 0.0005014413618482649, + "layer_11_sharpness": 0.0005140236462466419, + "layer_12_sharpness": 0.0008138497942127287, + "block0_q_sharpness": -8.689579408382997e-05, + "block0_k_sharpness": 1.8284577890881337e-05, + "block0_v_sharpness": 0.07744848728179932, + "block0_o_sharpness": 0.00019413791596889496, + "block0_mlp_win_sharpness": 0.000746743637137115, + "block0_mlp_wout_sharpness": 0.001456113182939589, + "block3_q_sharpness": 4.716331386589445e-05, + "block3_k_sharpness": 0.00195735483430326, + "block3_v_sharpness": 0.0047059934586286545, + "block3_o_sharpness": 0.0003737022925633937, + "block3_mlp_win_sharpness": 0.00013622085680253804, + "block3_mlp_wout_sharpness": 6.415402458515018e-05, + "block7_q_sharpness": 7.436690793838352e-05, + "block7_k_sharpness": 8.531372441211715e-05, + "block7_v_sharpness": 0.0036328290589153767, + "block7_o_sharpness": 7.775423728162423e-05, + "block7_mlp_win_sharpness": 0.0005812307354062796, + "block7_mlp_wout_sharpness": 9.806609159568325e-05, + "block11_q_sharpness": 8.38167907204479e-05, + "block11_k_sharpness": 4.283303132979199e-05, + "block11_v_sharpness": 0.00013495676103048027, + "block11_o_sharpness": 3.239631769247353e-05, + "block11_mlp_win_sharpness": 0.0007483321824111044, + "block11_mlp_wout_sharpness": 0.0011868133442476392, + "sum_layer_numerators": 0.00829528923379018, + "block_diag_sharpness": 0.002402969043116238, + "cross_layer_sharpness": 0.005643780936141346 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..8f84f0adfc47ccf9f3187e7d5a120488b5d73a6d --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_6500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2112908363342285, + "total_l1_linf_norm": 18529.052734375, + "total_spectral_norm": 2.2112908363342285, + "embed_lm_head_update_fnorm": 1.2824161052703857, + "embed_lm_head_max_l1_linf_norm": 0.3220450282096863, + "embed_lm_head_max_spectral_norm": 0.18879161775112152, + "layer_1_update_fnorm": 0.48104530572891235, + "layer_1_max_l1_linf_norm": 0.3256537914276123, + "layer_1_max_spectral_norm": 0.012041615322232246, + "layer_2_update_fnorm": 0.26685163378715515, + "layer_2_max_l1_linf_norm": 0.5118861198425293, + "layer_2_max_spectral_norm": 0.012034173123538494, + "layer_3_update_fnorm": 0.3204907476902008, + "layer_3_max_l1_linf_norm": 0.5194941163063049, + "layer_3_max_spectral_norm": 0.012030537240207195, + "layer_4_update_fnorm": 0.4251675307750702, + "layer_4_max_l1_linf_norm": 0.5505807399749756, + "layer_4_max_spectral_norm": 0.01837249845266342, + "layer_5_update_fnorm": 0.5222769975662231, + "layer_5_max_l1_linf_norm": 0.5059748291969299, + "layer_5_max_spectral_norm": 0.012041120789945126, + "layer_6_update_fnorm": 0.5454396605491638, + "layer_6_max_l1_linf_norm": 0.44786185026168823, + "layer_6_max_spectral_norm": 0.012041589245200157, + "layer_7_update_fnorm": 0.588817834854126, + "layer_7_max_l1_linf_norm": 0.41280922293663025, + 
"layer_7_max_spectral_norm": 0.012044875882565975, + "layer_8_update_fnorm": 0.5928296446800232, + "layer_8_max_l1_linf_norm": 0.4069192707538605, + "layer_8_max_spectral_norm": 0.012050838209688663, + "layer_9_update_fnorm": 0.5940043330192566, + "layer_9_max_l1_linf_norm": 0.4057350754737854, + "layer_9_max_spectral_norm": 0.012048637494444847, + "layer_10_update_fnorm": 0.5949196815490723, + "layer_10_max_l1_linf_norm": 0.43669068813323975, + "layer_10_max_spectral_norm": 0.012052792124450207, + "layer_11_update_fnorm": 0.5701684951782227, + "layer_11_max_l1_linf_norm": 0.4648754596710205, + "layer_11_max_spectral_norm": 0.01204537134617567, + "layer_12_update_fnorm": 0.5990206003189087, + "layer_12_max_l1_linf_norm": 0.4671667516231537, + "layer_12_max_spectral_norm": 0.01204823050647974, + "block0_q_update_fnorm": 0.24175846576690674, + "block0_q_max_l1_linf_norm": 0.20916524529457092, + "block0_q_max_spectral_norm": 0.012041615322232246, + "block0_k_update_fnorm": 0.2342338263988495, + "block0_k_max_l1_linf_norm": 0.21233677864074707, + "block0_k_max_spectral_norm": 0.012041101232171059, + "block0_v_update_fnorm": 0.12733671069145203, + "block0_v_max_l1_linf_norm": 0.13095733523368835, + "block0_v_max_spectral_norm": 0.012025929987430573, + "block0_o_update_fnorm": 0.17368751764297485, + "block0_o_max_l1_linf_norm": 0.17171107232570648, + "block0_o_max_spectral_norm": 0.012030664831399918, + "block0_mlp_win_update_fnorm": 0.17908748984336853, + "block0_mlp_win_max_l1_linf_norm": 0.19401592016220093, + "block0_mlp_win_max_spectral_norm": 0.01202849019318819, + "block0_mlp_wout_update_fnorm": 0.1988082230091095, + "block0_mlp_wout_max_l1_linf_norm": 0.3256537914276123, + "block0_mlp_wout_max_spectral_norm": 0.012028733268380165, + "block3_q_update_fnorm": 0.07833366841077805, + "block3_q_max_l1_linf_norm": 0.10931617021560669, + "block3_q_max_spectral_norm": 0.01202581450343132, + "block3_k_update_fnorm": 0.08171733468770981, + "block3_k_max_l1_linf_norm": 0.12092038989067078, + "block3_k_max_spectral_norm": 0.012022937647998333, + "block3_v_update_fnorm": 0.1346064805984497, + "block3_v_max_l1_linf_norm": 0.14316138625144958, + "block3_v_max_spectral_norm": 0.012025512754917145, + "block3_o_update_fnorm": 0.1726600080728531, + "block3_o_max_l1_linf_norm": 0.1508426070213318, + "block3_o_max_spectral_norm": 0.012030750513076782, + "block3_mlp_win_update_fnorm": 0.22895009815692902, + "block3_mlp_win_max_l1_linf_norm": 0.1860378086566925, + "block3_mlp_win_max_spectral_norm": 0.01203353051096201, + "block3_mlp_wout_update_fnorm": 0.25862574577331543, + "block3_mlp_wout_max_l1_linf_norm": 0.4228057861328125, + "block3_mlp_wout_max_spectral_norm": 0.012046702206134796, + "block7_q_update_fnorm": 0.23867374658584595, + "block7_q_max_l1_linf_norm": 0.20779897272586823, + "block7_q_max_spectral_norm": 0.01204060111194849, + "block7_k_update_fnorm": 0.24625709652900696, + "block7_k_max_l1_linf_norm": 0.20795279741287231, + "block7_k_max_spectral_norm": 0.012043887749314308, + "block7_v_update_fnorm": 0.19070608913898468, + "block7_v_max_l1_linf_norm": 0.20564185082912445, + "block7_v_max_spectral_norm": 0.012032998725771904, + "block7_o_update_fnorm": 0.24727696180343628, + "block7_o_max_l1_linf_norm": 0.2064981758594513, + "block7_o_max_spectral_norm": 0.012045843526721, + "block7_mlp_win_update_fnorm": 0.2801147401332855, + "block7_mlp_win_max_l1_linf_norm": 0.16222605109214783, + "block7_mlp_win_max_spectral_norm": 0.012050838209688663, + "block7_mlp_wout_update_fnorm": 0.2401627153158188, 
+ "block7_mlp_wout_max_l1_linf_norm": 0.4027038812637329, + "block7_mlp_wout_max_spectral_norm": 0.01139763928949833, + "block11_q_update_fnorm": 0.24749986827373505, + "block11_q_max_l1_linf_norm": 0.22090387344360352, + "block11_q_max_spectral_norm": 0.012040589936077595, + "block11_k_update_fnorm": 0.24964642524719238, + "block11_k_max_l1_linf_norm": 0.21438241004943848, + "block11_k_max_spectral_norm": 0.012035120278596878, + "block11_v_update_fnorm": 0.24453184008598328, + "block11_v_max_l1_linf_norm": 0.2074567675590515, + "block11_v_max_spectral_norm": 0.01204823050647974, + "block11_o_update_fnorm": 0.24766777455806732, + "block11_o_max_l1_linf_norm": 0.20664572715759277, + "block11_o_max_spectral_norm": 0.012041668407619, + "block11_mlp_win_update_fnorm": 0.23492489755153656, + "block11_mlp_win_max_l1_linf_norm": 0.16452306509017944, + "block11_mlp_win_max_spectral_norm": 0.011351455003023148, + "block11_mlp_wout_update_fnorm": 0.2422942817211151, + "block11_mlp_wout_max_l1_linf_norm": 0.4093374013900757, + "block11_mlp_wout_max_spectral_norm": 0.012031287886202335, + "total_sharpness": 0.03573838993906975, + "block_total_sharpness": 0.047353267669677734, + "v_norm_block": 1.801448106765747, + "v_T_H_v_block": 0.15367154777050018, + "v_norm": 2.2112908363342285, + "ip_v_neg_g_hvp": 0.030798520892858505, + "cos_v_neg_g_hvp": 0.0088645089417696, + "g_hvp_norm": 1.5711921453475952, + "ip_v_neg_g_t": 0.04182066768407822, + "cos_v_neg_g_t": 0.016506854444742203, + "g_t_norm": 1.145725965499878, + "g_norm": 1.5711921453475952, + "hv_norm": 48.348289489746094, + "cos_v_hv": 0.001634555752389133, + "hg_norm": 213286.03125, + "cos_g_hg": 0.4063246548175812, + "v_parallel_norm": 0.004672887269407511, + "v_perp_norm": 2.2112858295440674, + "embed_lm_head_v_norm": 1.2824161052703857, + "embed_lm_head_cos_v_neg_g": 0.006787351332604885, + "layer_1_v_norm": 0.48104530572891235, + "layer_1_cos_v_neg_g": -0.004068374168127775, + "layer_2_v_norm": 0.26685163378715515, + "layer_2_cos_v_neg_g": 0.011345379054546356, + "layer_3_v_norm": 0.3204907476902008, + "layer_3_cos_v_neg_g": 0.012880916707217693, + "layer_4_v_norm": 0.4251675307750702, + "layer_4_cos_v_neg_g": 0.018889153376221657, + "layer_5_v_norm": 0.5222769975662231, + "layer_5_cos_v_neg_g": 0.01882091537117958, + "layer_6_v_norm": 0.5454396605491638, + "layer_6_cos_v_neg_g": 0.025234147906303406, + "layer_7_v_norm": 0.588817834854126, + "layer_7_cos_v_neg_g": 0.023926209658384323, + "layer_8_v_norm": 0.5928296446800232, + "layer_8_cos_v_neg_g": 0.025920623913407326, + "layer_9_v_norm": 0.5940043330192566, + "layer_9_cos_v_neg_g": 0.02651832439005375, + "layer_10_v_norm": 0.5949196815490723, + "layer_10_cos_v_neg_g": 0.029011862352490425, + "layer_11_v_norm": 0.5701685547828674, + "layer_11_cos_v_neg_g": 0.03727145493030548, + "layer_12_v_norm": 0.5990206003189087, + "layer_12_cos_v_neg_g": 0.06037323921918869, + "block0_q_v_norm": 0.24175846576690674, + "block0_q_cos_v_neg_g": 0.024149760603904724, + "block0_k_v_norm": 0.2342338263988495, + "block0_k_cos_v_neg_g": 0.019548529759049416, + "block0_v_v_norm": 0.12733671069145203, + "block0_v_cos_v_neg_g": -0.017260534688830376, + "block0_o_v_norm": 0.17368751764297485, + "block0_o_cos_v_neg_g": -0.012604114599525928, + "block0_mlp_win_v_norm": 0.17908748984336853, + "block0_mlp_win_cos_v_neg_g": 0.00374339846894145, + "block0_mlp_wout_v_norm": 0.1988082230091095, + "block0_mlp_wout_cos_v_neg_g": 0.014095216989517212, + "block3_q_v_norm": 0.07833366841077805, + "block3_q_cos_v_neg_g": 
0.019025294110178947, + "block3_k_v_norm": 0.08171733468770981, + "block3_k_cos_v_neg_g": 0.027514953166246414, + "block3_v_v_norm": 0.1346064805984497, + "block3_v_cos_v_neg_g": 0.02083515003323555, + "block3_o_v_norm": 0.1726600080728531, + "block3_o_cos_v_neg_g": 0.02439757250249386, + "block3_mlp_win_v_norm": 0.22895009815692902, + "block3_mlp_win_cos_v_neg_g": 0.019348615780472755, + "block3_mlp_wout_v_norm": 0.25862574577331543, + "block3_mlp_wout_cos_v_neg_g": 0.06597594916820526, + "block7_q_v_norm": 0.23867374658584595, + "block7_q_cos_v_neg_g": 0.02542182430624962, + "block7_k_v_norm": 0.24625709652900696, + "block7_k_cos_v_neg_g": 0.06782785803079605, + "block7_v_v_norm": 0.19070608913898468, + "block7_v_cos_v_neg_g": 0.02703634649515152, + "block7_o_v_norm": 0.24727696180343628, + "block7_o_cos_v_neg_g": 0.07412742078304291, + "block7_mlp_win_v_norm": 0.2801147401332855, + "block7_mlp_win_cos_v_neg_g": 0.03199152275919914, + "block7_mlp_wout_v_norm": 0.2401627153158188, + "block7_mlp_wout_cos_v_neg_g": 0.11501087248325348, + "block11_q_v_norm": 0.24749986827373505, + "block11_q_cos_v_neg_g": 0.07602227479219437, + "block11_k_v_norm": 0.24964642524719238, + "block11_k_cos_v_neg_g": 0.09832923114299774, + "block11_v_v_norm": 0.24453184008598328, + "block11_v_cos_v_neg_g": 0.04238979145884514, + "block11_o_v_norm": 0.24766777455806732, + "block11_o_cos_v_neg_g": 0.07221721857786179, + "block11_mlp_win_v_norm": 0.23492489755153656, + "block11_mlp_win_cos_v_neg_g": 0.08114062249660492, + "block11_mlp_wout_v_norm": 0.2422942817211151, + "block11_mlp_wout_cos_v_neg_g": 0.06798809766769409, + "embed_lm_head_sharpness": 0.0008407259592786431, + "layer_1_sharpness": 0.5553359389305115, + "layer_2_sharpness": 0.015057551674544811, + "layer_3_sharpness": 0.004408118315041065, + "layer_4_sharpness": 0.0019350579241290689, + "layer_5_sharpness": 0.0014460828388109803, + "layer_6_sharpness": 0.0015567392110824585, + "layer_7_sharpness": 0.0014061294496059418, + "layer_8_sharpness": 0.0012910577934235334, + "layer_9_sharpness": 0.0008786332327872515, + "layer_10_sharpness": 0.0004472023865673691, + "layer_11_sharpness": 0.000547938805539161, + "layer_12_sharpness": 0.0009355735965073109, + "block0_q_sharpness": 0.0005924835568293929, + "block0_k_sharpness": 0.0001875334419310093, + "block0_v_sharpness": 2.993779420852661, + "block0_o_sharpness": 0.34588414430618286, + "block0_mlp_win_sharpness": 0.013449087738990784, + "block0_mlp_wout_sharpness": 0.007355135399848223, + "block3_q_sharpness": 6.186861719470471e-05, + "block3_k_sharpness": 0.006375296041369438, + "block3_v_sharpness": 0.005616834387183189, + "block3_o_sharpness": 0.0010439865291118622, + "block3_mlp_win_sharpness": 0.00023078650701791048, + "block3_mlp_wout_sharpness": 5.37500309292227e-05, + "block7_q_sharpness": 8.706108201295137e-05, + "block7_k_sharpness": 7.693732914049178e-05, + "block7_v_sharpness": 0.003812453243881464, + "block7_o_sharpness": 8.182843157555908e-05, + "block7_mlp_win_sharpness": 0.0006012619705870748, + "block7_mlp_wout_sharpness": 8.867678843671456e-05, + "block11_q_sharpness": 0.0001234053634107113, + "block11_k_sharpness": 6.428463530028239e-05, + "block11_v_sharpness": 0.00011485078721307218, + "block11_o_sharpness": 4.942637315252796e-05, + "block11_mlp_win_sharpness": 0.000788990524597466, + "block11_mlp_wout_sharpness": 0.0015872057992964983, + "sum_layer_numerators": 0.13316307715054762, + "block_diag_sharpness": 0.041033665136180376, + "cross_layer_sharpness": 0.006319602533497358 +} \ No 
newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..f040b22388949d55440c9bde62c432c1eb92fb14 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.250776767730713, + "total_l1_linf_norm": 18935.806640625, + "total_spectral_norm": 2.250776529312134, + "embed_lm_head_update_fnorm": 1.332216739654541, + "embed_lm_head_max_l1_linf_norm": 0.39206334948539734, + "embed_lm_head_max_spectral_norm": 0.26467424631118774, + "layer_1_update_fnorm": 0.5106209516525269, + "layer_1_max_l1_linf_norm": 0.3593659996986389, + "layer_1_max_spectral_norm": 0.012040872126817703, + "layer_2_update_fnorm": 0.31093424558639526, + "layer_2_max_l1_linf_norm": 0.4453788995742798, + "layer_2_max_spectral_norm": 0.01202965248376131, + "layer_3_update_fnorm": 0.33395811915397644, + "layer_3_max_l1_linf_norm": 0.45454227924346924, + "layer_3_max_spectral_norm": 0.012635838240385056, + "layer_4_update_fnorm": 0.41070371866226196, + "layer_4_max_l1_linf_norm": 0.5680439472198486, + "layer_4_max_spectral_norm": 0.02163810282945633, + "layer_5_update_fnorm": 0.5027440190315247, + "layer_5_max_l1_linf_norm": 0.4457162022590637, + "layer_5_max_spectral_norm": 0.013913868926465511, + "layer_6_update_fnorm": 0.5418265461921692, + "layer_6_max_l1_linf_norm": 0.41083312034606934, + "layer_6_max_spectral_norm": 0.012046686373651028, + "layer_7_update_fnorm": 0.5946023464202881, + "layer_7_max_l1_linf_norm": 0.4000486731529236, + "layer_7_max_spectral_norm": 0.0120449960231781, + "layer_8_update_fnorm": 0.5978411436080933, + "layer_8_max_l1_linf_norm": 0.4020974934101105, + "layer_8_max_spectral_norm": 0.012047899886965752, + "layer_9_update_fnorm": 0.5957822203636169, + "layer_9_max_l1_linf_norm": 0.3995792269706726, + "layer_9_max_spectral_norm": 0.012048314325511456, + "layer_10_update_fnorm": 0.5957334637641907, + "layer_10_max_l1_linf_norm": 0.435906320810318, + "layer_10_max_spectral_norm": 0.012057230807840824, + "layer_11_update_fnorm": 0.5728328824043274, + "layer_11_max_l1_linf_norm": 0.4695475697517395, + "layer_11_max_spectral_norm": 0.012042340822517872, + "layer_12_update_fnorm": 0.5985719561576843, + "layer_12_max_l1_linf_norm": 0.45908990502357483, + "layer_12_max_spectral_norm": 0.012045133858919144, + "block0_q_update_fnorm": 0.23921236395835876, + "block0_q_max_l1_linf_norm": 0.20915637910366058, + "block0_q_max_spectral_norm": 0.012040872126817703, + "block0_k_update_fnorm": 0.23660928010940552, + "block0_k_max_l1_linf_norm": 0.2101534605026245, + "block0_k_max_spectral_norm": 0.012039713561534882, + "block0_v_update_fnorm": 0.15365473926067352, + "block0_v_max_l1_linf_norm": 0.15700650215148926, + "block0_v_max_spectral_norm": 0.01202786061912775, + "block0_o_update_fnorm": 0.19053636491298676, + "block0_o_max_l1_linf_norm": 0.17868438363075256, + "block0_o_max_spectral_norm": 0.012032052502036095, + "block0_mlp_win_update_fnorm": 0.21137884259223938, + "block0_mlp_win_max_l1_linf_norm": 0.1879948377609253, + "block0_mlp_win_max_spectral_norm": 0.012030132114887238, + "block0_mlp_wout_update_fnorm": 0.2067926824092865, + "block0_mlp_wout_max_l1_linf_norm": 0.338360458612442, + 
"block0_mlp_wout_max_spectral_norm": 0.012028700672090054, + "block3_q_update_fnorm": 0.03403377905488014, + "block3_q_max_l1_linf_norm": 0.08491389453411102, + "block3_q_max_spectral_norm": 0.012016721069812775, + "block3_k_update_fnorm": 0.037444598972797394, + "block3_k_max_l1_linf_norm": 0.08973086625337601, + "block3_k_max_spectral_norm": 0.011809039860963821, + "block3_v_update_fnorm": 0.13084203004837036, + "block3_v_max_l1_linf_norm": 0.1443028450012207, + "block3_v_max_spectral_norm": 0.01202768087387085, + "block3_o_update_fnorm": 0.15494264662265778, + "block3_o_max_l1_linf_norm": 0.13542965054512024, + "block3_o_max_spectral_norm": 0.012027755379676819, + "block3_mlp_win_update_fnorm": 0.23932130634784698, + "block3_mlp_win_max_l1_linf_norm": 0.18003159761428833, + "block3_mlp_win_max_spectral_norm": 0.012033168226480484, + "block3_mlp_wout_update_fnorm": 0.25836533308029175, + "block3_mlp_wout_max_l1_linf_norm": 0.42514854669570923, + "block3_mlp_wout_max_spectral_norm": 0.012048258446156979, + "block7_q_update_fnorm": 0.2396984100341797, + "block7_q_max_l1_linf_norm": 0.20871932804584503, + "block7_q_max_spectral_norm": 0.012044301256537437, + "block7_k_update_fnorm": 0.24625210464000702, + "block7_k_max_l1_linf_norm": 0.2085483819246292, + "block7_k_max_spectral_norm": 0.01204219926148653, + "block7_v_update_fnorm": 0.2015809267759323, + "block7_v_max_l1_linf_norm": 0.20740318298339844, + "block7_v_max_spectral_norm": 0.012032811529934406, + "block7_o_update_fnorm": 0.2476363182067871, + "block7_o_max_l1_linf_norm": 0.20894889533519745, + "block7_o_max_spectral_norm": 0.012047899886965752, + "block7_mlp_win_update_fnorm": 0.27999308705329895, + "block7_mlp_win_max_l1_linf_norm": 0.1622370183467865, + "block7_mlp_win_max_spectral_norm": 0.012042050249874592, + "block7_mlp_wout_update_fnorm": 0.2425159215927124, + "block7_mlp_wout_max_l1_linf_norm": 0.4020974934101105, + "block7_mlp_wout_max_spectral_norm": 0.011378202587366104, + "block11_q_update_fnorm": 0.24774162471294403, + "block11_q_max_l1_linf_norm": 0.21680192649364471, + "block11_q_max_spectral_norm": 0.01204062718898058, + "block11_k_update_fnorm": 0.24965886771678925, + "block11_k_max_l1_linf_norm": 0.22076278924942017, + "block11_k_max_spectral_norm": 0.012040986679494381, + "block11_v_update_fnorm": 0.24457678198814392, + "block11_v_max_l1_linf_norm": 0.2062678337097168, + "block11_v_max_spectral_norm": 0.012044073082506657, + "block11_o_update_fnorm": 0.24787721037864685, + "block11_o_max_l1_linf_norm": 0.20708529651165009, + "block11_o_max_spectral_norm": 0.012045133858919144, + "block11_mlp_win_update_fnorm": 0.2354946732521057, + "block11_mlp_win_max_l1_linf_norm": 0.16046342253684998, + "block11_mlp_win_max_spectral_norm": 0.011362859979271889, + "block11_mlp_wout_update_fnorm": 0.24011141061782837, + "block11_mlp_wout_max_l1_linf_norm": 0.42566341161727905, + "block11_mlp_wout_max_spectral_norm": 0.011879445984959602, + "total_sharpness": 0.0146133191883564, + "block_total_sharpness": 0.012806616723537445, + "v_norm_block": 1.8141647577285767, + "v_T_H_v_block": 0.04214905947446823, + "v_norm": 2.250776767730713, + "ip_v_neg_g_hvp": 0.037747468799352646, + "cos_v_neg_g_hvp": 0.015822529792785645, + "g_hvp_norm": 1.0599356889724731, + "ip_v_neg_g_t": 0.05394459515810013, + "cos_v_neg_g_t": 0.027034223079681396, + "g_t_norm": 0.8865466713905334, + "g_norm": 1.0599356889724731, + "hv_norm": 35.0785026550293, + "cos_v_hv": 0.0009376488160341978, + "hg_norm": 534179.875, + "cos_g_hg": 0.4020400047302246, + 
"v_parallel_norm": 0.003788852132856846, + "v_perp_norm": 2.2507734298706055, + "embed_lm_head_v_norm": 1.332216739654541, + "embed_lm_head_cos_v_neg_g": 0.02014053612947464, + "layer_1_v_norm": 0.5106209516525269, + "layer_1_cos_v_neg_g": 0.0030919862911105156, + "layer_2_v_norm": 0.31093424558639526, + "layer_2_cos_v_neg_g": 0.013824298977851868, + "layer_3_v_norm": 0.33395808935165405, + "layer_3_cos_v_neg_g": 0.027058076113462448, + "layer_4_v_norm": 0.41070371866226196, + "layer_4_cos_v_neg_g": 0.025928111746907234, + "layer_5_v_norm": 0.5027440190315247, + "layer_5_cos_v_neg_g": 0.027323080226778984, + "layer_6_v_norm": 0.5418265461921692, + "layer_6_cos_v_neg_g": 0.027354910969734192, + "layer_7_v_norm": 0.5946023464202881, + "layer_7_cos_v_neg_g": 0.026670070365071297, + "layer_8_v_norm": 0.5978411436080933, + "layer_8_cos_v_neg_g": 0.027488628402352333, + "layer_9_v_norm": 0.5957822203636169, + "layer_9_cos_v_neg_g": 0.02818927727639675, + "layer_10_v_norm": 0.5957334637641907, + "layer_10_cos_v_neg_g": 0.0308743417263031, + "layer_11_v_norm": 0.5728328824043274, + "layer_11_cos_v_neg_g": 0.040209703147411346, + "layer_12_v_norm": 0.5985719561576843, + "layer_12_cos_v_neg_g": 0.0749088004231453, + "block0_q_v_norm": 0.23921236395835876, + "block0_q_cos_v_neg_g": 0.027431901544332504, + "block0_k_v_norm": 0.23660928010940552, + "block0_k_cos_v_neg_g": 0.021438486874103546, + "block0_v_v_norm": 0.15365473926067352, + "block0_v_cos_v_neg_g": 0.0029618474654853344, + "block0_o_v_norm": 0.19053636491298676, + "block0_o_cos_v_neg_g": 0.004594321828335524, + "block0_mlp_win_v_norm": 0.21137884259223938, + "block0_mlp_win_cos_v_neg_g": 0.005835483781993389, + "block0_mlp_wout_v_norm": 0.2067926824092865, + "block0_mlp_wout_cos_v_neg_g": 0.01165990624576807, + "block3_q_v_norm": 0.03403377905488014, + "block3_q_cos_v_neg_g": 0.031111186370253563, + "block3_k_v_norm": 0.037444598972797394, + "block3_k_cos_v_neg_g": 0.1832321584224701, + "block3_v_v_norm": 0.13084203004837036, + "block3_v_cos_v_neg_g": 0.030237944796681404, + "block3_o_v_norm": 0.15494264662265778, + "block3_o_cos_v_neg_g": 0.024593356996774673, + "block3_mlp_win_v_norm": 0.23932130634784698, + "block3_mlp_win_cos_v_neg_g": 0.028528159484267235, + "block3_mlp_wout_v_norm": 0.25836533308029175, + "block3_mlp_wout_cos_v_neg_g": 0.06327584385871887, + "block7_q_v_norm": 0.2396984100341797, + "block7_q_cos_v_neg_g": 0.03245473653078079, + "block7_k_v_norm": 0.24625210464000702, + "block7_k_cos_v_neg_g": 0.08276981860399246, + "block7_v_v_norm": 0.2015809267759323, + "block7_v_cos_v_neg_g": 0.026177680119872093, + "block7_o_v_norm": 0.2476363182067871, + "block7_o_cos_v_neg_g": 0.0775320753455162, + "block7_mlp_win_v_norm": 0.27999308705329895, + "block7_mlp_win_cos_v_neg_g": 0.03315276280045509, + "block7_mlp_wout_v_norm": 0.2425159215927124, + "block7_mlp_wout_cos_v_neg_g": 0.12275238335132599, + "block11_q_v_norm": 0.24774162471294403, + "block11_q_cos_v_neg_g": 0.07520893216133118, + "block11_k_v_norm": 0.24965886771678925, + "block11_k_cos_v_neg_g": 0.09758561849594116, + "block11_v_v_norm": 0.24457678198814392, + "block11_v_cos_v_neg_g": 0.04474516957998276, + "block11_o_v_norm": 0.24787721037864685, + "block11_o_cos_v_neg_g": 0.07983292639255524, + "block11_mlp_win_v_norm": 0.2354946732521057, + "block11_mlp_win_cos_v_neg_g": 0.10386865586042404, + "block11_mlp_wout_v_norm": 0.24011141061782837, + "block11_mlp_wout_cos_v_neg_g": 0.09289059787988663, + "embed_lm_head_sharpness": 0.0041905795224010944, + "layer_1_sharpness": 
0.02130335010588169, + "layer_2_sharpness": 0.0765935480594635, + "layer_3_sharpness": 0.007190674543380737, + "layer_4_sharpness": 0.004090856295078993, + "layer_5_sharpness": 0.00243672844953835, + "layer_6_sharpness": 0.0015713301254436374, + "layer_7_sharpness": 0.0011376794427633286, + "layer_8_sharpness": 0.001064473413862288, + "layer_9_sharpness": 0.0008940460393205285, + "layer_10_sharpness": 0.00047334920964203775, + "layer_11_sharpness": 0.0005516087985597551, + "layer_12_sharpness": 0.0006300288368947804, + "block0_q_sharpness": 0.0010001111077144742, + "block0_k_sharpness": 0.0016039751935750246, + "block0_v_sharpness": 0.023920942097902298, + "block0_o_sharpness": 0.007441017776727676, + "block0_mlp_win_sharpness": 0.03550232946872711, + "block0_mlp_wout_sharpness": 0.002544969320297241, + "block3_q_sharpness": 0.0005615305854007602, + "block3_k_sharpness": 0.04169691726565361, + "block3_v_sharpness": 0.008610079064965248, + "block3_o_sharpness": 0.002669094828888774, + "block3_mlp_win_sharpness": 0.000542129622772336, + "block3_mlp_wout_sharpness": 0.00010559611109783873, + "block7_q_sharpness": 7.836984877940267e-05, + "block7_k_sharpness": 8.720585174160078e-05, + "block7_v_sharpness": 0.0032413729932159185, + "block7_o_sharpness": 5.504539149114862e-05, + "block7_mlp_win_sharpness": 0.0004259531560819596, + "block7_mlp_wout_sharpness": 8.345858077518642e-05, + "block11_q_sharpness": 7.829598325770348e-05, + "block11_k_sharpness": 4.8861758841667324e-05, + "block11_v_sharpness": 0.00010067762923426926, + "block11_o_sharpness": 3.606642349041067e-05, + "block11_mlp_win_sharpness": 0.000542925379704684, + "block11_mlp_wout_sharpness": 0.0010304444003850222, + "sum_layer_numerators": 0.01720352401088807, + "block_diag_sharpness": 0.005227137999953901, + "cross_layer_sharpness": 0.007579478723583544 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..c5d834239199f063e97c918e2d66f654f56ea316 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_7500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.224472761154175, + "total_l1_linf_norm": 18607.703125, + "total_spectral_norm": 2.2244725227355957, + "embed_lm_head_update_fnorm": 1.3583321571350098, + "embed_lm_head_max_l1_linf_norm": 0.5034505128860474, + "embed_lm_head_max_spectral_norm": 0.2845570147037506, + "layer_1_update_fnorm": 0.47332650423049927, + "layer_1_max_l1_linf_norm": 0.4317704141139984, + "layer_1_max_spectral_norm": 0.012042131274938583, + "layer_2_update_fnorm": 0.2766857147216797, + "layer_2_max_l1_linf_norm": 0.6007856130599976, + "layer_2_max_spectral_norm": 0.013262108899652958, + "layer_3_update_fnorm": 0.28226736187934875, + "layer_3_max_l1_linf_norm": 0.5310328006744385, + "layer_3_max_spectral_norm": 0.014008836820721626, + "layer_4_update_fnorm": 0.3966064751148224, + "layer_4_max_l1_linf_norm": 0.5767563581466675, + "layer_4_max_spectral_norm": 0.021565739065408707, + "layer_5_update_fnorm": 0.469023197889328, + "layer_5_max_l1_linf_norm": 0.48438534140586853, + "layer_5_max_spectral_norm": 0.01465386152267456, + "layer_6_update_fnorm": 0.5295866131782532, + "layer_6_max_l1_linf_norm": 0.4588565230369568, + 
"layer_6_max_spectral_norm": 0.012038339860737324, + "layer_7_update_fnorm": 0.5843761563301086, + "layer_7_max_l1_linf_norm": 0.43569836020469666, + "layer_7_max_spectral_norm": 0.012044246308505535, + "layer_8_update_fnorm": 0.5902321934700012, + "layer_8_max_l1_linf_norm": 0.413558691740036, + "layer_8_max_spectral_norm": 0.012047728523612022, + "layer_9_update_fnorm": 0.5894370675086975, + "layer_9_max_l1_linf_norm": 0.44923728704452515, + "layer_9_max_spectral_norm": 0.012043487280607224, + "layer_10_update_fnorm": 0.5923781394958496, + "layer_10_max_l1_linf_norm": 0.47503358125686646, + "layer_10_max_spectral_norm": 0.012050028890371323, + "layer_11_update_fnorm": 0.5650453567504883, + "layer_11_max_l1_linf_norm": 0.4867516756057739, + "layer_11_max_spectral_norm": 0.012043281458318233, + "layer_12_update_fnorm": 0.5980874300003052, + "layer_12_max_l1_linf_norm": 0.5021628141403198, + "layer_12_max_spectral_norm": 0.012046167626976967, + "block0_q_update_fnorm": 0.23722688853740692, + "block0_q_max_l1_linf_norm": 0.2100747674703598, + "block0_q_max_spectral_norm": 0.012042131274938583, + "block0_k_update_fnorm": 0.2324756681919098, + "block0_k_max_l1_linf_norm": 0.20874567329883575, + "block0_k_max_spectral_norm": 0.012040412984788418, + "block0_v_update_fnorm": 0.10695324093103409, + "block0_v_max_l1_linf_norm": 0.13435086607933044, + "block0_v_max_spectral_norm": 0.012020884081721306, + "block0_o_update_fnorm": 0.16876253485679626, + "block0_o_max_l1_linf_norm": 0.16522717475891113, + "block0_o_max_spectral_norm": 0.01202979777008295, + "block0_mlp_win_update_fnorm": 0.1926475465297699, + "block0_mlp_win_max_l1_linf_norm": 0.19080981612205505, + "block0_mlp_win_max_spectral_norm": 0.012027286924421787, + "block0_mlp_wout_update_fnorm": 0.19087694585323334, + "block0_mlp_wout_max_l1_linf_norm": 0.3091258406639099, + "block0_mlp_wout_max_spectral_norm": 0.012029808945953846, + "block3_q_update_fnorm": 0.038954783231019974, + "block3_q_max_l1_linf_norm": 0.1143408715724945, + "block3_q_max_spectral_norm": 0.011955423280596733, + "block3_k_update_fnorm": 0.04668223485350609, + "block3_k_max_l1_linf_norm": 0.10498882830142975, + "block3_k_max_spectral_norm": 0.011952062137424946, + "block3_v_update_fnorm": 0.11816051602363586, + "block3_v_max_l1_linf_norm": 0.1444666087627411, + "block3_v_max_spectral_norm": 0.012026899494230747, + "block3_o_update_fnorm": 0.1421952098608017, + "block3_o_max_l1_linf_norm": 0.12600170075893402, + "block3_o_max_spectral_norm": 0.012029403820633888, + "block3_mlp_win_update_fnorm": 0.21486668288707733, + "block3_mlp_win_max_l1_linf_norm": 0.18399012088775635, + "block3_mlp_win_max_spectral_norm": 0.012030287645757198, + "block3_mlp_wout_update_fnorm": 0.2687261998653412, + "block3_mlp_wout_max_l1_linf_norm": 0.4356236457824707, + "block3_mlp_wout_max_spectral_norm": 0.012036774307489395, + "block7_q_update_fnorm": 0.23858900368213654, + "block7_q_max_l1_linf_norm": 0.2062954306602478, + "block7_q_max_spectral_norm": 0.0120425745844841, + "block7_k_update_fnorm": 0.24591487646102905, + "block7_k_max_l1_linf_norm": 0.20962515473365784, + "block7_k_max_spectral_norm": 0.012040901929140091, + "block7_v_update_fnorm": 0.18554246425628662, + "block7_v_max_l1_linf_norm": 0.2061690092086792, + "block7_v_max_spectral_norm": 0.01203172281384468, + "block7_o_update_fnorm": 0.24703074991703033, + "block7_o_max_l1_linf_norm": 0.2092944234609604, + "block7_o_max_spectral_norm": 0.012044540606439114, + "block7_mlp_win_update_fnorm": 0.2794719338417053, + 
"block7_mlp_win_max_l1_linf_norm": 0.1676587462425232, + "block7_mlp_win_max_spectral_norm": 0.012047728523612022, + "block7_mlp_wout_update_fnorm": 0.23919963836669922, + "block7_mlp_wout_max_l1_linf_norm": 0.39847394824028015, + "block7_mlp_wout_max_spectral_norm": 0.01139539573341608, + "block11_q_update_fnorm": 0.24742408096790314, + "block11_q_max_l1_linf_norm": 0.21264615654945374, + "block11_q_max_spectral_norm": 0.012041348032653332, + "block11_k_update_fnorm": 0.2492498904466629, + "block11_k_max_l1_linf_norm": 0.21631936728954315, + "block11_k_max_spectral_norm": 0.012038526125252247, + "block11_v_update_fnorm": 0.24338825047016144, + "block11_v_max_l1_linf_norm": 0.20445707440376282, + "block11_v_max_spectral_norm": 0.01204091589897871, + "block11_o_update_fnorm": 0.24731780588626862, + "block11_o_max_l1_linf_norm": 0.20822742581367493, + "block11_o_max_spectral_norm": 0.012046167626976967, + "block11_mlp_win_update_fnorm": 0.23401400446891785, + "block11_mlp_win_max_l1_linf_norm": 0.16398197412490845, + "block11_mlp_win_max_spectral_norm": 0.011344260536134243, + "block11_mlp_wout_update_fnorm": 0.24280446767807007, + "block11_mlp_wout_max_l1_linf_norm": 0.4205542206764221, + "block11_mlp_wout_max_spectral_norm": 0.012004346586763859, + "total_sharpness": 0.5091426968574524, + "block_total_sharpness": 0.7111107707023621, + "v_norm_block": 1.7615934610366821, + "v_T_H_v_block": 2.2067272663116455, + "v_norm": 2.224472761154175, + "ip_v_neg_g_hvp": 0.045335553586483, + "cos_v_neg_g_hvp": 0.01932256855070591, + "g_hvp_norm": 1.054743766784668, + "ip_v_neg_g_t": 0.05770479515194893, + "cos_v_neg_g_t": 0.022833358496427536, + "g_t_norm": 1.1360960006713867, + "g_norm": 1.054743766784668, + "hv_norm": 899.9683837890625, + "cos_v_hv": 0.0012584597570821643, + "hg_norm": 1059118.25, + "cos_g_hg": 0.036114972084760666, + "v_parallel_norm": 0.0038923092652112246, + "v_perp_norm": 2.2244691848754883, + "embed_lm_head_v_norm": 1.3583321571350098, + "embed_lm_head_cos_v_neg_g": 0.021771321073174477, + "layer_1_v_norm": 0.47332650423049927, + "layer_1_cos_v_neg_g": 0.0067411502823233604, + "layer_2_v_norm": 0.2766857147216797, + "layer_2_cos_v_neg_g": 0.018178438767790794, + "layer_3_v_norm": 0.28226736187934875, + "layer_3_cos_v_neg_g": 0.03534150868654251, + "layer_4_v_norm": 0.3966064751148224, + "layer_4_cos_v_neg_g": 0.026536019518971443, + "layer_5_v_norm": 0.469023197889328, + "layer_5_cos_v_neg_g": 0.026016071438789368, + "layer_6_v_norm": 0.5295865535736084, + "layer_6_cos_v_neg_g": 0.027614636346697807, + "layer_7_v_norm": 0.5843761563301086, + "layer_7_cos_v_neg_g": 0.025951184332370758, + "layer_8_v_norm": 0.5902321934700012, + "layer_8_cos_v_neg_g": 0.026848526671528816, + "layer_9_v_norm": 0.5894370675086975, + "layer_9_cos_v_neg_g": 0.027659112587571144, + "layer_10_v_norm": 0.5923781394958496, + "layer_10_cos_v_neg_g": 0.03059534542262554, + "layer_11_v_norm": 0.5650453567504883, + "layer_11_cos_v_neg_g": 0.040101077407598495, + "layer_12_v_norm": 0.5980874300003052, + "layer_12_cos_v_neg_g": 0.07293208688497543, + "block0_q_v_norm": 0.23722688853740692, + "block0_q_cos_v_neg_g": 0.021269667893648148, + "block0_k_v_norm": 0.2324756681919098, + "block0_k_cos_v_neg_g": 0.01151476614177227, + "block0_v_v_norm": 0.10695324093103409, + "block0_v_cos_v_neg_g": 0.004867081064730883, + "block0_o_v_norm": 0.16876253485679626, + "block0_o_cos_v_neg_g": 0.019370080903172493, + "block0_mlp_win_v_norm": 0.1926475465297699, + "block0_mlp_win_cos_v_neg_g": 0.014994797296822071, + 
"block0_mlp_wout_v_norm": 0.19087694585323334, + "block0_mlp_wout_cos_v_neg_g": 0.018958255648612976, + "block3_q_v_norm": 0.038954783231019974, + "block3_q_cos_v_neg_g": 0.06944204866886139, + "block3_k_v_norm": 0.04668223485350609, + "block3_k_cos_v_neg_g": 0.09508489817380905, + "block3_v_v_norm": 0.11816051602363586, + "block3_v_cos_v_neg_g": 0.03161700814962387, + "block3_o_v_norm": 0.1421952098608017, + "block3_o_cos_v_neg_g": 0.030217967927455902, + "block3_mlp_win_v_norm": 0.21486668288707733, + "block3_mlp_win_cos_v_neg_g": 0.022871797904372215, + "block3_mlp_wout_v_norm": 0.2687261998653412, + "block3_mlp_wout_cos_v_neg_g": 0.04381420835852623, + "block7_q_v_norm": 0.23858900368213654, + "block7_q_cos_v_neg_g": 0.031766731292009354, + "block7_k_v_norm": 0.24591487646102905, + "block7_k_cos_v_neg_g": 0.08232762664556503, + "block7_v_v_norm": 0.18554246425628662, + "block7_v_cos_v_neg_g": 0.027972755953669548, + "block7_o_v_norm": 0.24703074991703033, + "block7_o_cos_v_neg_g": 0.08437510579824448, + "block7_mlp_win_v_norm": 0.2794719338417053, + "block7_mlp_win_cos_v_neg_g": 0.030818821862339973, + "block7_mlp_wout_v_norm": 0.23919963836669922, + "block7_mlp_wout_cos_v_neg_g": 0.12534314393997192, + "block11_q_v_norm": 0.24742408096790314, + "block11_q_cos_v_neg_g": 0.07037725299596786, + "block11_k_v_norm": 0.2492498904466629, + "block11_k_cos_v_neg_g": 0.09683530032634735, + "block11_v_v_norm": 0.24338825047016144, + "block11_v_cos_v_neg_g": 0.0455944687128067, + "block11_o_v_norm": 0.24731780588626862, + "block11_o_cos_v_neg_g": 0.07974186539649963, + "block11_mlp_win_v_norm": 0.23401400446891785, + "block11_mlp_win_cos_v_neg_g": 0.10023211687803268, + "block11_mlp_wout_v_norm": 0.24280446767807007, + "block11_mlp_wout_cos_v_neg_g": 0.09011875838041306, + "embed_lm_head_sharpness": 0.011163048446178436, + "layer_1_sharpness": 3.908822774887085, + "layer_2_sharpness": 2.354262590408325, + "layer_3_sharpness": 0.0973777249455452, + "layer_4_sharpness": 0.014033942483365536, + "layer_5_sharpness": 0.0030397872906178236, + "layer_6_sharpness": 0.0018420832930132747, + "layer_7_sharpness": 0.0017085665604099631, + "layer_8_sharpness": 0.0012907333439216018, + "layer_9_sharpness": 0.0009556911536492407, + "layer_10_sharpness": 0.000586180598475039, + "layer_11_sharpness": 0.0006287088035605848, + "layer_12_sharpness": 0.000630230933893472, + "block0_q_sharpness": 0.0003489432274363935, + "block0_k_sharpness": 0.00034302397398278117, + "block0_v_sharpness": 37.46584701538086, + "block0_o_sharpness": 0.6076331734657288, + "block0_mlp_win_sharpness": 0.2033022940158844, + "block0_mlp_wout_sharpness": 0.20558767020702362, + "block3_q_sharpness": 0.0015503568574786186, + "block3_k_sharpness": 0.03144215792417526, + "block3_v_sharpness": 0.030026907101273537, + "block3_o_sharpness": 0.0061674038879573345, + "block3_mlp_win_sharpness": 0.0014350926503539085, + "block3_mlp_wout_sharpness": 0.0019987639971077442, + "block7_q_sharpness": 0.00010244063014397398, + "block7_k_sharpness": 0.00010800622112583369, + "block7_v_sharpness": 0.0034986548125743866, + "block7_o_sharpness": 7.657177047803998e-05, + "block7_mlp_win_sharpness": 0.0005336977774277329, + "block7_mlp_wout_sharpness": 0.00011699277092702687, + "block11_q_sharpness": 7.33117267373018e-05, + "block11_k_sharpness": 4.673775038099848e-05, + "block11_v_sharpness": 0.00010471502901054919, + "block11_o_sharpness": 4.6074412239249796e-05, + "block11_mlp_win_sharpness": 0.0004929733113385737, + "block11_mlp_wout_sharpness": 
0.0009077875292859972, + "sum_layer_numerators": 1.069103714399405, + "block_diag_sharpness": 0.34451525680133976, + "cross_layer_sharpness": 0.3665955139010223 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..521b4d01ef32b4b620a9c92b13bdc2033ad829af --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.203331708908081, + "total_l1_linf_norm": 18174.654296875, + "total_spectral_norm": 2.2033321857452393, + "embed_lm_head_update_fnorm": 1.3456779718399048, + "embed_lm_head_max_l1_linf_norm": 0.36586588621139526, + "embed_lm_head_max_spectral_norm": 0.2494615912437439, + "layer_1_update_fnorm": 0.3800741136074066, + "layer_1_max_l1_linf_norm": 0.6827685832977295, + "layer_1_max_spectral_norm": 0.014383211731910706, + "layer_2_update_fnorm": 0.17993636429309845, + "layer_2_max_l1_linf_norm": 0.6179600954055786, + "layer_2_max_spectral_norm": 0.01355454046279192, + "layer_3_update_fnorm": 0.29283004999160767, + "layer_3_max_l1_linf_norm": 0.5848714113235474, + "layer_3_max_spectral_norm": 0.013213487342000008, + "layer_4_update_fnorm": 0.4018252491950989, + "layer_4_max_l1_linf_norm": 0.5609246492385864, + "layer_4_max_spectral_norm": 0.02154366485774517, + "layer_5_update_fnorm": 0.4939477741718292, + "layer_5_max_l1_linf_norm": 0.47645115852355957, + "layer_5_max_spectral_norm": 0.01568322256207466, + "layer_6_update_fnorm": 0.5357856750488281, + "layer_6_max_l1_linf_norm": 0.42847776412963867, + "layer_6_max_spectral_norm": 0.012540092691779137, + "layer_7_update_fnorm": 0.5875978469848633, + "layer_7_max_l1_linf_norm": 0.3940393328666687, + "layer_7_max_spectral_norm": 0.012052157893776894, + "layer_8_update_fnorm": 0.59371018409729, + "layer_8_max_l1_linf_norm": 0.3990545868873596, + "layer_8_max_spectral_norm": 0.012046574614942074, + "layer_9_update_fnorm": 0.5925528407096863, + "layer_9_max_l1_linf_norm": 0.3999589681625366, + "layer_9_max_spectral_norm": 0.012045035138726234, + "layer_10_update_fnorm": 0.5945631265640259, + "layer_10_max_l1_linf_norm": 0.4092007875442505, + "layer_10_max_spectral_norm": 0.012057716958224773, + "layer_11_update_fnorm": 0.5735071897506714, + "layer_11_max_l1_linf_norm": 0.46125608682632446, + "layer_11_max_spectral_norm": 0.012042452581226826, + "layer_12_update_fnorm": 0.5978530645370483, + "layer_12_max_l1_linf_norm": 0.4585931897163391, + "layer_12_max_spectral_norm": 0.012045868672430515, + "block0_q_update_fnorm": 0.2354024201631546, + "block0_q_max_l1_linf_norm": 0.210496187210083, + "block0_q_max_spectral_norm": 0.012040173634886742, + "block0_k_update_fnorm": 0.23381230235099792, + "block0_k_max_l1_linf_norm": 0.20897401869297028, + "block0_k_max_spectral_norm": 0.01203981414437294, + "block0_v_update_fnorm": 0.07745305448770523, + "block0_v_max_l1_linf_norm": 0.11934918165206909, + "block0_v_max_spectral_norm": 0.012017170898616314, + "block0_o_update_fnorm": 0.08613838255405426, + "block0_o_max_l1_linf_norm": 0.11544603854417801, + "block0_o_max_spectral_norm": 0.01202392764389515, + "block0_mlp_win_update_fnorm": 0.08683917671442032, + "block0_mlp_win_max_l1_linf_norm": 0.166123628616333, + 
"block0_mlp_win_max_spectral_norm": 0.012015708722174168, + "block0_mlp_wout_update_fnorm": 0.11410696059465408, + "block0_mlp_wout_max_l1_linf_norm": 0.18899968266487122, + "block0_mlp_wout_max_spectral_norm": 0.01202445849776268, + "block3_q_update_fnorm": 0.03595208004117012, + "block3_q_max_l1_linf_norm": 0.08149728178977966, + "block3_q_max_spectral_norm": 0.012009945698082447, + "block3_k_update_fnorm": 0.039986491203308105, + "block3_k_max_l1_linf_norm": 0.08907169103622437, + "block3_k_max_spectral_norm": 0.011987699195742607, + "block3_v_update_fnorm": 0.12368878722190857, + "block3_v_max_l1_linf_norm": 0.14100515842437744, + "block3_v_max_spectral_norm": 0.012027830816805363, + "block3_o_update_fnorm": 0.13714461028575897, + "block3_o_max_l1_linf_norm": 0.12181925028562546, + "block3_o_max_spectral_norm": 0.012026745826005936, + "block3_mlp_win_update_fnorm": 0.2180895358324051, + "block3_mlp_win_max_l1_linf_norm": 0.1981661021709442, + "block3_mlp_win_max_spectral_norm": 0.012031959369778633, + "block3_mlp_wout_update_fnorm": 0.2753608524799347, + "block3_mlp_wout_max_l1_linf_norm": 0.446816086769104, + "block3_mlp_wout_max_spectral_norm": 0.01204710453748703, + "block7_q_update_fnorm": 0.23753370344638824, + "block7_q_max_l1_linf_norm": 0.20558494329452515, + "block7_q_max_spectral_norm": 0.012041853740811348, + "block7_k_update_fnorm": 0.24604931473731995, + "block7_k_max_l1_linf_norm": 0.20834973454475403, + "block7_k_max_spectral_norm": 0.01204206608235836, + "block7_v_update_fnorm": 0.19413450360298157, + "block7_v_max_l1_linf_norm": 0.20457340776920319, + "block7_v_max_spectral_norm": 0.012034278362989426, + "block7_o_update_fnorm": 0.24759848415851593, + "block7_o_max_l1_linf_norm": 0.20654025673866272, + "block7_o_max_spectral_norm": 0.012042746879160404, + "block7_mlp_win_update_fnorm": 0.27969837188720703, + "block7_mlp_win_max_l1_linf_norm": 0.16546031832695007, + "block7_mlp_win_max_spectral_norm": 0.012046574614942074, + "block7_mlp_wout_update_fnorm": 0.2410968393087387, + "block7_mlp_wout_max_l1_linf_norm": 0.3990545868873596, + "block7_mlp_wout_max_spectral_norm": 0.011397227644920349, + "block11_q_update_fnorm": 0.24723012745380402, + "block11_q_max_l1_linf_norm": 0.2151413857936859, + "block11_q_max_spectral_norm": 0.012038717977702618, + "block11_k_update_fnorm": 0.2492399513721466, + "block11_k_max_l1_linf_norm": 0.2171875536441803, + "block11_k_max_spectral_norm": 0.012039211578667164, + "block11_v_update_fnorm": 0.24479126930236816, + "block11_v_max_l1_linf_norm": 0.20563355088233948, + "block11_v_max_spectral_norm": 0.012045059353113174, + "block11_o_update_fnorm": 0.24767886102199554, + "block11_o_max_l1_linf_norm": 0.2102203518152237, + "block11_o_max_spectral_norm": 0.012045868672430515, + "block11_mlp_win_update_fnorm": 0.2355295866727829, + "block11_mlp_win_max_l1_linf_norm": 0.16621029376983643, + "block11_mlp_win_max_spectral_norm": 0.011352380737662315, + "block11_mlp_wout_update_fnorm": 0.2392255663871765, + "block11_mlp_wout_max_l1_linf_norm": 0.4102461040019989, + "block11_mlp_wout_max_spectral_norm": 0.01181052066385746, + "total_sharpness": 0.06608875095844269, + "block_total_sharpness": 0.0693107396364212, + "v_norm_block": 1.7446554899215698, + "v_T_H_v_block": 0.21096959710121155, + "v_norm": 2.203331708908081, + "ip_v_neg_g_hvp": 0.03407440334558487, + "cos_v_neg_g_hvp": 0.013794388622045517, + "g_hvp_norm": 1.1211040019989014, + "ip_v_neg_g_t": 0.05067421868443489, + "cos_v_neg_g_t": 0.01889907568693161, + "g_t_norm": 1.216932773590088, + 
"g_norm": 1.1211040019989014, + "hv_norm": 133.35166931152344, + "cos_v_hv": 0.0010919657070189714, + "hg_norm": 1079646.625, + "cos_g_hg": 0.5469369888305664, + "v_parallel_norm": 0.003494017058983445, + "v_perp_norm": 2.203329086303711, + "embed_lm_head_v_norm": 1.3456779718399048, + "embed_lm_head_cos_v_neg_g": 0.017994632944464684, + "layer_1_v_norm": 0.3800741136074066, + "layer_1_cos_v_neg_g": -0.005871276371181011, + "layer_2_v_norm": 0.17993636429309845, + "layer_2_cos_v_neg_g": 0.00994738470762968, + "layer_3_v_norm": 0.29283007979393005, + "layer_3_cos_v_neg_g": 0.038467686623334885, + "layer_4_v_norm": 0.4018252491950989, + "layer_4_cos_v_neg_g": 0.026663031429052353, + "layer_5_v_norm": 0.4939477741718292, + "layer_5_cos_v_neg_g": 0.02507801726460457, + "layer_6_v_norm": 0.5357856750488281, + "layer_6_cos_v_neg_g": 0.026152372360229492, + "layer_7_v_norm": 0.5875978469848633, + "layer_7_cos_v_neg_g": 0.026229215785861015, + "layer_8_v_norm": 0.5937101244926453, + "layer_8_cos_v_neg_g": 0.027524610981345177, + "layer_9_v_norm": 0.5925528407096863, + "layer_9_cos_v_neg_g": 0.02740461938083172, + "layer_10_v_norm": 0.5945631265640259, + "layer_10_cos_v_neg_g": 0.03088834322988987, + "layer_11_v_norm": 0.5735071897506714, + "layer_11_cos_v_neg_g": 0.03908412531018257, + "layer_12_v_norm": 0.5978530645370483, + "layer_12_cos_v_neg_g": 0.06664726883172989, + "block0_q_v_norm": 0.2354024201631546, + "block0_q_cos_v_neg_g": 0.006145660765469074, + "block0_k_v_norm": 0.23381230235099792, + "block0_k_cos_v_neg_g": 0.006410898175090551, + "block0_v_v_norm": 0.07745305448770523, + "block0_v_cos_v_neg_g": -0.043494850397109985, + "block0_o_v_norm": 0.08613838255405426, + "block0_o_cos_v_neg_g": 0.0013930359855294228, + "block0_mlp_win_v_norm": 0.08683917671442032, + "block0_mlp_win_cos_v_neg_g": 0.004803456831723452, + "block0_mlp_wout_v_norm": 0.11410696059465408, + "block0_mlp_wout_cos_v_neg_g": 0.008689364418387413, + "block3_q_v_norm": 0.03595208004117012, + "block3_q_cos_v_neg_g": 0.015005321241915226, + "block3_k_v_norm": 0.039986491203308105, + "block3_k_cos_v_neg_g": 0.20498324930667877, + "block3_v_v_norm": 0.12368878722190857, + "block3_v_cos_v_neg_g": 0.029432402923703194, + "block3_o_v_norm": 0.13714461028575897, + "block3_o_cos_v_neg_g": 0.028772536665201187, + "block3_mlp_win_v_norm": 0.2180895358324051, + "block3_mlp_win_cos_v_neg_g": 0.027179591357707977, + "block3_mlp_wout_v_norm": 0.2753608524799347, + "block3_mlp_wout_cos_v_neg_g": 0.049622777849435806, + "block7_q_v_norm": 0.23753370344638824, + "block7_q_cos_v_neg_g": 0.031051399186253548, + "block7_k_v_norm": 0.24604931473731995, + "block7_k_cos_v_neg_g": 0.07451777905225754, + "block7_v_v_norm": 0.19413450360298157, + "block7_v_cos_v_neg_g": 0.028665876016020775, + "block7_o_v_norm": 0.24759848415851593, + "block7_o_cos_v_neg_g": 0.08114941418170929, + "block7_mlp_win_v_norm": 0.27969837188720703, + "block7_mlp_win_cos_v_neg_g": 0.03283156082034111, + "block7_mlp_wout_v_norm": 0.2410968393087387, + "block7_mlp_wout_cos_v_neg_g": 0.12060757726430893, + "block11_q_v_norm": 0.24723012745380402, + "block11_q_cos_v_neg_g": 0.0777653232216835, + "block11_k_v_norm": 0.2492399513721466, + "block11_k_cos_v_neg_g": 0.10073190182447433, + "block11_v_v_norm": 0.24479126930236816, + "block11_v_cos_v_neg_g": 0.04681837931275368, + "block11_o_v_norm": 0.24767886102199554, + "block11_o_cos_v_neg_g": 0.07975860685110092, + "block11_mlp_win_v_norm": 0.2355295866727829, + "block11_mlp_win_cos_v_neg_g": 0.08973861485719681, + 
"block11_mlp_wout_v_norm": 0.2392255663871765, + "block11_mlp_wout_cos_v_neg_g": 0.0691329836845398, + "embed_lm_head_sharpness": 0.007218779064714909, + "layer_1_sharpness": 0.1220577284693718, + "layer_2_sharpness": 0.23156283795833588, + "layer_3_sharpness": 0.693155825138092, + "layer_4_sharpness": 0.005093627143651247, + "layer_5_sharpness": 0.0020383407827466726, + "layer_6_sharpness": 0.0016916695749387145, + "layer_7_sharpness": 0.0013115513138473034, + "layer_8_sharpness": 0.001360007794573903, + "layer_9_sharpness": 0.0009123986237682402, + "layer_10_sharpness": 0.000426066922955215, + "layer_11_sharpness": 0.0005167845520190895, + "layer_12_sharpness": 0.0004063751839566976, + "block0_q_sharpness": 0.005185266025364399, + "block0_k_sharpness": 0.02619638666510582, + "block0_v_sharpness": 0.6290346384048462, + "block0_o_sharpness": 2.4447498321533203, + "block0_mlp_win_sharpness": 0.9079527854919434, + "block0_mlp_wout_sharpness": 0.04762069508433342, + "block3_q_sharpness": 0.0001312330859946087, + "block3_k_sharpness": 0.183595210313797, + "block3_v_sharpness": 0.0007732851663604379, + "block3_o_sharpness": 0.003266487503424287, + "block3_mlp_win_sharpness": 0.0005485582514666021, + "block3_mlp_wout_sharpness": 0.00036686734529212117, + "block7_q_sharpness": 0.00012695520126726478, + "block7_k_sharpness": 0.00010612500773277134, + "block7_v_sharpness": 0.0037196108605712652, + "block7_o_sharpness": 5.792796946479939e-05, + "block7_mlp_win_sharpness": 0.0006690607988275588, + "block7_mlp_wout_sharpness": 9.101810428546742e-05, + "block11_q_sharpness": 0.00016279956616926938, + "block11_k_sharpness": 6.961131293792278e-05, + "block11_v_sharpness": 0.00010553110769251361, + "block11_o_sharpness": 3.3525524486321956e-05, + "block11_mlp_win_sharpness": 0.00036344234831631184, + "block11_mlp_wout_sharpness": 0.00037525343941524625, + "sum_layer_numerators": 0.08809089369419701, + "block_diag_sharpness": 0.028940874717160243, + "cross_layer_sharpness": 0.040369864919260964 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..be444497e32159e9db58c9e77b368700101a43ea --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_8500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.6394689083099365, + "total_l1_linf_norm": 13629.015625, + "total_spectral_norm": 1.639468789100647, + "embed_lm_head_update_fnorm": 0.9851737022399902, + "embed_lm_head_max_l1_linf_norm": 0.3140864968299866, + "embed_lm_head_max_spectral_norm": 0.2963910698890686, + "layer_1_update_fnorm": 0.33458587527275085, + "layer_1_max_l1_linf_norm": 0.3607509136199951, + "layer_1_max_spectral_norm": 0.009035575203597546, + "layer_2_update_fnorm": 0.18552735447883606, + "layer_2_max_l1_linf_norm": 0.4606139063835144, + "layer_2_max_spectral_norm": 0.010208941996097565, + "layer_3_update_fnorm": 0.22898636758327484, + "layer_3_max_l1_linf_norm": 0.4473346471786499, + "layer_3_max_spectral_norm": 0.009908976964652538, + "layer_4_update_fnorm": 0.2926810681819916, + "layer_4_max_l1_linf_norm": 0.44395291805267334, + "layer_4_max_spectral_norm": 0.015587447211146355, + "layer_5_update_fnorm": 0.3448280990123749, + "layer_5_max_l1_linf_norm": 
0.42220234870910645, + "layer_5_max_spectral_norm": 0.013345121406018734, + "layer_6_update_fnorm": 0.39621609449386597, + "layer_6_max_l1_linf_norm": 0.3802202045917511, + "layer_6_max_spectral_norm": 0.009034719318151474, + "layer_7_update_fnorm": 0.4363807141780853, + "layer_7_max_l1_linf_norm": 0.3327980935573578, + "layer_7_max_spectral_norm": 0.009038732387125492, + "layer_8_update_fnorm": 0.4388691782951355, + "layer_8_max_l1_linf_norm": 0.3381226062774658, + "layer_8_max_spectral_norm": 0.00904158130288124, + "layer_9_update_fnorm": 0.4406437873840332, + "layer_9_max_l1_linf_norm": 0.3589959144592285, + "layer_9_max_spectral_norm": 0.00904081016778946, + "layer_10_update_fnorm": 0.44348034262657166, + "layer_10_max_l1_linf_norm": 0.3705306947231293, + "layer_10_max_spectral_norm": 0.009043924510478973, + "layer_11_update_fnorm": 0.4254898130893707, + "layer_11_max_l1_linf_norm": 0.3962949514389038, + "layer_11_max_spectral_norm": 0.009039292111992836, + "layer_12_update_fnorm": 0.449460506439209, + "layer_12_max_l1_linf_norm": 0.3951180577278137, + "layer_12_max_spectral_norm": 0.009039301425218582, + "block0_q_update_fnorm": 0.17383161187171936, + "block0_q_max_l1_linf_norm": 0.15651163458824158, + "block0_q_max_spectral_norm": 0.009035266935825348, + "block0_k_update_fnorm": 0.1734943836927414, + "block0_k_max_l1_linf_norm": 0.15330800414085388, + "block0_k_max_spectral_norm": 0.009035575203597546, + "block0_v_update_fnorm": 0.09318229556083679, + "block0_v_max_l1_linf_norm": 0.10842305421829224, + "block0_v_max_spectral_norm": 0.009026464074850082, + "block0_o_update_fnorm": 0.1167982667684555, + "block0_o_max_l1_linf_norm": 0.1207883358001709, + "block0_o_max_spectral_norm": 0.00902685895562172, + "block0_mlp_win_update_fnorm": 0.11859101057052612, + "block0_mlp_win_max_l1_linf_norm": 0.1318425089120865, + "block0_mlp_win_max_spectral_norm": 0.009024666622281075, + "block0_mlp_wout_update_fnorm": 0.1229277178645134, + "block0_mlp_wout_max_l1_linf_norm": 0.1950974315404892, + "block0_mlp_wout_max_spectral_norm": 0.009027635678648949, + "block3_q_update_fnorm": 0.025207890197634697, + "block3_q_max_l1_linf_norm": 0.07276906073093414, + "block3_q_max_spectral_norm": 0.008770687505602837, + "block3_k_update_fnorm": 0.03258237987756729, + "block3_k_max_l1_linf_norm": 0.07094652950763702, + "block3_k_max_spectral_norm": 0.009025276638567448, + "block3_v_update_fnorm": 0.09519261121749878, + "block3_v_max_l1_linf_norm": 0.10873662680387497, + "block3_v_max_spectral_norm": 0.009027060121297836, + "block3_o_update_fnorm": 0.1066817194223404, + "block3_o_max_l1_linf_norm": 0.09156528115272522, + "block3_o_max_spectral_norm": 0.009026303887367249, + "block3_mlp_win_update_fnorm": 0.15381188690662384, + "block3_mlp_win_max_l1_linf_norm": 0.13864369690418243, + "block3_mlp_win_max_spectral_norm": 0.009028770960867405, + "block3_mlp_wout_update_fnorm": 0.19814006984233856, + "block3_mlp_wout_max_l1_linf_norm": 0.32008278369903564, + "block3_mlp_wout_max_spectral_norm": 0.009033378213644028, + "block7_q_update_fnorm": 0.17563967406749725, + "block7_q_max_l1_linf_norm": 0.15543153882026672, + "block7_q_max_spectral_norm": 0.00903920829296112, + "block7_k_update_fnorm": 0.18377098441123962, + "block7_k_max_l1_linf_norm": 0.15498924255371094, + "block7_k_max_spectral_norm": 0.009036652743816376, + "block7_v_update_fnorm": 0.13538068532943726, + "block7_v_max_l1_linf_norm": 0.14925149083137512, + "block7_v_max_spectral_norm": 0.009030058048665524, + "block7_o_update_fnorm": 0.18541406095027924, + 
"block7_o_max_l1_linf_norm": 0.15826916694641113, + "block7_o_max_spectral_norm": 0.00904158130288124, + "block7_mlp_win_update_fnorm": 0.20769193768501282, + "block7_mlp_win_max_l1_linf_norm": 0.12696200609207153, + "block7_mlp_win_max_spectral_norm": 0.00903589092195034, + "block7_mlp_wout_update_fnorm": 0.17890894412994385, + "block7_mlp_wout_max_l1_linf_norm": 0.29738858342170715, + "block7_mlp_wout_max_spectral_norm": 0.008549056015908718, + "block11_q_update_fnorm": 0.18566720187664032, + "block11_q_max_l1_linf_norm": 0.16164112091064453, + "block11_q_max_spectral_norm": 0.00903711561113596, + "block11_k_update_fnorm": 0.18706762790679932, + "block11_k_max_l1_linf_norm": 0.16581371426582336, + "block11_k_max_spectral_norm": 0.009034178219735622, + "block11_v_update_fnorm": 0.18298004567623138, + "block11_v_max_l1_linf_norm": 0.15517620742321014, + "block11_v_max_spectral_norm": 0.009037597104907036, + "block11_o_update_fnorm": 0.18581229448318481, + "block11_o_max_l1_linf_norm": 0.15531745553016663, + "block11_o_max_spectral_norm": 0.009039301425218582, + "block11_mlp_win_update_fnorm": 0.17596684396266937, + "block11_mlp_win_max_l1_linf_norm": 0.12745898962020874, + "block11_mlp_win_max_spectral_norm": 0.008524304255843163, + "block11_mlp_wout_update_fnorm": 0.18282663822174072, + "block11_mlp_wout_max_l1_linf_norm": 0.3201632797718048, + "block11_mlp_wout_max_spectral_norm": 0.009029312059283257, + "total_sharpness": -0.028216201812028885, + "block_total_sharpness": -0.039873406291007996, + "v_norm_block": 1.3104546070098877, + "v_T_H_v_block": -0.06847424805164337, + "v_norm": 1.6394689083099365, + "ip_v_neg_g_hvp": 0.02420896105468273, + "cos_v_neg_g_hvp": 0.01272149384021759, + "g_hvp_norm": 1.160739779472351, + "ip_v_neg_g_t": 0.039516400545835495, + "cos_v_neg_g_t": 0.026221295818686485, + "g_t_norm": 0.9192212224006653, + "g_norm": 1.160739779472351, + "hv_norm": 41.22229766845703, + "cos_v_hv": -0.0011221981840208173, + "hg_norm": 100742.6953125, + "cos_g_hg": 0.4677945375442505, + "v_parallel_norm": 0.0024426402524113655, + "v_perp_norm": 1.6394671201705933, + "embed_lm_head_v_norm": 0.9851737022399902, + "embed_lm_head_cos_v_neg_g": 0.012894146144390106, + "layer_1_v_norm": 0.33458587527275085, + "layer_1_cos_v_neg_g": -0.005047732498496771, + "layer_2_v_norm": 0.18552735447883606, + "layer_2_cos_v_neg_g": -0.0032777816522866488, + "layer_3_v_norm": 0.22898636758327484, + "layer_3_cos_v_neg_g": 0.018478315323591232, + "layer_4_v_norm": 0.2926810681819916, + "layer_4_cos_v_neg_g": 0.026666974648833275, + "layer_5_v_norm": 0.3448280990123749, + "layer_5_cos_v_neg_g": 0.026958337053656578, + "layer_6_v_norm": 0.39621609449386597, + "layer_6_cos_v_neg_g": 0.026064973324537277, + "layer_7_v_norm": 0.4363807141780853, + "layer_7_cos_v_neg_g": 0.025642313063144684, + "layer_8_v_norm": 0.4388691782951355, + "layer_8_cos_v_neg_g": 0.025355901569128036, + "layer_9_v_norm": 0.4406437873840332, + "layer_9_cos_v_neg_g": 0.0276471059769392, + "layer_10_v_norm": 0.44348034262657166, + "layer_10_cos_v_neg_g": 0.028076419606804848, + "layer_11_v_norm": 0.4254898130893707, + "layer_11_cos_v_neg_g": 0.03658823296427727, + "layer_12_v_norm": 0.449460506439209, + "layer_12_cos_v_neg_g": 0.0695837065577507, + "block0_q_v_norm": 0.17383161187171936, + "block0_q_cos_v_neg_g": 0.002650992013514042, + "block0_k_v_norm": 0.1734943836927414, + "block0_k_cos_v_neg_g": 0.004633239936083555, + "block0_v_v_norm": 0.09318229556083679, + "block0_v_cos_v_neg_g": -0.016732199117541313, + "block0_o_v_norm": 
0.1167982667684555, + "block0_o_cos_v_neg_g": -0.006984137464314699, + "block0_mlp_win_v_norm": 0.11859101057052612, + "block0_mlp_win_cos_v_neg_g": -0.005692648235708475, + "block0_mlp_wout_v_norm": 0.1229277178645134, + "block0_mlp_wout_cos_v_neg_g": 0.008212643675506115, + "block3_q_v_norm": 0.025207890197634697, + "block3_q_cos_v_neg_g": 0.011264592409133911, + "block3_k_v_norm": 0.03258237987756729, + "block3_k_cos_v_neg_g": 0.06331811100244522, + "block3_v_v_norm": 0.09519261121749878, + "block3_v_cos_v_neg_g": 0.032360076904296875, + "block3_o_v_norm": 0.1066817194223404, + "block3_o_cos_v_neg_g": 0.028131192550063133, + "block3_mlp_win_v_norm": 0.15381188690662384, + "block3_mlp_win_cos_v_neg_g": 0.02665451541543007, + "block3_mlp_wout_v_norm": 0.19814006984233856, + "block3_mlp_wout_cos_v_neg_g": 0.04695802927017212, + "block7_q_v_norm": 0.17563967406749725, + "block7_q_cos_v_neg_g": 0.03256194293498993, + "block7_k_v_norm": 0.18377098441123962, + "block7_k_cos_v_neg_g": 0.08155108988285065, + "block7_v_v_norm": 0.13538068532943726, + "block7_v_cos_v_neg_g": 0.02886394038796425, + "block7_o_v_norm": 0.18541406095027924, + "block7_o_cos_v_neg_g": 0.08221634477376938, + "block7_mlp_win_v_norm": 0.20769193768501282, + "block7_mlp_win_cos_v_neg_g": 0.027457905933260918, + "block7_mlp_wout_v_norm": 0.17890894412994385, + "block7_mlp_wout_cos_v_neg_g": 0.12228483706712723, + "block11_q_v_norm": 0.18566720187664032, + "block11_q_cos_v_neg_g": 0.07118979841470718, + "block11_k_v_norm": 0.18706762790679932, + "block11_k_cos_v_neg_g": 0.09627260267734528, + "block11_v_v_norm": 0.18298004567623138, + "block11_v_cos_v_neg_g": 0.04363812878727913, + "block11_o_v_norm": 0.18581229448318481, + "block11_o_cos_v_neg_g": 0.07673488557338715, + "block11_mlp_win_v_norm": 0.17596684396266937, + "block11_mlp_win_cos_v_neg_g": 0.08908389508724213, + "block11_mlp_wout_v_norm": 0.18282663822174072, + "block11_mlp_wout_cos_v_neg_g": 0.08406692743301392, + "embed_lm_head_sharpness": 0.000754724838770926, + "layer_1_sharpness": -0.4144788682460785, + "layer_2_sharpness": -0.3346414268016815, + "layer_3_sharpness": 0.006715720985084772, + "layer_4_sharpness": -0.0006403781590051949, + "layer_5_sharpness": 0.0023653064854443073, + "layer_6_sharpness": 0.0022733472287654877, + "layer_7_sharpness": 0.0016918055480346084, + "layer_8_sharpness": 0.001734451623633504, + "layer_9_sharpness": 0.0013763131573796272, + "layer_10_sharpness": 0.0006175215821713209, + "layer_11_sharpness": 0.0005538584082387388, + "layer_12_sharpness": 0.0004811794206034392, + "block0_q_sharpness": 0.00019811757374554873, + "block0_k_sharpness": 0.0004449091211427003, + "block0_v_sharpness": -0.8947863578796387, + "block0_o_sharpness": -0.9751696586608887, + "block0_mlp_win_sharpness": -0.05869601294398308, + "block0_mlp_wout_sharpness": -0.0450044684112072, + "block3_q_sharpness": 0.0002483549469616264, + "block3_k_sharpness": 0.006024574860930443, + "block3_v_sharpness": 0.004857377614825964, + "block3_o_sharpness": 0.0017351510468870401, + "block3_mlp_win_sharpness": 0.00033561381860636175, + "block3_mlp_wout_sharpness": 0.0009707043063826859, + "block7_q_sharpness": 0.00010257600661134347, + "block7_k_sharpness": 0.00011692594853229821, + "block7_v_sharpness": 0.004798029083758593, + "block7_o_sharpness": 8.546451135771349e-05, + "block7_mlp_win_sharpness": 0.0008179377764463425, + "block7_mlp_wout_sharpness": 0.00013204275455791503, + "block11_q_sharpness": 8.0562946095597e-05, + "block11_k_sharpness": 6.311378092505038e-05, + 
"block11_v_sharpness": 9.55135838012211e-05, + "block11_o_sharpness": 3.661197843030095e-05, + "block11_mlp_win_sharpness": 0.00042866644798778, + "block11_mlp_wout_sharpness": 0.0005744807422161102, + "sum_layer_numerators": -0.055740637665141535, + "block_diag_sharpness": -0.0324584643331046, + "cross_layer_sharpness": -0.007414941957903397 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..d3091375a82e144fbcd1b5a7b109530b46cd2bd5 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.096049427986145, + "total_l1_linf_norm": 9097.8896484375, + "total_spectral_norm": 1.0960493087768555, + "embed_lm_head_update_fnorm": 0.6630154848098755, + "embed_lm_head_max_l1_linf_norm": 0.1866961419582367, + "embed_lm_head_max_spectral_norm": 0.10438515245914459, + "layer_1_update_fnorm": 0.20902946591377258, + "layer_1_max_l1_linf_norm": 0.18454602360725403, + "layer_1_max_spectral_norm": 0.006026541348546743, + "layer_2_update_fnorm": 0.11472605913877487, + "layer_2_max_l1_linf_norm": 0.22404204308986664, + "layer_2_max_spectral_norm": 0.006018905434757471, + "layer_3_update_fnorm": 0.1619275063276291, + "layer_3_max_l1_linf_norm": 0.2464803159236908, + "layer_3_max_spectral_norm": 0.006020824890583754, + "layer_4_update_fnorm": 0.19798657298088074, + "layer_4_max_l1_linf_norm": 0.2471800297498703, + "layer_4_max_spectral_norm": 0.008836339227855206, + "layer_5_update_fnorm": 0.22152847051620483, + "layer_5_max_l1_linf_norm": 0.25623172521591187, + "layer_5_max_spectral_norm": 0.009878724813461304, + "layer_6_update_fnorm": 0.26894405484199524, + "layer_6_max_l1_linf_norm": 0.22960175573825836, + "layer_6_max_spectral_norm": 0.006025819573551416, + "layer_7_update_fnorm": 0.29179254174232483, + "layer_7_max_l1_linf_norm": 0.20979043841362, + "layer_7_max_spectral_norm": 0.006029069423675537, + "layer_8_update_fnorm": 0.29457545280456543, + "layer_8_max_l1_linf_norm": 0.20046588778495789, + "layer_8_max_spectral_norm": 0.006031963974237442, + "layer_9_update_fnorm": 0.29518038034439087, + "layer_9_max_l1_linf_norm": 0.20766060054302216, + "layer_9_max_spectral_norm": 0.006029430311173201, + "layer_10_update_fnorm": 0.297189325094223, + "layer_10_max_l1_linf_norm": 0.23397164046764374, + "layer_10_max_spectral_norm": 0.006030569318681955, + "layer_11_update_fnorm": 0.2844969630241394, + "layer_11_max_l1_linf_norm": 0.25031617283821106, + "layer_11_max_spectral_norm": 0.006028282456099987, + "layer_12_update_fnorm": 0.2995750606060028, + "layer_12_max_l1_linf_norm": 0.2517451345920563, + "layer_12_max_spectral_norm": 0.00602748291566968, + "block0_q_update_fnorm": 0.11617391556501389, + "block0_q_max_l1_linf_norm": 0.10578910261392593, + "block0_q_max_spectral_norm": 0.006025815382599831, + "block0_k_update_fnorm": 0.11573293060064316, + "block0_k_max_l1_linf_norm": 0.10452383756637573, + "block0_k_max_spectral_norm": 0.006026541348546743, + "block0_v_update_fnorm": 0.060773253440856934, + "block0_v_max_l1_linf_norm": 0.07072901725769043, + "block0_v_max_spectral_norm": 0.006019910331815481, + "block0_o_update_fnorm": 0.052483540028333664, + 
"block0_o_max_l1_linf_norm": 0.06764278560876846, + "block0_o_max_spectral_norm": 0.006017743609845638, + "block0_mlp_win_update_fnorm": 0.05954635515809059, + "block0_mlp_win_max_l1_linf_norm": 0.08947654068470001, + "block0_mlp_win_max_spectral_norm": 0.006017869338393211, + "block0_mlp_wout_update_fnorm": 0.08230102807283401, + "block0_mlp_wout_max_l1_linf_norm": 0.13006514310836792, + "block0_mlp_wout_max_spectral_norm": 0.0060205888003110886, + "block3_q_update_fnorm": 0.01813814602792263, + "block3_q_max_l1_linf_norm": 0.04255874454975128, + "block3_q_max_spectral_norm": 0.005830614361912012, + "block3_k_update_fnorm": 0.019036225974559784, + "block3_k_max_l1_linf_norm": 0.04105827212333679, + "block3_k_max_spectral_norm": 0.005917774047702551, + "block3_v_update_fnorm": 0.060792747884988785, + "block3_v_max_l1_linf_norm": 0.07468363642692566, + "block3_v_max_spectral_norm": 0.006018931046128273, + "block3_o_update_fnorm": 0.06971707940101624, + "block3_o_max_l1_linf_norm": 0.0596313551068306, + "block3_o_max_spectral_norm": 0.00601907167583704, + "block3_mlp_win_update_fnorm": 0.10556201636791229, + "block3_mlp_win_max_l1_linf_norm": 0.08613044768571854, + "block3_mlp_win_max_spectral_norm": 0.006021519657224417, + "block3_mlp_wout_update_fnorm": 0.13641563057899475, + "block3_mlp_wout_max_l1_linf_norm": 0.22047002613544464, + "block3_mlp_wout_max_spectral_norm": 0.006027433089911938, + "block7_q_update_fnorm": 0.11868679523468018, + "block7_q_max_l1_linf_norm": 0.10407260805368423, + "block7_q_max_spectral_norm": 0.0060274964198470116, + "block7_k_update_fnorm": 0.12293577939271927, + "block7_k_max_l1_linf_norm": 0.10450849682092667, + "block7_k_max_spectral_norm": 0.006026747170835733, + "block7_v_update_fnorm": 0.08928278088569641, + "block7_v_max_l1_linf_norm": 0.09972776472568512, + "block7_v_max_spectral_norm": 0.00602108845487237, + "block7_o_update_fnorm": 0.12396139651536942, + "block7_o_max_l1_linf_norm": 0.10380413383245468, + "block7_o_max_spectral_norm": 0.006028522737324238, + "block7_mlp_win_update_fnorm": 0.1401619017124176, + "block7_mlp_win_max_l1_linf_norm": 0.08343502879142761, + "block7_mlp_win_max_spectral_norm": 0.006031963974237442, + "block7_mlp_wout_update_fnorm": 0.12059096992015839, + "block7_mlp_wout_max_l1_linf_norm": 0.20046588778495789, + "block7_mlp_wout_max_spectral_norm": 0.0057068788446486, + "block11_q_update_fnorm": 0.12363740056753159, + "block11_q_max_l1_linf_norm": 0.1082853153347969, + "block11_q_max_spectral_norm": 0.006026481278240681, + "block11_k_update_fnorm": 0.12484825402498245, + "block11_k_max_l1_linf_norm": 0.10893359035253525, + "block11_k_max_spectral_norm": 0.00602748291566968, + "block11_v_update_fnorm": 0.12204679101705551, + "block11_v_max_l1_linf_norm": 0.10401396453380585, + "block11_v_max_spectral_norm": 0.00602738605812192, + "block11_o_update_fnorm": 0.12394791841506958, + "block11_o_max_l1_linf_norm": 0.10393193364143372, + "block11_o_max_spectral_norm": 0.006025009322911501, + "block11_mlp_win_update_fnorm": 0.11742974072694778, + "block11_mlp_win_max_l1_linf_norm": 0.08048906177282333, + "block11_mlp_win_max_spectral_norm": 0.005680039990693331, + "block11_mlp_wout_update_fnorm": 0.1214897483587265, + "block11_mlp_wout_max_l1_linf_norm": 0.2283966988325119, + "block11_mlp_wout_max_spectral_norm": 0.006022107787430286, + "total_sharpness": 0.00610392726957798, + "block_total_sharpness": 0.009725422598421574, + "v_norm_block": 0.8727740049362183, + "v_T_H_v_block": 0.007408189587295055, + "v_norm": 1.096049427986145, + 
"ip_v_neg_g_hvp": 0.014869926497340202, + "cos_v_neg_g_hvp": 0.022921251133084297, + "g_hvp_norm": 0.5918891429901123, + "ip_v_neg_g_t": 0.02208159677684307, + "cos_v_neg_g_t": 0.02759973332285881, + "g_t_norm": 0.729953944683075, + "g_norm": 0.5918891429901123, + "hv_norm": 4.810882568359375, + "cos_v_hv": 0.0013906400417909026, + "hg_norm": 45380.81640625, + "cos_g_hg": -0.19105570018291473, + "v_parallel_norm": 0.002610293682664633, + "v_perp_norm": 1.0960463285446167, + "embed_lm_head_v_norm": 0.6630154848098755, + "embed_lm_head_cos_v_neg_g": 0.04063308238983154, + "layer_1_v_norm": 0.20902946591377258, + "layer_1_cos_v_neg_g": 0.007266509812325239, + "layer_2_v_norm": 0.11472605913877487, + "layer_2_cos_v_neg_g": 0.01375350821763277, + "layer_3_v_norm": 0.1619275063276291, + "layer_3_cos_v_neg_g": 0.015107116661965847, + "layer_4_v_norm": 0.19798657298088074, + "layer_4_cos_v_neg_g": 0.015362384729087353, + "layer_5_v_norm": 0.22152847051620483, + "layer_5_cos_v_neg_g": 0.020843788981437683, + "layer_6_v_norm": 0.2689440846443176, + "layer_6_cos_v_neg_g": 0.02407945692539215, + "layer_7_v_norm": 0.29179254174232483, + "layer_7_cos_v_neg_g": 0.024791525676846504, + "layer_8_v_norm": 0.29457545280456543, + "layer_8_cos_v_neg_g": 0.022088434547185898, + "layer_9_v_norm": 0.29518038034439087, + "layer_9_cos_v_neg_g": 0.023686671629548073, + "layer_10_v_norm": 0.297189325094223, + "layer_10_cos_v_neg_g": 0.02752755582332611, + "layer_11_v_norm": 0.2844969630241394, + "layer_11_cos_v_neg_g": 0.03738071396946907, + "layer_12_v_norm": 0.2995750606060028, + "layer_12_cos_v_neg_g": 0.07153933495283127, + "block0_q_v_norm": 0.11617391556501389, + "block0_q_cos_v_neg_g": 0.00748310936614871, + "block0_k_v_norm": 0.11573293060064316, + "block0_k_cos_v_neg_g": 0.006644373293966055, + "block0_v_v_norm": 0.060773253440856934, + "block0_v_cos_v_neg_g": 0.006972523871809244, + "block0_o_v_norm": 0.052483540028333664, + "block0_o_cos_v_neg_g": 0.015222136862576008, + "block0_mlp_win_v_norm": 0.05954635515809059, + "block0_mlp_win_cos_v_neg_g": 0.02021295577287674, + "block0_mlp_wout_v_norm": 0.08230102807283401, + "block0_mlp_wout_cos_v_neg_g": 0.02124076895415783, + "block3_q_v_norm": 0.01813814602792263, + "block3_q_cos_v_neg_g": 0.009789916686713696, + "block3_k_v_norm": 0.019036225974559784, + "block3_k_cos_v_neg_g": 0.08930793404579163, + "block3_v_v_norm": 0.060792747884988785, + "block3_v_cos_v_neg_g": 0.009085020050406456, + "block3_o_v_norm": 0.06971707940101624, + "block3_o_cos_v_neg_g": 0.01870155893266201, + "block3_mlp_win_v_norm": 0.10556201636791229, + "block3_mlp_win_cos_v_neg_g": 0.013475255109369755, + "block3_mlp_wout_v_norm": 0.13641563057899475, + "block3_mlp_wout_cos_v_neg_g": 0.03521115705370903, + "block7_q_v_norm": 0.11868679523468018, + "block7_q_cos_v_neg_g": 0.027977796271443367, + "block7_k_v_norm": 0.12293577939271927, + "block7_k_cos_v_neg_g": 0.07087183743715286, + "block7_v_v_norm": 0.08928278088569641, + "block7_v_cos_v_neg_g": 0.022688305005431175, + "block7_o_v_norm": 0.12396139651536942, + "block7_o_cos_v_neg_g": 0.0764741450548172, + "block7_mlp_win_v_norm": 0.1401619017124176, + "block7_mlp_win_cos_v_neg_g": 0.023990698158740997, + "block7_mlp_wout_v_norm": 0.12059096992015839, + "block7_mlp_wout_cos_v_neg_g": 0.10973881185054779, + "block11_q_v_norm": 0.12363740056753159, + "block11_q_cos_v_neg_g": 0.07630925625562668, + "block11_k_v_norm": 0.12484825402498245, + "block11_k_cos_v_neg_g": 0.09883487224578857, + "block11_v_v_norm": 0.12204679101705551, + 
"block11_v_cos_v_neg_g": 0.044324979186058044, + "block11_o_v_norm": 0.12394791841506958, + "block11_o_cos_v_neg_g": 0.08334420621395111, + "block11_mlp_win_v_norm": 0.11742974072694778, + "block11_mlp_win_cos_v_neg_g": 0.09103534370660782, + "block11_mlp_wout_v_norm": 0.1214897483587265, + "block11_mlp_wout_cos_v_neg_g": 0.0846497192978859, + "embed_lm_head_sharpness": -0.0015674624592065811, + "layer_1_sharpness": -0.0008086609886959195, + "layer_2_sharpness": 0.001144407782703638, + "layer_3_sharpness": 0.008154172450304031, + "layer_4_sharpness": 0.003603300778195262, + "layer_5_sharpness": 0.00276017002761364, + "layer_6_sharpness": 0.00220468663610518, + "layer_7_sharpness": 0.0022232243791222572, + "layer_8_sharpness": 0.001846302067860961, + "layer_9_sharpness": 0.001093864324502647, + "layer_10_sharpness": 0.0005940275732427835, + "layer_11_sharpness": 0.0006979177123866975, + "layer_12_sharpness": 0.001252199406735599, + "block0_q_sharpness": -3.124885552097112e-05, + "block0_k_sharpness": -1.1678152077365667e-05, + "block0_v_sharpness": 0.01972810924053192, + "block0_o_sharpness": 0.009779242798686028, + "block0_mlp_win_sharpness": -0.053895577788352966, + "block0_mlp_wout_sharpness": 0.0023814504966139793, + "block3_q_sharpness": 3.6897967220284045e-05, + "block3_k_sharpness": 0.013360363431274891, + "block3_v_sharpness": 0.0048992387019097805, + "block3_o_sharpness": 0.003368926467373967, + "block3_mlp_win_sharpness": 0.000606188434176147, + "block3_mlp_wout_sharpness": 0.00039622155600227416, + "block7_q_sharpness": 0.00015179556794464588, + "block7_k_sharpness": 0.00012823272845707834, + "block7_v_sharpness": 0.006098721642047167, + "block7_o_sharpness": 8.244358468800783e-05, + "block7_mlp_win_sharpness": 0.0006745772552676499, + "block7_mlp_wout_sharpness": 0.00010263656440656632, + "block11_q_sharpness": 0.00037740703555755317, + "block11_k_sharpness": 8.647235517855734e-05, + "block11_v_sharpness": 0.00012137801968492568, + "block11_o_sharpness": 5.295013761497103e-05, + "block11_mlp_win_sharpness": 0.0006137397140264511, + "block11_mlp_wout_sharpness": 0.0021761890966445208, + "sum_layer_numerators": 0.0012958497156337504, + "block_diag_sharpness": 0.0017011829940741976, + "cross_layer_sharpness": 0.008024239604347377 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..447832eed917179bf62ee033f8416d89ada275d9 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/sharpness_step_9500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.5522695183753967, + "total_l1_linf_norm": 4600.10791015625, + "total_spectral_norm": 0.5522695183753967, + "embed_lm_head_update_fnorm": 0.332337349653244, + "embed_lm_head_max_l1_linf_norm": 0.08719031512737274, + "embed_lm_head_max_spectral_norm": 0.053474172949790955, + "layer_1_update_fnorm": 0.12333083897829056, + "layer_1_max_l1_linf_norm": 0.117171511054039, + "layer_1_max_spectral_norm": 0.003016251605004072, + "layer_2_update_fnorm": 0.06476955860853195, + "layer_2_max_l1_linf_norm": 0.1307593733072281, + "layer_2_max_spectral_norm": 0.003012667642906308, + "layer_3_update_fnorm": 0.0749589130282402, + "layer_3_max_l1_linf_norm": 0.1444588005542755, + 
"layer_3_max_spectral_norm": 0.0032309195958077908, + "layer_4_update_fnorm": 0.09743718057870865, + "layer_4_max_l1_linf_norm": 0.1462017297744751, + "layer_4_max_spectral_norm": 0.004655182361602783, + "layer_5_update_fnorm": 0.1081397533416748, + "layer_5_max_l1_linf_norm": 0.13848865032196045, + "layer_5_max_spectral_norm": 0.004466962534934282, + "layer_6_update_fnorm": 0.13367566466331482, + "layer_6_max_l1_linf_norm": 0.13819536566734314, + "layer_6_max_spectral_norm": 0.0030516821425408125, + "layer_7_update_fnorm": 0.14655132591724396, + "layer_7_max_l1_linf_norm": 0.1222086176276207, + "layer_7_max_spectral_norm": 0.0030168271623551846, + "layer_8_update_fnorm": 0.14849506318569183, + "layer_8_max_l1_linf_norm": 0.12416932731866837, + "layer_8_max_spectral_norm": 0.0030180858448147774, + "layer_9_update_fnorm": 0.1482086330652237, + "layer_9_max_l1_linf_norm": 0.1305292248725891, + "layer_9_max_spectral_norm": 0.0030172551050782204, + "layer_10_update_fnorm": 0.1487736999988556, + "layer_10_max_l1_linf_norm": 0.1420026421546936, + "layer_10_max_spectral_norm": 0.0031532219145447016, + "layer_11_update_fnorm": 0.14256082475185394, + "layer_11_max_l1_linf_norm": 0.14521156251430511, + "layer_11_max_spectral_norm": 0.0032028413843363523, + "layer_12_update_fnorm": 0.15006862580776215, + "layer_12_max_l1_linf_norm": 0.14579170942306519, + "layer_12_max_spectral_norm": 0.0032315340358763933, + "block0_q_update_fnorm": 0.057877589017152786, + "block0_q_max_l1_linf_norm": 0.052783191204071045, + "block0_q_max_spectral_norm": 0.003016251605004072, + "block0_k_update_fnorm": 0.05576157569885254, + "block0_k_max_l1_linf_norm": 0.05239693820476532, + "block0_k_max_spectral_norm": 0.00301475846208632, + "block0_v_update_fnorm": 0.04547085985541344, + "block0_v_max_l1_linf_norm": 0.04504972696304321, + "block0_v_max_spectral_norm": 0.0030137086287140846, + "block0_o_update_fnorm": 0.048235710710287094, + "block0_o_max_l1_linf_norm": 0.045433126389980316, + "block0_o_max_spectral_norm": 0.003013996174558997, + "block0_mlp_win_update_fnorm": 0.048736296594142914, + "block0_mlp_win_max_l1_linf_norm": 0.04743240773677826, + "block0_mlp_win_max_spectral_norm": 0.0030132972169667482, + "block0_mlp_wout_update_fnorm": 0.0443493090569973, + "block0_mlp_wout_max_l1_linf_norm": 0.07085030525922775, + "block0_mlp_wout_max_spectral_norm": 0.0030129975639283657, + "block3_q_update_fnorm": 0.00830831564962864, + "block3_q_max_l1_linf_norm": 0.018358375877141953, + "block3_q_max_spectral_norm": 0.002773904474452138, + "block3_k_update_fnorm": 0.00878625363111496, + "block3_k_max_l1_linf_norm": 0.020666303113102913, + "block3_k_max_spectral_norm": 0.002996803494170308, + "block3_v_update_fnorm": 0.0281137116253376, + "block3_v_max_l1_linf_norm": 0.03461199998855591, + "block3_v_max_spectral_norm": 0.0030119388829916716, + "block3_o_update_fnorm": 0.03353492543101311, + "block3_o_max_l1_linf_norm": 0.028775257989764214, + "block3_o_max_spectral_norm": 0.0030127898789942265, + "block3_mlp_win_update_fnorm": 0.052861228585243225, + "block3_mlp_win_max_l1_linf_norm": 0.044974759221076965, + "block3_mlp_win_max_spectral_norm": 0.0030134483240544796, + "block3_mlp_wout_update_fnorm": 0.06766853481531143, + "block3_mlp_wout_max_l1_linf_norm": 0.10997422784566879, + "block3_mlp_wout_max_spectral_norm": 0.0030173903796821833, + "block7_q_update_fnorm": 0.05978740006685257, + "block7_q_max_l1_linf_norm": 0.051895033568143845, + "block7_q_max_spectral_norm": 0.0030165608040988445, + "block7_k_update_fnorm": 
0.061709724366664886, + "block7_k_max_l1_linf_norm": 0.051948726177215576, + "block7_k_max_spectral_norm": 0.0030155314598232508, + "block7_v_update_fnorm": 0.04721198230981827, + "block7_v_max_l1_linf_norm": 0.051024314016103745, + "block7_v_max_spectral_norm": 0.0030136366840451956, + "block7_o_update_fnorm": 0.06214989349246025, + "block7_o_max_l1_linf_norm": 0.05228159576654434, + "block7_o_max_spectral_norm": 0.0030169542878866196, + "block7_mlp_win_update_fnorm": 0.07005194574594498, + "block7_mlp_win_max_l1_linf_norm": 0.042209744453430176, + "block7_mlp_win_max_spectral_norm": 0.0030180858448147774, + "block7_mlp_wout_update_fnorm": 0.060435663908720016, + "block7_mlp_wout_max_l1_linf_norm": 0.09983417391777039, + "block7_mlp_wout_max_spectral_norm": 0.002854360733181238, + "block11_q_update_fnorm": 0.06188356876373291, + "block11_q_max_l1_linf_norm": 0.05549757182598114, + "block11_q_max_spectral_norm": 0.003018236020579934, + "block11_k_update_fnorm": 0.06243046373128891, + "block11_k_max_l1_linf_norm": 0.054436542093753815, + "block11_k_max_spectral_norm": 0.003018340328708291, + "block11_v_update_fnorm": 0.060996636748313904, + "block11_v_max_l1_linf_norm": 0.05156940966844559, + "block11_v_max_spectral_norm": 0.0030157885048538446, + "block11_o_update_fnorm": 0.062064029276371, + "block11_o_max_l1_linf_norm": 0.05186765268445015, + "block11_o_max_spectral_norm": 0.003016069997102022, + "block11_mlp_win_update_fnorm": 0.05862419307231903, + "block11_mlp_win_max_l1_linf_norm": 0.040684301406145096, + "block11_mlp_win_max_spectral_norm": 0.002844439120963216, + "block11_mlp_wout_update_fnorm": 0.061341792345047, + "block11_mlp_wout_max_l1_linf_norm": 0.11172270774841309, + "block11_mlp_wout_max_spectral_norm": 0.003013347275555134, + "total_sharpness": 0.004786955192685127, + "block_total_sharpness": 0.0059985569678246975, + "v_norm_block": 0.4410821497440338, + "v_T_H_v_block": 0.0011670400854200125, + "v_norm": 0.5522695183753967, + "ip_v_neg_g_hvp": 0.006875224411487579, + "cos_v_neg_g_hvp": 0.021833796054124832, + "g_hvp_norm": 0.570172905921936, + "ip_v_neg_g_t": 0.010364523157477379, + "cos_v_neg_g_t": 0.026984430849552155, + "g_t_norm": 0.6954805850982666, + "g_norm": 0.570172905921936, + "hv_norm": 1.9327722787857056, + "cos_v_hv": 0.0013678226387128234, + "hg_norm": 4595.0361328125, + "cos_g_hg": -0.03587670624256134, + "v_parallel_norm": 0.0011277273297309875, + "v_perp_norm": 0.552268385887146, + "embed_lm_head_v_norm": 0.332337349653244, + "embed_lm_head_cos_v_neg_g": 0.04563270881772041, + "layer_1_v_norm": 0.12333083897829056, + "layer_1_cos_v_neg_g": 0.007294469512999058, + "layer_2_v_norm": 0.06476955860853195, + "layer_2_cos_v_neg_g": 0.0075608124025166035, + "layer_3_v_norm": 0.0749589130282402, + "layer_3_cos_v_neg_g": 0.02010161057114601, + "layer_4_v_norm": 0.09743718057870865, + "layer_4_cos_v_neg_g": 0.016751261427998543, + "layer_5_v_norm": 0.1081397533416748, + "layer_5_cos_v_neg_g": 0.022280199453234673, + "layer_6_v_norm": 0.13367566466331482, + "layer_6_cos_v_neg_g": 0.022753197699785233, + "layer_7_v_norm": 0.14655132591724396, + "layer_7_cos_v_neg_g": 0.021381039172410965, + "layer_8_v_norm": 0.14849504828453064, + "layer_8_cos_v_neg_g": 0.02225223369896412, + "layer_9_v_norm": 0.1482086330652237, + "layer_9_cos_v_neg_g": 0.022147981449961662, + "layer_10_v_norm": 0.1487736999988556, + "layer_10_cos_v_neg_g": 0.026310952380299568, + "layer_11_v_norm": 0.14256082475185394, + "layer_11_cos_v_neg_g": 0.03547082841396332, + "layer_12_v_norm": 
0.15006862580776215, + "layer_12_cos_v_neg_g": 0.06837859749794006, + "block0_q_v_norm": 0.057877589017152786, + "block0_q_cos_v_neg_g": 0.011063558049499989, + "block0_k_v_norm": 0.05576157569885254, + "block0_k_cos_v_neg_g": 0.00958159938454628, + "block0_v_v_norm": 0.04547085985541344, + "block0_v_cos_v_neg_g": 0.006267937831580639, + "block0_o_v_norm": 0.048235710710287094, + "block0_o_cos_v_neg_g": 0.005560547113418579, + "block0_mlp_win_v_norm": 0.048736296594142914, + "block0_mlp_win_cos_v_neg_g": 0.01304700132459402, + "block0_mlp_wout_v_norm": 0.0443493090569973, + "block0_mlp_wout_cos_v_neg_g": 0.024758487939834595, + "block3_q_v_norm": 0.00830831564962864, + "block3_q_cos_v_neg_g": 0.006503304000943899, + "block3_k_v_norm": 0.00878625363111496, + "block3_k_cos_v_neg_g": 0.017238683998584747, + "block3_v_v_norm": 0.0281137116253376, + "block3_v_cos_v_neg_g": 0.0068456148728728294, + "block3_o_v_norm": 0.03353492543101311, + "block3_o_cos_v_neg_g": 0.0213366337120533, + "block3_mlp_win_v_norm": 0.052861228585243225, + "block3_mlp_win_cos_v_neg_g": 0.01765020750463009, + "block3_mlp_wout_v_norm": 0.06766853481531143, + "block3_mlp_wout_cos_v_neg_g": 0.036578595638275146, + "block7_q_v_norm": 0.05978740006685257, + "block7_q_cos_v_neg_g": 0.026728197932243347, + "block7_k_v_norm": 0.061709724366664886, + "block7_k_cos_v_neg_g": 0.06494792550802231, + "block7_v_v_norm": 0.04721198230981827, + "block7_v_cos_v_neg_g": 0.017558442428708076, + "block7_o_v_norm": 0.06214989349246025, + "block7_o_cos_v_neg_g": 0.07522860914468765, + "block7_mlp_win_v_norm": 0.07005194574594498, + "block7_mlp_win_cos_v_neg_g": 0.024258002638816833, + "block7_mlp_wout_v_norm": 0.060435663908720016, + "block7_mlp_wout_cos_v_neg_g": 0.10864683240652084, + "block11_q_v_norm": 0.06188356876373291, + "block11_q_cos_v_neg_g": 0.07189404219388962, + "block11_k_v_norm": 0.06243046373128891, + "block11_k_cos_v_neg_g": 0.09348072856664658, + "block11_v_v_norm": 0.060996636748313904, + "block11_v_cos_v_neg_g": 0.03895563259720802, + "block11_o_v_norm": 0.062064029276371, + "block11_o_cos_v_neg_g": 0.08347700536251068, + "block11_mlp_win_v_norm": 0.05862419307231903, + "block11_mlp_win_cos_v_neg_g": 0.08811269700527191, + "block11_mlp_wout_v_norm": 0.061341792345047, + "block11_mlp_wout_cos_v_neg_g": 0.08146369457244873, + "embed_lm_head_sharpness": -0.00013042545469943434, + "layer_1_sharpness": -0.011417058296501637, + "layer_2_sharpness": -0.005071073770523071, + "layer_3_sharpness": 0.01667710207402706, + "layer_4_sharpness": 0.004728137515485287, + "layer_5_sharpness": 0.0034804183524101973, + "layer_6_sharpness": 0.0023938026279211044, + "layer_7_sharpness": 0.001568550942465663, + "layer_8_sharpness": 0.0014049006858840585, + "layer_9_sharpness": 0.0010354082332924008, + "layer_10_sharpness": 0.0006407097680494189, + "layer_11_sharpness": 0.0007878629839979112, + "layer_12_sharpness": 0.0010123489191755652, + "block0_q_sharpness": -0.00021210481645539403, + "block0_k_sharpness": -0.0002851538301911205, + "block0_v_sharpness": 0.0439874567091465, + "block0_o_sharpness": -0.015889544039964676, + "block0_mlp_win_sharpness": -0.0060749356634914875, + "block0_mlp_wout_sharpness": 0.010640370659530163, + "block3_q_sharpness": 5.2763629355467856e-05, + "block3_k_sharpness": 0.02316691353917122, + "block3_v_sharpness": 0.012467863969504833, + "block3_o_sharpness": 0.0034193964675068855, + "block3_mlp_win_sharpness": 0.0014309808611869812, + "block3_mlp_wout_sharpness": 0.0006878099520690739, + "block7_q_sharpness": 
9.294272604165599e-05, + "block7_k_sharpness": 7.450948760379106e-05, + "block7_v_sharpness": 0.0041587776504457, + "block7_o_sharpness": 7.27426668163389e-05, + "block7_mlp_win_sharpness": 0.0007175885839387774, + "block7_mlp_wout_sharpness": 9.793746721697971e-05, + "block11_q_sharpness": 0.000398359727114439, + "block11_k_sharpness": 9.188932017423213e-05, + "block11_v_sharpness": 0.0001222608843818307, + "block11_o_sharpness": 7.487687253160402e-05, + "block11_mlp_win_sharpness": 0.00041497519123367965, + "block11_mlp_wout_sharpness": 0.0016822166508063674, + "sum_layer_numerators": 0.0001675412458510914, + "block_diag_sharpness": 0.0008611578710561058, + "cross_layer_sharpness": 0.005137399096768591 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/training_log.txt b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..893c356b46213cfc279343f276514312b0d55e19 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_44_693e530c-4417-4bdf-b2fa-b0da1b08f67f/training_log.txt @@ -0,0 +1,11788 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +import nano_GPT_qkvonorm_pure +from nano_GPT_qkvonorm_pure import GPT, GPTConfig + +# Import debug utilities +# from debug_utils import setup_debugpy + +# ----------------------------------------------------------------------------- +# Our own simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, 
"rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes, + shuffle_files=False, random_seed=None): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + self.shuffle_files = shuffle_files + self.random_seed = random_seed + self._rng = random.Random(random_seed) if shuffle_files and random_seed is not None else None + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + if self.shuffle_files: + self._shuffle_files() + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + next_shard = (self.current_shard + 1) % len(self.files) + if next_shard == 0 and self.shuffle_files: + self._shuffle_files() + self.current_shard = next_shard + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + + def _shuffle_files(self): + if 
self._rng is not None: + self._rng.shuffle(self.files) + else: + random.shuffle(self.files) + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + + all_param_groups["embed_lm_head"] = list(model.lm_head.parameters()) + + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # Add fine-grained params for selected layers (0, 3, 7, 11) + selected_layers = [0, 3, 7, 11] + for layer_idx in selected_layers: + block = blocks[layer_idx] + prefix = f"block{layer_idx}" + # Attention: Q, K, V, O + all_param_groups[f"{prefix}_q"] = [block.attn.q_w.weight] + all_param_groups[f"{prefix}_k"] = [block.attn.k_w.weight] + all_param_groups[f"{prefix}_v"] = [block.attn.v_w.weight] + all_param_groups[f"{prefix}_o"] = [block.attn.c_proj.weight] + # MLP: c_fc (win) and c_proj (wout) + all_param_groups[f"{prefix}_mlp_win"] = [block.mlp.c_fc.weight] + all_param_groups[f"{prefix}_mlp_wout"] = [block.mlp.c_proj.weight] + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + original_flash = nano_GPT_qkvonorm_pure.FLASH + nano_GPT_qkvonorm_pure.FLASH = 0 + print0(f"[Enhanced Sharpness @ Step {step}] Disabled FLASH attention for HVP (was {original_flash})") + + # Get block parameter indices for cross-layer analysis (need this before loop) + block_param_indices = set() + for group_name, param_group in all_param_groups.items(): + if group_name.startswith("layer_"): + for p in param_group: + if id(p) in param_to_idx: + block_param_indices.add(param_to_idx[id(p)]) + + # Initialize accumulators for all quantities we need + grads_hvp = None + hvp_v_total = None + hvp_v_block = None + hvp_g_accum = None + layer_hvp_accum = {} + + + group_names_to_process = [gn for gn, pg in all_param_groups.items() + if pg and any(id(p) in param_to_idx for p in pg)] + + if last_training_batches is not None and len(last_training_batches) > 0: + + batch_iterator = [(x, y) for x, y in last_training_batches] + n_batches = len(batch_iterator) + print0(f"[Enhanced Sharpness @ Step {step}] Using {n_batches} microbatches for HVP (out of {grad_accum_steps} training microbatches)") + restore_loader = False + else: + # Fallback: use new batches from train_loader (should rarely happen) + print0(f"[Enhanced Sharpness @ Step {step}] WARNING: last_training_batches is None/empty, using {grad_accum_steps} new batches (inconsistent)") + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + n_batches = grad_accum_steps # Use same number as training for consistency + batch_iterator = [] + shard_was_changed = False + for _ in range(n_batches): + x_hvp, y_hvp = 
train_loader.next_batch() + batch_iterator.append((x_hvp, y_hvp)) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + restore_loader = True + + + print0(f"[Enhanced Sharpness @ Step {step}] Computing HVPs for {n_batches} microbatches") + for mb_idx, (x_hvp, y_hvp) in enumerate(batch_iterator): + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + + + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + grads_mb = torch.autograd.grad(loss_mb, model.parameters(), create_graph=True, allow_unused=True) + + # Compute H·v (total sharpness) + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_mb, update_direction_v) if g is not None) + + if not isinstance(v_dot_g_total, torch.Tensor): + v_dot_g_total = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_total_mb = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + # Compute H·v_block (block-only sharpness) + if block_param_indices: + v_dot_g_block = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in block_param_indices if grads_mb[i] is not None) + if not isinstance(v_dot_g_block, torch.Tensor): + v_dot_g_block = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_block_mb = torch.autograd.grad(v_dot_g_block, model.parameters(), retain_graph=True, allow_unused=True) + else: + + hvp_v_block_mb = [None] * len(list(model.parameters())) + + + g_dot_g = sum(torch.sum(g * g) for g in grads_mb if g is not None) + if not isinstance(g_dot_g, torch.Tensor): + g_dot_g = torch.tensor(0.0, device=device, requires_grad=True) + + + hvp_g_mb_raw = torch.autograd.grad(g_dot_g, model.parameters(), + retain_graph=True, allow_unused=True) + hvp_g_mb = [h / 2.0 if h is not None else None for h in hvp_g_mb_raw] + + # Compute per-layer H_kk·v_k (for layer-wise sharpness) + for group_idx, group_name in enumerate(group_names_to_process): + param_group = all_param_groups[group_name] + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + is_last_layer = (group_idx == len(group_names_to_process) - 1) + is_last_microbatch = (mb_idx == n_batches - 1) + need_retain = not (is_last_layer and is_last_microbatch) + + try: + v_dot_g_layer = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in indices if grads_mb[i] is not None) + + if not isinstance(v_dot_g_layer, torch.Tensor): + v_dot_g_layer = torch.tensor(0.0, device=device, requires_grad=True) + + hvp_layer_mb = torch.autograd.grad(v_dot_g_layer, model.parameters(), + retain_graph=need_retain, + allow_unused=True) + + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_layer_mb] + else: + layer_hvp_accum[group_name] = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + ] + + # Accumulate layer HVP + # if group_name not in layer_hvp_accum: + # layer_hvp_accum[group_name] = [h.detach() / n_batches if h is not None else None for h in hvp_layer_mb] + # else: + # layer_hvp_accum[group_name] = [ + # (h_acc + h.detach() / n_batches) if (h is not None and h_acc is not None) + # else (h.detach() / n_batches if h is not None else h_acc) + # for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + # ] + # del hvp_layer_mb, v_dot_g_layer + # torch.cuda.empty_cache() + except 
Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error computing layer HVP for '{group_name}' in microbatch {mb_idx}: {e}") + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = None + + # 6. Accumulate all quantities + if grads_hvp is None: + grads_hvp = [(g.detach() / n_batches).cpu() if g is not None else None for g in grads_mb] + hvp_v_total = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_total_mb] + hvp_v_block = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_block_mb] + hvp_g_accum = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_g_mb] + else: + grads_hvp = [ + (g_acc + (g.detach() / n_batches).cpu()) if (g is not None and g_acc is not None) + else ((g.detach() / n_batches).cpu() if g is not None else g_acc) + for g_acc, g in zip(grads_hvp, grads_mb) + ] + hvp_v_total = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_total, hvp_v_total_mb) + ] + hvp_v_block = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_block, hvp_v_block_mb) + ] + hvp_g_accum = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_g_accum, hvp_g_mb) + ] + + + + if mb_idx % max(1, n_batches // 4) == 0: + print0(f"[Enhanced Sharpness @ Step {step}] Processed microbatch {mb_idx + 1}/{n_batches}") + + + if restore_loader: + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + print0(f"[Enhanced Sharpness @ Step {step}] Finished computing all HVPs for {n_batches} microbatches") + grads_hvp = [g.to(device) if g is not None else None for g in grads_hvp] + hvp_v_total = [h.to(device) if h is not None else None for h in hvp_v_total] + hvp_v_block = [h.to(device) if h is not None else None for h in hvp_v_block] + hvp_g_accum = [h.to(device) if h is not None else None for h in hvp_g_accum] + for group_name in layer_hvp_accum: + if layer_hvp_accum[group_name] is not None: + layer_hvp_accum[group_name] = [h.to(device) if h is not None else None for h in layer_hvp_accum[group_name]] + # --- Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + # hvp_v_total is already computed in the loop above + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_v_total, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_total, torch.Tensor): + vhp_dot_v_total = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_total, torch.Tensor): + v_norm_sq_total = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + + print0(f"[Enhanced Sharpness @ Step {step}] Calculating BLOCK-ONLY total sharpness...") + # hvp_v_block is 
already computed in the loop above + if block_param_indices: # Only compute if there are block parameters + # Compute v_block^T H v_block (only sum over block indices) + vhp_dot_v_block = sum(torch.sum(hvp_v_block[i] * update_direction_v[i]) + for i in block_param_indices if hvp_v_block[i] is not None) + + v_norm_sq_block = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in block_param_indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_block, torch.Tensor): + vhp_dot_v_block = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_block, torch.Tensor): + v_norm_sq_block = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_block, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_block, op=dist.ReduceOp.AVG) + + if v_norm_sq_block.item() > 1e-12: + analysis_results["block_total_sharpness"] = (vhp_dot_v_block / v_norm_sq_block).item() + else: + analysis_results["block_total_sharpness"] = 0.0 + + analysis_results["v_norm_block"] = torch.sqrt(v_norm_sq_block).item() + analysis_results["v_T_H_v_block"] = vhp_dot_v_block.item() + else: + # No block parameters + analysis_results["block_total_sharpness"] = 0.0 + analysis_results["v_norm_block"] = 0.0 + analysis_results["v_T_H_v_block"] = 0.0 + + torch.cuda.empty_cache() + + # ---- Alignment metrics between update v and (negative) gradient g ---- + eps = 1e-12 + v_norm = torch.sqrt(v_norm_sq_total + eps) + analysis_results["v_norm"] = v_norm.item() + + # --- Version 1: g_hvp --- + ip_v_neg_g_hvp = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + g_hvp_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + + if not isinstance(ip_v_neg_g_hvp, torch.Tensor): + ip_v_neg_g_hvp = torch.tensor(0.0, device=device) + if not isinstance(g_hvp_norm_sq, torch.Tensor): + g_hvp_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_v_neg_g_hvp, op=dist.ReduceOp.AVG) + dist.all_reduce(g_hvp_norm_sq, op=dist.ReduceOp.AVG) + g_hvp_norm = torch.sqrt(g_hvp_norm_sq + eps) + analysis_results["ip_v_neg_g_hvp"] = ip_v_neg_g_hvp.item() + analysis_results["cos_v_neg_g_hvp"] = (ip_v_neg_g_hvp / (v_norm * g_hvp_norm + eps)).item() + analysis_results["g_hvp_norm"] = g_hvp_norm.item() + + # --- Version 2: g_t (original gradient that produced v) --- + # last_training_gradient is the actual gradient from training that led to the update v + if last_training_gradient is not None: + ip_v_neg_g_t = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, last_training_gradient) if g is not None) + g_t_norm_sq = sum(torch.sum(g * g) for g in last_training_gradient if g is not None) + dist.all_reduce(ip_v_neg_g_t, op=dist.ReduceOp.AVG) + dist.all_reduce(g_t_norm_sq, op=dist.ReduceOp.AVG) + g_t_norm = torch.sqrt(g_t_norm_sq + eps) + analysis_results["ip_v_neg_g_t"] = ip_v_neg_g_t.item() + analysis_results["cos_v_neg_g_t"] = (ip_v_neg_g_t / (v_norm * g_t_norm + eps)).item() + analysis_results["g_t_norm"] = g_t_norm.item() + else: + print0(f"[Enhanced Sharpness @ Step {step}] Warning: last_training_gradient is None, skipping g_t metrics") + + # Keep backward compatibility aliases (g_norm uses g_hvp for now) + g_norm_sq = g_hvp_norm_sq + g_norm = g_hvp_norm + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_v_total if hvp is not None) + if not isinstance(hv_norm_sq, torch.Tensor): + hv_norm_sq = torch.tensor(0.0, device=device) + 
dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg ---- + # hvp_g_accum is already computed in the loop above + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_accum) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_accum if hg is not None) + if not isinstance(ip_g_hg, torch.Tensor): + ip_g_hg = torch.tensor(0.0, device=device) + if not isinstance(hg_norm_sq, torch.Tensor): + hg_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + if not isinstance(v_parallel_norm_sq, torch.Tensor): + v_parallel_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(torch.clamp(v_norm_sq_total - v_parallel_norm_sq, min=0.0) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + # Ensure they are tensors + if not isinstance(v_norm_sq_layer, torch.Tensor): + v_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(g_norm_sq_layer, torch.Tensor): + g_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(ip_v_neg_g_layer, torch.Tensor): + ip_v_neg_g_layer = torch.tensor(0.0, device=device) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + if group_name not in layer_hvp_accum or layer_hvp_accum[group_name] is None: + print0(f"[Enhanced Sharpness @ Step {step}] No HVP data for '{group_name}', skipping") + analysis_results[f"{group_name}_sharpness"] = 0.0 + continue + + hvp_group_result = layer_hvp_accum[group_name] + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_group, torch.Tensor): + vhp_dot_v_group = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_group, torch.Tensor): + v_norm_sq_group = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- Calculate block-diagonal approximation and cross-layer interaction --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating block-diagonal and cross-layer sharpness...") + + sum_layer_numerators = 0.0 + for layer in range(1, NUM_LAYERS + 1): + layer_name = f"layer_{layer}" + if f"{layer_name}_sharpness" in analysis_results and f"{layer_name}_v_norm" in analysis_results: + s_k = analysis_results[f"{layer_name}_sharpness"] + v_k_norm = analysis_results[f"{layer_name}_v_norm"] + sum_layer_numerators += s_k * (v_k_norm ** 2) + + analysis_results["sum_layer_numerators"] = sum_layer_numerators + + # Block-diagonal sharpness (using block ||v||²) + v_norm_block = analysis_results.get("v_norm_block", 0) + v_norm_sq_block_val = v_norm_block ** 2 if v_norm_block else 1e-12 + + if v_norm_sq_block_val > 1e-12: + analysis_results["block_diag_sharpness"] = sum_layer_numerators / v_norm_sq_block_val + else: + analysis_results["block_diag_sharpness"] = 0.0 + + # Cross-layer interaction = block_total - block_diag + block_total = analysis_results.get("block_total_sharpness", 0) + block_diag = analysis_results.get("block_diag_sharpness", 0) + analysis_results["cross_layer_sharpness"] = block_total - block_diag + + print0(f"[Enhanced Sharpness @ Step {step}] block_total={block_total:.6f}, block_diag={block_diag:.6f}, cross_layer={block_total - block_diag:.6f}") + + # --- 8. 
Cleanup --- + nano_GPT_qkvonorm_pure.FLASH = original_flash + print0(f"[Enhanced Sharpness @ Step {step}] Restored FLASH attention to {original_flash}") + + print0(f"[Enhanced Sharpness @ Step {step}] Restoring parameters back to θ_{{t+1}}...") + with torch.no_grad(): + for p, v in zip(model.parameters(), update_direction_v): + p.data.add_(v) + + if prev_training_mode: + model.train() + else: + model.eval() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del hvp_v_total, hvp_v_block, hvp_g_accum, layer_hvp_accum + del vhp_dot_v_total, v_norm_sq_total + del vhp_dot_v_block, v_norm_sq_block + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. + """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + + # Version 1: g_hvp (new batch, computed at θ_t during HVP calculation) + if 'cos_v_neg_g_hvp' in results: + misc_parts.append(f"cos_v_-g_hvp:{results['cos_v_neg_g_hvp']:.4e}") + if 'g_hvp_norm' in results: + misc_parts.append(f"g_hvp_norm:{results['g_hvp_norm']:.4e}") + + # 
Version 2: g_t (original gradient that produced v) + if 'cos_v_neg_g_t' in results: + misc_parts.append(f"cos_v_-g_t:{results['cos_v_neg_g_t']:.4e}") + if 'g_t_norm' in results: + misc_parts.append(f"g_t_norm:{results['g_t_norm']:.4e}") + + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d8|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") 
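+    # Note: gradient clipping is effectively disabled in the training loop below
+    # (the clip_grad_norm_ call is commented out), so this flag currently has no effect.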
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + parser.add_argument("--shuffle_files", action="store_true") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d8", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # Setup debugpy for remote debugging (only activates if DEBUGPY env var is set) + # setup_debugpy(rank=ddp_rank, force=True) + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + nano_GPT_qkvonorm_pure.FLASH = args.flash # Set module-level FLASH for training + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d8": GPTConfig(block_size=1024, vocab_size=50257, n_layer=8, n_head=8, n_embd=512), + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader( + args.input_bin, B, T, ddp_rank, ddp_world_size, + shuffle_files=args.shuffle_files, random_seed=args.seed + ) + val_loader = None + if 
args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests + if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + base_module = model.module if ddp else model + # If compiled, unwrap to get the original module + if hasattr(base_module, "_orig_mod"): + base_module = base_module._orig_mod + + raw_params = list(raw_model_uncompiled.parameters()) + train_params = list(base_module.parameters()) + + assert len(raw_params) == len(train_params), \ + f"Parameter count mismatch: raw_model_uncompiled has {len(raw_params)}, training model has {len(train_params)}" + for i, (rp, tp) in enumerate(zip(raw_params, train_params)): + assert rp.data_ptr() == tp.data_ptr(), \ + f"Parameter {i} has different data_ptr: raw_model_uncompiled and training model do not share parameters!" + print0(f"[Verified] raw_model_uncompiled and training model share the same {len(raw_params)} Parameter objects") + + last_training_update = None + last_training_gradient = None # Store the original gradient that produced the update + last_training_batches = None # Store ALL microbatches (x, y) for consistent HVP calculation + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
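+        # For example (illustrative, using this model's parameter names): transformer.wte.weight and the
+        # transformer.h[i].attn / mlp weight matrices (dim >= 2) fall into the decay group, while all
+        # biases and LayerNorm gains (dim == 1) fall into the no-decay group.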
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it, base_lr): + min_lr = base_lr * args.lr_decay_frac + cooldown_iters = int(args.num_iterations * 0.2) + # 1) Warmup: linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it + 1) / args.warmup_iters + # 3) Decay: linear decay from base_lr to min_lr in the last 
cooldown_iters steps + cooldown_start = args.num_iterations - cooldown_iters + if it >= cooldown_start: + decay_ratio = (it - cooldown_start) / cooldown_iters + return base_lr - decay_ratio * (base_lr - min_lr) + # 2) Stable: constant learning rate at base_lr + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + for optimizer in optimizers: + if 
isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + last_training_update=last_training_update, # Pass the real update captured from training + last_training_gradient=last_training_gradient, # Pass the original gradient g_t + last_training_batches=last_training_batches # Pass ALL microbatches for consistent HVP + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
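+        # e.g. with --num_iterations 10000 the loop visits steps 0..10000; the extra step 10000 only runs
+        # the validation / sharpness / sampling blocks above and then breaks here, before the training section.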
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + + # Pre-check if we need to collect microbatches for sharpness analysis + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + + microbatches_this_step = [] if will_analyze_sharpness_next else None + + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + + # Store ALL microbatches for memory-efficient HVP calculation + if will_analyze_sharpness_next: + microbatches_this_step.append((x.detach().clone(), y.detach().clone())) + + if ddp: + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + + #no clipping + # norm = torch.nn.utils.clip_grad_norm_(raw_model_uncompiled.parameters(), float('inf')) + + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
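+            # The lists captured below (params_before_optimizer_step, last_training_gradient and, after the
+            # optimizer step, last_training_update) are indexed positionally, so they must stay in one-to-one
+            # correspondence with the parameter order that calculate_comprehensive_sharpness iterates over.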
+ print(raw_model_uncompiled.transformer.h[0].attn.q_w.weight[:5,:5]) + params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + # Save the original gradient g_t that will produce the update v + last_training_gradient = [ + p.grad.detach().clone() if p.grad is not None else torch.zeros_like(p) + for p in raw_model_uncompiled.parameters() + ] + # Capture ALL microbatches for consistent HVP calculation + # This ensures H is computed on the exact same objective as g_t and v + last_training_batches = microbatches_this_step # Already cloned above + else: + params_before_optimizer_step = None + last_training_batches = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p.detach() - p_before + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group()step:0 validation loss:11.020915 +step:0 train loss:11.019319 +step:1 train loss:11.018906 +step:2 train loss:11.009562 +step:3 train loss:11.003719 +step:4 train loss:10.994306 +step:5 train loss:10.981155 +step:6 train loss:10.966602 +step:7 train loss:10.946893 +step:8 train loss:10.926901 +step:9 train loss:10.906741 +step:10 train loss:10.871193 +step:11 train loss:10.847674 +step:12 train loss:10.816145 +step:13 train loss:10.781302 +step:14 train loss:10.744669 +step:15 train loss:10.701258 +step:16 train loss:10.667715 +step:17 train loss:10.622952 +step:18 train loss:10.580236 +step:19 train loss:10.526869 +step:20 train loss:10.491168 +step:21 train loss:10.436893 +step:22 train loss:10.385355 
+step:23 train loss:10.321215 +step:24 train loss:10.277843 +step:25 train loss:10.204778 +step:26 train loss:10.170521 +step:27 train loss:10.112211 +step:28 train loss:10.061573 +step:29 train loss:9.993437 +step:30 train loss:9.933231 +step:31 train loss:9.877522 +step:32 train loss:9.813165 +step:33 train loss:9.749046 +step:34 train loss:9.693674 +step:35 train loss:9.617342 +step:36 train loss:9.568773 +step:37 train loss:9.527522 +step:38 train loss:9.408061 +step:39 train loss:9.363055 +step:40 train loss:9.312854 +step:41 train loss:9.258770 +step:42 train loss:9.214289 +step:43 train loss:9.130584 +step:44 train loss:9.053660 +step:45 train loss:9.000839 +step:46 train loss:8.972201 +step:47 train loss:8.872232 +step:48 train loss:8.798258 +step:49 train loss:8.774384 +step:50 train loss:8.671278 +step:51 train loss:8.613113 +step:52 train loss:8.537920 +step:53 train loss:8.507022 +step:54 train loss:8.455428 +step:55 train loss:8.382212 +step:56 train loss:8.322626 +step:57 train loss:8.296779 +step:58 train loss:8.234423 +step:59 train loss:8.172492 +step:60 train loss:8.119663 +step:61 train loss:8.075646 +step:62 train loss:8.006991 +step:63 train loss:7.923642 +step:64 train loss:7.901175 +step:65 train loss:7.899553 +step:66 train loss:7.850445 +step:67 train loss:7.791120 +step:68 train loss:7.723900 +step:69 train loss:7.731227 +step:70 train loss:7.659500 +step:71 train loss:7.604903 +step:72 train loss:7.550501 +step:73 train loss:7.482994 +step:74 train loss:7.503763 +step:75 train loss:7.514763 +step:76 train loss:7.442580 +step:77 train loss:7.396504 +step:78 train loss:7.352933 +step:79 train loss:7.368006 +step:80 train loss:7.321602 +step:81 train loss:7.233480 +step:82 train loss:7.308528 +step:83 train loss:7.202637 +step:84 train loss:7.210094 +step:85 train loss:7.218637 +step:86 train loss:7.191924 +step:87 train loss:7.155932 +step:88 train loss:7.120923 +step:89 train loss:7.175706 +step:90 train loss:7.063266 +step:91 train loss:7.042995 +step:92 train loss:7.084074 +step:93 train loss:6.992200 +step:94 train loss:7.106489 +step:95 train loss:6.933446 +step:96 train loss:6.958823 +step:97 train loss:7.003408 +step:98 train loss:6.967638 +step:99 train loss:6.920091 +step:100 train loss:6.886951 +step:101 train loss:6.923906 +step:102 train loss:6.845215 +step:103 train loss:6.862034 +step:104 train loss:6.862043 +step:105 train loss:6.795751 +step:106 train loss:6.828043 +step:107 train loss:6.812415 +step:108 train loss:6.717609 +step:109 train loss:6.726767 +step:110 train loss:6.734109 +step:111 train loss:6.750082 +step:112 train loss:6.800637 +step:113 train loss:6.687154 +step:114 train loss:6.729536 +step:115 train loss:6.704241 +step:116 train loss:6.614415 +step:117 train loss:6.686568 +step:118 train loss:6.647679 +step:119 train loss:6.596206 +step:120 train loss:6.637194 +step:121 train loss:6.551560 +step:122 train loss:6.424649 +step:123 train loss:6.613449 +step:124 train loss:6.646669 +step:125 train loss:6.589122 +step:126 train loss:6.543947 +step:127 train loss:6.505463 +step:128 train loss:6.731808 +step:129 train loss:6.527686 +step:130 train loss:6.549913 +step:131 train loss:6.599687 +step:132 train loss:6.530354 +step:133 train loss:6.434964 +step:134 train loss:6.406175 +step:135 train loss:6.471976 +step:136 train loss:6.430882 +step:137 train loss:6.508530 +step:138 train loss:6.398086 +step:139 train loss:6.511098 +step:140 train loss:6.432591 +step:141 train loss:6.416412 +step:142 train loss:6.471911 +step:143 train 
loss:6.322591 +step:144 train loss:6.422688 +step:145 train loss:6.310422 +step:146 train loss:6.350378 +step:147 train loss:6.372369 +step:148 train loss:6.338684 +step:149 train loss:6.374010 +step:150 train loss:6.291301 +step:151 train loss:6.373013 +step:152 train loss:6.305301 +step:153 train loss:6.331782 +step:154 train loss:6.304585 +step:155 train loss:6.324071 +step:156 train loss:6.273415 +step:157 train loss:6.260152 +step:158 train loss:6.237163 +step:159 train loss:6.298219 +step:160 train loss:6.125604 +step:161 train loss:6.173692 +step:162 train loss:6.213700 +step:163 train loss:6.237697 +step:164 train loss:6.241140 +step:165 train loss:6.194074 +step:166 train loss:6.236253 +step:167 train loss:6.216495 +step:168 train loss:6.222068 +step:169 train loss:6.151029 +step:170 train loss:6.245358 +step:171 train loss:6.146444 +step:172 train loss:6.183198 +step:173 train loss:6.130275 +step:174 train loss:6.295431 +step:175 train loss:6.168813 +step:176 train loss:6.099840 +step:177 train loss:6.172663 +step:178 train loss:6.122396 +step:179 train loss:6.151347 +step:180 train loss:6.101517 +step:181 train loss:6.035504 +step:182 train loss:6.141107 +step:183 train loss:6.115128 +step:184 train loss:6.134788 +step:185 train loss:6.065259 +step:186 train loss:6.111958 +step:187 train loss:6.079677 +step:188 train loss:6.007035 +step:189 train loss:6.127697 +step:190 train loss:6.021555 +step:191 train loss:6.115432 +step:192 train loss:6.036284 +step:193 train loss:6.027804 +step:194 train loss:6.026360 +step:195 train loss:6.140543 +step:196 train loss:6.018046 +step:197 train loss:6.016849 +step:198 train loss:6.065678 +step:199 train loss:6.031680 +step:200 train loss:6.040002 +step:201 train loss:6.069042 +step:202 train loss:6.011781 +step:203 train loss:5.977032 +step:204 train loss:6.031686 +step:205 train loss:6.073426 +step:206 train loss:6.005526 +step:207 train loss:5.974813 +step:208 train loss:6.013217 +step:209 train loss:5.938540 +step:210 train loss:5.951120 +step:211 train loss:5.990659 +step:212 train loss:5.950558 +step:213 train loss:5.957594 +step:214 train loss:6.013687 +step:215 train loss:5.916621 +step:216 train loss:5.948393 +step:217 train loss:5.914270 +step:218 train loss:5.931731 +step:219 train loss:5.905164 +step:220 train loss:5.946249 +step:221 train loss:5.918109 +step:222 train loss:5.939638 +step:223 train loss:5.870407 +step:224 train loss:5.909045 +step:225 train loss:5.921305 +step:226 train loss:5.932060 +step:227 train loss:5.923059 +step:228 train loss:5.904678 +step:229 train loss:5.918555 +step:230 train loss:5.890760 +step:231 train loss:5.917542 +step:232 train loss:5.893868 +step:233 train loss:5.928466 +step:234 train loss:5.876899 +step:235 train loss:5.854488 +step:236 train loss:5.802874 +step:237 train loss:5.841175 +step:238 train loss:5.814374 +step:239 train loss:5.816538 +step:240 train loss:5.815847 +step:241 train loss:5.871996 +step:242 train loss:5.862312 +step:243 train loss:5.855461 +step:244 train loss:5.839541 +step:245 train loss:5.809223 +step:246 train loss:5.798773 +step:247 train loss:5.871027 +step:248 train loss:5.760897 +step:249 train loss:5.800827 +step:250 validation loss:5.821039 +step:250 train loss:5.751309 +step:251 train loss:5.809093 +step:252 train loss:5.790253 +step:253 train loss:5.796381 +step:254 train loss:5.795004 +step:255 train loss:5.763582 +step:256 train loss:5.662510 +step:257 train loss:5.755712 +step:258 train loss:5.822392 +step:259 train loss:5.735472 +step:260 train 
loss:5.811448 +step:261 train loss:5.731877 +step:262 train loss:5.764722 +step:263 train loss:5.719560 +step:264 train loss:5.789382 +step:265 train loss:5.681811 +step:266 train loss:5.659363 +step:267 train loss:5.738798 +step:268 train loss:5.735996 +step:269 train loss:5.659797 +step:270 train loss:5.705321 +step:271 train loss:5.735574 +step:272 train loss:5.739264 +step:273 train loss:5.660190 +step:274 train loss:5.726072 +step:275 train loss:5.700884 +step:276 train loss:5.682837 +step:277 train loss:5.668112 +step:278 train loss:5.670651 +step:279 train loss:5.647853 +step:280 train loss:5.692556 +step:281 train loss:5.679107 +step:282 train loss:5.644216 +step:283 train loss:5.648878 +step:284 train loss:5.664400 +step:285 train loss:5.672298 +step:286 train loss:5.537629 +step:287 train loss:5.505841 +step:288 train loss:5.674724 +step:289 train loss:5.614362 +step:290 train loss:5.666215 +step:291 train loss:5.657391 +step:292 train loss:5.627775 +step:293 train loss:5.655909 +step:294 train loss:5.723761 +step:295 train loss:5.636713 +step:296 train loss:5.617006 +step:297 train loss:5.572289 +step:298 train loss:5.630697 +step:299 train loss:5.624016 +step:300 train loss:5.573437 +step:301 train loss:5.581013 +step:302 train loss:5.593081 +step:303 train loss:5.592070 +step:304 train loss:5.565339 +step:305 train loss:5.603133 +step:306 train loss:5.602726 +step:307 train loss:5.576996 +step:308 train loss:5.635713 +step:309 train loss:5.554852 +step:310 train loss:5.561424 +step:311 train loss:5.405101 +step:312 train loss:5.577122 +step:313 train loss:5.528114 +step:314 train loss:5.487829 +step:315 train loss:5.551515 +step:316 train loss:5.466077 +step:317 train loss:5.552635 +step:318 train loss:5.608484 +step:319 train loss:5.534712 +step:320 train loss:5.532789 +step:321 train loss:5.505213 +step:322 train loss:5.456991 +step:323 train loss:5.553800 +step:324 train loss:5.480118 +step:325 train loss:5.514046 +step:326 train loss:5.507458 +step:327 train loss:5.479580 +step:328 train loss:5.523469 +step:329 train loss:5.468818 +step:330 train loss:5.428087 +step:331 train loss:5.458752 +step:332 train loss:5.544127 +step:333 train loss:5.467326 +step:334 train loss:5.523234 +step:335 train loss:5.410252 +step:336 train loss:5.389704 +step:337 train loss:5.380263 +step:338 train loss:5.419055 +step:339 train loss:5.452030 +step:340 train loss:5.451458 +step:341 train loss:5.384346 +step:342 train loss:5.407017 +step:343 train loss:5.415913 +step:344 train loss:5.308625 +step:345 train loss:5.458329 +step:346 train loss:5.358374 +step:347 train loss:5.372795 +step:348 train loss:5.355592 +step:349 train loss:5.289693 +step:350 train loss:5.373116 +step:351 train loss:5.342407 +step:352 train loss:5.375419 +step:353 train loss:5.332790 +step:354 train loss:5.427752 +step:355 train loss:5.380817 +step:356 train loss:5.382432 +step:357 train loss:5.309577 +step:358 train loss:5.316905 +step:359 train loss:5.364819 +step:360 train loss:5.337070 +step:361 train loss:5.342264 +step:362 train loss:5.278982 +step:363 train loss:5.385723 +step:364 train loss:5.303545 +step:365 train loss:5.286193 +step:366 train loss:5.367924 +step:367 train loss:5.323740 +step:368 train loss:5.307443 +step:369 train loss:5.336058 +step:370 train loss:5.274357 +step:371 train loss:5.299528 +step:372 train loss:5.303477 +step:373 train loss:5.244601 +step:374 train loss:5.266225 +step:375 train loss:5.261505 +step:376 train loss:5.268145 +step:377 train loss:5.277411 +step:378 train loss:5.321195 
+step:379 train loss:5.310802 +step:380 train loss:5.226627 +step:381 train loss:5.244060 +step:382 train loss:5.151911 +step:383 train loss:5.214863 +step:384 train loss:5.181729 +step:385 train loss:5.134177 +step:386 train loss:5.198737 +step:387 train loss:5.132273 +step:388 train loss:5.170580 +step:389 train loss:5.178024 +step:390 train loss:5.189092 +step:391 train loss:5.276120 +step:392 train loss:5.155802 +step:393 train loss:5.184143 +step:394 train loss:5.136492 +step:395 train loss:5.077604 +step:396 train loss:5.173501 +step:397 train loss:5.152060 +step:398 train loss:5.155910 +step:399 train loss:5.130581 +step:400 train loss:5.127212 +step:401 train loss:5.124984 +step:402 train loss:5.086550 +step:403 train loss:5.120263 +step:404 train loss:5.092525 +step:405 train loss:5.102323 +step:406 train loss:5.015985 +step:407 train loss:5.037178 +step:408 train loss:5.111532 +step:409 train loss:5.019366 +step:410 train loss:5.074001 +step:411 train loss:5.054322 +step:412 train loss:5.012488 +step:413 train loss:5.037446 +step:414 train loss:5.031514 +step:415 train loss:5.040442 +step:416 train loss:5.056622 +step:417 train loss:5.042097 +step:418 train loss:5.020565 +step:419 train loss:5.022826 +step:420 train loss:5.007463 +step:421 train loss:5.047483 +step:422 train loss:5.051389 +step:423 train loss:4.950326 +step:424 train loss:4.988778 +step:425 train loss:5.064279 +step:426 train loss:5.031848 +step:427 train loss:4.954876 +step:428 train loss:4.972895 +step:429 train loss:4.981355 +step:430 train loss:4.978052 +step:431 train loss:4.966547 +step:432 train loss:4.936109 +step:433 train loss:4.930083 +step:434 train loss:4.926149 +step:435 train loss:4.920794 +step:436 train loss:4.893244 +step:437 train loss:4.985650 +step:438 train loss:4.906961 +step:439 train loss:4.937047 +step:440 train loss:4.922161 +step:441 train loss:4.938763 +step:442 train loss:4.938258 +step:443 train loss:4.927610 +step:444 train loss:4.883964 +step:445 train loss:4.964604 +step:446 train loss:4.891149 +step:447 train loss:4.859755 +step:448 train loss:4.820126 +step:449 train loss:4.865603 +step:450 train loss:4.966504 +step:451 train loss:4.878927 +step:452 train loss:4.826018 +step:453 train loss:4.885273 +step:454 train loss:4.771315 +step:455 train loss:4.813218 +step:456 train loss:4.817016 +step:457 train loss:4.861419 +step:458 train loss:4.767858 +step:459 train loss:4.759850 +step:460 train loss:4.774610 +step:461 train loss:4.878030 +step:462 train loss:4.854970 +step:463 train loss:4.787920 +step:464 train loss:4.830180 +step:465 train loss:4.747603 +step:466 train loss:4.836235 +step:467 train loss:4.731415 +step:468 train loss:4.794578 +step:469 train loss:4.797489 +step:470 train loss:4.796626 +step:471 train loss:4.799613 +step:472 train loss:4.751335 +step:473 train loss:4.730032 +step:474 train loss:4.539171 +step:475 train loss:4.547462 +step:476 train loss:4.693118 +step:477 train loss:4.779639 +step:478 train loss:4.737017 +step:479 train loss:4.706322 +step:480 train loss:4.730574 +step:481 train loss:4.716732 +step:482 train loss:4.676464 +step:483 train loss:4.719012 +step:484 train loss:4.681220 +step:485 train loss:4.682785 +step:486 train loss:4.692152 +step:487 train loss:4.647899 +step:488 train loss:4.715466 +step:489 train loss:4.710837 +step:490 train loss:4.668652 +step:491 train loss:4.682578 +step:492 train loss:4.709968 +step:493 train loss:4.633759 +step:494 train loss:4.730891 +step:495 train loss:4.681486 +step:496 train loss:4.595603 +step:497 
train loss:4.741280 +step:498 train loss:4.605490 +step:499 train loss:4.639068 +step:500 validation loss:4.638124 total_sharp:3.7599e-02 L1_sharp:2.7384e-02 L2_sharp:1.5566e-02 L3_sharp:1.4752e-02 L4_sharp:9.8960e-03 L5_sharp:8.3685e-03 L6_sharp:6.7010e-03 L7_sharp:4.9361e-03 L8_sharp:3.4295e-03 L9_sharp:2.4552e-03 L10_sharp:2.0415e-03 L11_sharp:1.8808e-03 L12_sharp:1.8433e-03 total_fnorm:1.6867e+00 total_l1_linf:1.4661e+04 total_spectral:1.6867e+00 L1_fnorm:4.2458e-01 L2_fnorm:3.8607e-01 L3_fnorm:3.6560e-01 L4_fnorm:3.6364e-01 L5_fnorm:3.9362e-01 L6_fnorm:3.8499e-01 L7_fnorm:4.0766e-01 L8_fnorm:3.9541e-01 L9_fnorm:4.0936e-01 L10_fnorm:4.0740e-01 L11_fnorm:4.1313e-01 L12_fnorm:4.1058e-01 L1_l1linf:3.0870e-01 L2_l1linf:3.0242e-01 L3_l1linf:3.0023e-01 L4_l1linf:3.0706e-01 L5_l1linf:3.1320e-01 L6_l1linf:3.1799e-01 L7_l1linf:3.1637e-01 L8_l1linf:3.1764e-01 L9_l1linf:3.2136e-01 L10_l1linf:3.1937e-01 L11_l1linf:3.2118e-01 L12_l1linf:3.2646e-01 L1_spectral:8.6068e-03 L2_spectral:8.5993e-03 L3_spectral:8.6048e-03 L4_spectral:8.6044e-03 L5_spectral:8.6016e-03 L6_spectral:8.6028e-03 L7_spectral:8.6022e-03 L8_spectral:8.6020e-03 L9_spectral:8.6042e-03 L10_spectral:8.6014e-03 L11_spectral:8.6034e-03 L12_spectral:8.6016e-03 v_norm:1.6867e+00 cos_v_-g_hvp:5.8620e-02 g_hvp_norm:9.8243e-01 cos_v_-g_t:6.2579e-02 g_t_norm:9.2465e-01 hv_norm:1.6160e+00 cos_v_hv:3.9244e-02 hg_norm:4.9301e+01 cos_g_hg:7.4579e-01 v_par:4.1901e-03 v_perp:1.6867e+00 L1_cos_v_neg_g:6.0286e-02 L1_v_norm:4.2458e-01 L2_cos_v_neg_g:6.4418e-02 L2_v_norm:3.8607e-01 L3_cos_v_neg_g:5.7657e-02 L3_v_norm:3.6560e-01 L4_cos_v_neg_g:5.9985e-02 L4_v_norm:3.6364e-01 L5_cos_v_neg_g:6.6288e-02 L5_v_norm:3.9362e-01 L6_cos_v_neg_g:7.7642e-02 L6_v_norm:3.8499e-01 L7_cos_v_neg_g:8.1106e-02 L7_v_norm:4.0766e-01 L8_cos_v_neg_g:8.4691e-02 L8_v_norm:3.9541e-01 L9_cos_v_neg_g:8.0846e-02 L9_v_norm:4.0936e-01 L10_cos_v_neg_g:8.1489e-02 L10_v_norm:4.0740e-01 L11_cos_v_neg_g:7.6167e-02 L11_v_norm:4.1313e-01 L12_cos_v_neg_g:6.6056e-02 L12_v_norm:4.1058e-01 +step:500 train loss:4.732440 +step:501 train loss:4.570919 +step:502 train loss:4.677198 +step:503 train loss:4.664466 +step:504 train loss:4.580749 +step:505 train loss:4.598738 +step:506 train loss:4.714370 +step:507 train loss:4.552176 +step:508 train loss:4.598278 +step:509 train loss:4.599427 +step:510 train loss:4.553336 +step:511 train loss:4.603585 +step:512 train loss:4.681796 +step:513 train loss:4.583533 +step:514 train loss:4.557243 +step:515 train loss:4.599033 +step:516 train loss:4.599095 +step:517 train loss:4.555402 +step:518 train loss:4.524041 +step:519 train loss:4.553549 +step:520 train loss:4.516663 +step:521 train loss:4.598325 +step:522 train loss:4.518161 +step:523 train loss:4.542531 +step:524 train loss:4.553119 +step:525 train loss:4.632358 +step:526 train loss:4.527209 +step:527 train loss:4.557490 +step:528 train loss:4.580967 +step:529 train loss:4.481855 +step:530 train loss:4.611411 +step:531 train loss:4.500282 +step:532 train loss:4.527758 +step:533 train loss:4.496070 +step:534 train loss:4.500149 +step:535 train loss:4.504729 +step:536 train loss:4.545265 +step:537 train loss:4.463166 +step:538 train loss:4.503825 +step:539 train loss:4.457912 +step:540 train loss:4.495955 +step:541 train loss:4.542643 +step:542 train loss:4.483801 +step:543 train loss:4.472512 +step:544 train loss:4.554143 +step:545 train loss:4.469141 +step:546 train loss:4.483430 +step:547 train loss:4.534703 +step:548 train loss:4.495502 +step:549 train loss:4.438672 +step:550 train loss:4.456841 
+step:551 train loss:4.468999 +step:552 train loss:4.451355 +step:553 train loss:4.458575 +step:554 train loss:4.551147 +step:555 train loss:4.462828 +step:556 train loss:4.460267 +step:557 train loss:4.485341 +step:558 train loss:4.539147 +step:559 train loss:4.482591 +step:560 train loss:4.424915 +step:561 train loss:4.447847 +step:562 train loss:4.424357 +step:563 train loss:4.475345 +step:564 train loss:4.458047 +step:565 train loss:4.436877 +step:566 train loss:4.502664 +step:567 train loss:4.403924 +step:568 train loss:4.480658 +step:569 train loss:4.434459 +step:570 train loss:4.424143 +step:571 train loss:4.704569 +step:572 train loss:4.449520 +step:573 train loss:4.466866 +step:574 train loss:4.392296 +step:575 train loss:4.439287 +step:576 train loss:4.361581 +step:577 train loss:4.382376 +step:578 train loss:4.419432 +step:579 train loss:4.428901 +step:580 train loss:4.441401 +step:581 train loss:4.409581 +step:582 train loss:4.427384 +step:583 train loss:4.473972 +step:584 train loss:4.419117 +step:585 train loss:4.441929 +step:586 train loss:4.340326 +step:587 train loss:4.418944 +step:588 train loss:4.383290 +step:589 train loss:4.385789 +step:590 train loss:4.406861 +step:591 train loss:4.340873 +step:592 train loss:4.401112 +step:593 train loss:4.337910 +step:594 train loss:4.403915 +step:595 train loss:4.364062 +step:596 train loss:4.340917 +step:597 train loss:4.466035 +step:598 train loss:4.443443 +step:599 train loss:4.386726 +step:600 train loss:4.319118 +step:601 train loss:4.340483 +step:602 train loss:4.447651 +step:603 train loss:4.386177 +step:604 train loss:4.366341 +step:605 train loss:4.387106 +step:606 train loss:4.276960 +step:607 train loss:4.361460 +step:608 train loss:4.352848 +step:609 train loss:4.313323 +step:610 train loss:4.350327 +step:611 train loss:4.288247 +step:612 train loss:4.389021 +step:613 train loss:4.380093 +step:614 train loss:4.273070 +step:615 train loss:4.426022 +step:616 train loss:4.238659 +step:617 train loss:4.352984 +step:618 train loss:4.283566 +step:619 train loss:4.363529 +step:620 train loss:4.396468 +step:621 train loss:4.276344 +step:622 train loss:4.328744 +step:623 train loss:4.312323 +step:624 train loss:4.341812 +step:625 train loss:4.445226 +step:626 train loss:4.293259 +step:627 train loss:4.342507 +step:628 train loss:4.272975 +step:629 train loss:4.270870 +step:630 train loss:4.291309 +step:631 train loss:4.327468 +step:632 train loss:4.213840 +step:633 train loss:4.271756 +step:634 train loss:4.246064 +step:635 train loss:4.312998 +step:636 train loss:4.277926 +step:637 train loss:4.273431 +step:638 train loss:4.329447 +step:639 train loss:4.226024 +step:640 train loss:4.298600 +step:641 train loss:4.298108 +step:642 train loss:4.243776 +step:643 train loss:4.300982 +step:644 train loss:4.260629 +step:645 train loss:4.232918 +step:646 train loss:4.394137 +step:647 train loss:4.281568 +step:648 train loss:4.310197 +step:649 train loss:4.300954 +step:650 train loss:4.224479 +step:651 train loss:4.279825 +step:652 train loss:4.296231 +step:653 train loss:4.283341 +step:654 train loss:4.322370 +step:655 train loss:4.224423 +step:656 train loss:4.250430 +step:657 train loss:4.271317 +step:658 train loss:4.248994 +step:659 train loss:4.283650 +step:660 train loss:4.260944 +step:661 train loss:4.280756 +step:662 train loss:4.305712 +step:663 train loss:4.252907 +step:664 train loss:4.307837 +step:665 train loss:4.242828 +step:666 train loss:4.188993 +step:667 train loss:4.258443 +step:668 train loss:4.216593 +step:669 
train loss:4.253637 +step:670 train loss:4.297065 +step:671 train loss:4.327450 +step:672 train loss:4.230462 +step:673 train loss:4.159502 +step:674 train loss:4.281376 +step:675 train loss:4.223335 +step:676 train loss:4.226695 +step:677 train loss:4.214733 +step:678 train loss:4.289064 +step:679 train loss:4.255586 +step:680 train loss:4.166520 +step:681 train loss:4.273425 +step:682 train loss:4.208120 +step:683 train loss:4.252641 +step:684 train loss:4.219213 +step:685 train loss:4.327041 +step:686 train loss:4.217353 +step:687 train loss:4.145123 +step:688 train loss:4.236806 +step:689 train loss:4.214592 +step:690 train loss:4.222909 +step:691 train loss:4.245070 +step:692 train loss:4.110410 +step:693 train loss:4.245418 +step:694 train loss:4.243927 +step:695 train loss:4.231104 +step:696 train loss:4.267035 +step:697 train loss:4.204869 +step:698 train loss:4.188869 +step:699 train loss:4.194988 +step:700 train loss:4.241486 +step:701 train loss:4.189024 +step:702 train loss:4.151707 +step:703 train loss:4.246848 +step:704 train loss:4.208708 +step:705 train loss:4.262569 +step:706 train loss:4.215609 +step:707 train loss:4.177417 +step:708 train loss:4.213728 +step:709 train loss:4.181035 +step:710 train loss:4.227711 +step:711 train loss:4.188700 +step:712 train loss:4.138518 +step:713 train loss:4.216099 +step:714 train loss:4.145991 +step:715 train loss:4.165617 +step:716 train loss:4.173537 +step:717 train loss:4.140858 +step:718 train loss:4.235590 +step:719 train loss:4.212008 +step:720 train loss:4.186338 +step:721 train loss:4.218046 +step:722 train loss:4.156945 +step:723 train loss:4.195883 +step:724 train loss:4.164886 +step:725 train loss:4.145768 +step:726 train loss:4.176596 +step:727 train loss:4.106068 +step:728 train loss:4.256093 +step:729 train loss:4.104398 +step:730 train loss:4.184224 +step:731 train loss:4.222226 +step:732 train loss:4.101641 +step:733 train loss:4.197917 +step:734 train loss:4.148278 +step:735 train loss:4.188705 +step:736 train loss:4.174329 +step:737 train loss:4.182467 +step:738 train loss:4.227172 +step:739 train loss:4.087920 +step:740 train loss:4.194367 +step:741 train loss:4.157360 +step:742 train loss:4.126365 +step:743 train loss:4.182393 +step:744 train loss:4.154584 +step:745 train loss:4.120009 +step:746 train loss:4.173410 +step:747 train loss:4.192147 +step:748 train loss:4.180504 +step:749 train loss:4.183028 +step:750 validation loss:4.131912 +step:750 train loss:4.161314 +step:751 train loss:4.143645 +step:752 train loss:4.156637 +step:753 train loss:4.128021 +step:754 train loss:4.101523 +step:755 train loss:4.079658 +step:756 train loss:4.117460 +step:757 train loss:4.096435 +step:758 train loss:4.204925 +step:759 train loss:4.117555 +step:760 train loss:4.211647 +step:761 train loss:4.160483 +step:762 train loss:4.115344 +step:763 train loss:4.143400 +step:764 train loss:4.160568 +step:765 train loss:4.142916 +step:766 train loss:4.219387 +step:767 train loss:4.045650 +step:768 train loss:4.144051 +step:769 train loss:4.143548 +step:770 train loss:4.108844 +step:771 train loss:4.185446 +step:772 train loss:4.155291 +step:773 train loss:4.125661 +step:774 train loss:4.125363 +step:775 train loss:4.108055 +step:776 train loss:4.091192 +step:777 train loss:4.136794 +step:778 train loss:4.210910 +step:779 train loss:4.119526 +step:780 train loss:4.115448 +step:781 train loss:4.199190 +step:782 train loss:4.199233 +step:783 train loss:4.121019 +step:784 train loss:4.120699 +step:785 train loss:4.114732 +step:786 train 
loss:4.151691 +step:787 train loss:4.171569 +step:788 train loss:4.134418 +step:789 train loss:4.138662 +step:790 train loss:4.169252 +step:791 train loss:4.103148 +step:792 train loss:4.120534 +step:793 train loss:4.102273 +step:794 train loss:4.135423 +step:795 train loss:4.114693 +step:796 train loss:4.207996 +step:797 train loss:4.057574 +step:798 train loss:4.191837 +step:799 train loss:4.112250 +step:800 train loss:4.139753 +step:801 train loss:4.083030 +step:802 train loss:4.137484 +step:803 train loss:4.137139 +step:804 train loss:4.086842 +step:805 train loss:4.152946 +step:806 train loss:4.099314 +step:807 train loss:4.144073 +step:808 train loss:4.048530 +step:809 train loss:4.095831 +step:810 train loss:4.035141 +step:811 train loss:4.076662 +step:812 train loss:4.041770 +step:813 train loss:4.111224 +step:814 train loss:4.073849 +step:815 train loss:4.178205 +step:816 train loss:4.260577 +step:817 train loss:4.110415 +step:818 train loss:4.120581 +step:819 train loss:4.062022 +step:820 train loss:4.060827 +step:821 train loss:4.100740 +step:822 train loss:4.101942 +step:823 train loss:4.077279 +step:824 train loss:4.109219 +step:825 train loss:4.038034 +step:826 train loss:4.175198 +step:827 train loss:4.103644 +step:828 train loss:4.088586 +step:829 train loss:4.070127 +step:830 train loss:4.065487 +step:831 train loss:4.112277 +step:832 train loss:4.104495 +step:833 train loss:4.107141 +step:834 train loss:4.076429 +step:835 train loss:4.082442 +step:836 train loss:4.035007 +step:837 train loss:4.118517 +step:838 train loss:4.049674 +step:839 train loss:4.103137 +step:840 train loss:4.109684 +step:841 train loss:4.080824 +step:842 train loss:4.026815 +step:843 train loss:4.064515 +step:844 train loss:4.037803 +step:845 train loss:4.073414 +step:846 train loss:4.000443 +step:847 train loss:4.086103 +step:848 train loss:4.048943 +step:849 train loss:4.084543 +step:850 train loss:4.084702 +step:851 train loss:4.069772 +step:852 train loss:4.079864 +step:853 train loss:4.146255 +step:854 train loss:4.095519 +step:855 train loss:4.061938 +step:856 train loss:4.046411 +step:857 train loss:4.113066 +step:858 train loss:4.075840 +step:859 train loss:4.047517 +step:860 train loss:4.037522 +step:861 train loss:4.045114 +step:862 train loss:4.069460 +step:863 train loss:4.041602 +step:864 train loss:4.046223 +step:865 train loss:4.007271 +step:866 train loss:4.114382 +step:867 train loss:4.103983 +step:868 train loss:4.091152 +step:869 train loss:4.006563 +step:870 train loss:4.049505 +step:871 train loss:3.984315 +step:872 train loss:4.027536 +step:873 train loss:3.989726 +step:874 train loss:4.036615 +step:875 train loss:4.046265 +step:876 train loss:4.004498 +step:877 train loss:4.007726 +step:878 train loss:4.206895 +step:879 train loss:4.000752 +step:880 train loss:4.013303 +step:881 train loss:4.000443 +step:882 train loss:4.103059 +step:883 train loss:4.013083 +step:884 train loss:4.010915 +step:885 train loss:4.038928 +step:886 train loss:4.059650 +step:887 train loss:3.983198 +step:888 train loss:4.040573 +step:889 train loss:4.007251 +step:890 train loss:4.058545 +step:891 train loss:3.937866 +step:892 train loss:4.085720 +step:893 train loss:4.026263 +step:894 train loss:4.014137 +step:895 train loss:4.000630 +step:896 train loss:4.069901 +step:897 train loss:3.989866 +step:898 train loss:4.014104 +step:899 train loss:3.992202 +step:900 train loss:4.070118 +step:901 train loss:4.000782 +step:902 train loss:4.070106 +step:903 train loss:4.022736 +step:904 train loss:4.051740 
+step:905 train loss:3.991426 +step:906 train loss:4.052277 +step:907 train loss:4.058229 +step:908 train loss:4.049581 +step:909 train loss:3.991954 +step:910 train loss:4.024994 +step:911 train loss:3.970300 +step:912 train loss:3.960475 +step:913 train loss:3.990553 +step:914 train loss:4.005822 +step:915 train loss:3.959564 +step:916 train loss:4.039233 +step:917 train loss:4.130234 +step:918 train loss:4.067996 +step:919 train loss:4.023322 +step:920 train loss:4.056433 +step:921 train loss:4.007586 +step:922 train loss:3.995843 +step:923 train loss:3.997592 +step:924 train loss:4.033407 +step:925 train loss:4.009159 +step:926 train loss:4.016603 +step:927 train loss:3.980583 +step:928 train loss:3.978985 +step:929 train loss:3.996161 +step:930 train loss:4.032390 +step:931 train loss:4.028035 +step:932 train loss:4.006871 +step:933 train loss:4.104320 +step:934 train loss:4.044277 +step:935 train loss:4.030723 +step:936 train loss:3.942916 +step:937 train loss:3.947515 +step:938 train loss:3.965380 +step:939 train loss:4.005591 +step:940 train loss:3.971886 +step:941 train loss:4.057791 +step:942 train loss:3.944995 +step:943 train loss:4.029179 +step:944 train loss:3.985094 +step:945 train loss:3.916918 +step:946 train loss:4.009407 +step:947 train loss:4.043186 +step:948 train loss:4.005270 +step:949 train loss:3.990503 +step:950 train loss:3.961638 +step:951 train loss:4.043016 +step:952 train loss:3.967088 +step:953 train loss:4.039509 +step:954 train loss:4.017302 +step:955 train loss:4.001248 +step:956 train loss:4.009066 +step:957 train loss:4.057835 +step:958 train loss:4.022093 +step:959 train loss:3.962600 +step:960 train loss:4.061436 +step:961 train loss:3.984599 +step:962 train loss:4.037554 +step:963 train loss:4.052985 +step:964 train loss:4.019018 +step:965 train loss:4.023304 +step:966 train loss:3.986466 +step:967 train loss:4.013783 +step:968 train loss:4.056406 +step:969 train loss:3.984622 +step:970 train loss:3.990877 +step:971 train loss:4.043572 +step:972 train loss:3.986240 +step:973 train loss:3.980355 +step:974 train loss:3.955099 +step:975 train loss:4.061890 +step:976 train loss:3.947974 +step:977 train loss:3.974515 +step:978 train loss:4.000941 +step:979 train loss:3.946898 +step:980 train loss:3.970869 +step:981 train loss:3.931514 +step:982 train loss:4.041248 +step:983 train loss:4.070030 +step:984 train loss:4.024347 +step:985 train loss:4.016900 +step:986 train loss:4.003030 +step:987 train loss:4.028040 +step:988 train loss:3.978476 +step:989 train loss:3.919284 +step:990 train loss:3.979265 +step:991 train loss:3.986726 +step:992 train loss:4.008096 +step:993 train loss:4.032690 +step:994 train loss:4.020679 +step:995 train loss:4.086729 +step:996 train loss:4.075232 +step:997 train loss:3.974819 +step:998 train loss:4.056804 +step:999 train loss:3.949130 +step:1000 validation loss:3.920556 total_sharp:8.8365e-03 L1_sharp:6.3315e-03 L2_sharp:8.5353e-04 L3_sharp:1.8061e-03 L4_sharp:1.8478e-03 L5_sharp:2.1031e-03 L6_sharp:2.2163e-03 L7_sharp:2.3406e-03 L8_sharp:1.6468e-03 L9_sharp:8.6189e-04 L10_sharp:6.1768e-04 L11_sharp:6.8880e-04 L12_sharp:6.6260e-04 total_fnorm:2.4203e+00 total_l1_linf:2.0720e+04 total_spectral:2.4203e+00 L1_fnorm:6.0729e-01 L2_fnorm:5.5891e-01 L3_fnorm:5.3721e-01 L4_fnorm:5.4704e-01 L5_fnorm:5.7770e-01 L6_fnorm:5.7866e-01 L7_fnorm:5.9032e-01 L8_fnorm:5.9119e-01 L9_fnorm:5.9635e-01 L10_fnorm:5.9790e-01 L11_fnorm:5.9835e-01 L12_fnorm:6.0079e-01 L1_l1linf:4.2722e-01 L2_l1linf:4.2021e-01 L3_l1linf:4.2097e-01 L4_l1linf:3.9422e-01 
L5_l1linf:3.9951e-01 L6_l1linf:4.0355e-01 L7_l1linf:4.0310e-01 L8_l1linf:4.0432e-01 L9_l1linf:4.0407e-01 L10_l1linf:4.0297e-01 L11_l1linf:4.0199e-01 L12_l1linf:3.9784e-01 L1_spectral:1.2055e-02 L2_spectral:1.2064e-02 L3_spectral:1.2059e-02 L4_spectral:1.2045e-02 L5_spectral:1.2043e-02 L6_spectral:1.2056e-02 L7_spectral:1.2043e-02 L8_spectral:1.2043e-02 L9_spectral:1.2047e-02 L10_spectral:1.2049e-02 L11_spectral:1.2044e-02 L12_spectral:1.2042e-02 v_norm:2.4203e+00 cos_v_-g_hvp:5.7532e-02 g_hvp_norm:5.3474e-01 cos_v_-g_t:6.4300e-02 g_t_norm:4.7944e-01 hv_norm:7.4173e-01 cos_v_hv:2.8834e-02 hg_norm:1.3486e+01 cos_g_hg:5.6636e-01 v_par:7.9354e-03 v_perp:2.4203e+00 L1_cos_v_neg_g:5.2892e-02 L1_v_norm:6.0729e-01 L2_cos_v_neg_g:4.1339e-02 L2_v_norm:5.5891e-01 L3_cos_v_neg_g:3.9747e-02 L3_v_norm:5.3721e-01 L4_cos_v_neg_g:4.6085e-02 L4_v_norm:5.4704e-01 L5_cos_v_neg_g:5.6227e-02 L5_v_norm:5.7770e-01 L6_cos_v_neg_g:5.8458e-02 L6_v_norm:5.7866e-01 L7_cos_v_neg_g:6.2643e-02 L7_v_norm:5.9032e-01 L8_cos_v_neg_g:6.3558e-02 L8_v_norm:5.9119e-01 L9_cos_v_neg_g:6.0844e-02 L9_v_norm:5.9635e-01 L10_cos_v_neg_g:6.6111e-02 L10_v_norm:5.9790e-01 L11_cos_v_neg_g:7.5547e-02 L11_v_norm:5.9835e-01 L12_cos_v_neg_g:7.8820e-02 L12_v_norm:6.0079e-01 +step:1000 train loss:4.017238 +step:1001 train loss:3.987049 +step:1002 train loss:3.911120 +step:1003 train loss:3.959752 +step:1004 train loss:3.915982 +step:1005 train loss:4.019713 +step:1006 train loss:3.989643 +step:1007 train loss:4.044697 +step:1008 train loss:3.918804 +step:1009 train loss:4.034300 +step:1010 train loss:3.996507 +step:1011 train loss:3.963033 +step:1012 train loss:3.951405 +step:1013 train loss:3.971021 +step:1014 train loss:3.939989 +step:1015 train loss:3.910836 +step:1016 train loss:4.003733 +step:1017 train loss:4.088620 +step:1018 train loss:4.059325 +step:1019 train loss:3.905871 +step:1020 train loss:4.011382 +step:1021 train loss:3.941188 +step:1022 train loss:3.955227 +step:1023 train loss:3.934158 +step:1024 train loss:3.916731 +step:1025 train loss:3.951122 +step:1026 train loss:3.934930 +step:1027 train loss:3.914860 +step:1028 train loss:3.948565 +step:1029 train loss:3.882822 +step:1030 train loss:3.991605 +step:1031 train loss:3.966360 +step:1032 train loss:3.981451 +step:1033 train loss:3.966746 +step:1034 train loss:3.981996 +step:1035 train loss:4.089858 +step:1036 train loss:4.084778 +step:1037 train loss:3.929344 +step:1038 train loss:3.940242 +step:1039 train loss:3.960938 +step:1040 train loss:3.991323 +step:1041 train loss:4.019416 +step:1042 train loss:3.981931 +step:1043 train loss:3.937836 +step:1044 train loss:3.913929 +step:1045 train loss:3.926691 +step:1046 train loss:3.939843 +step:1047 train loss:3.961887 +step:1048 train loss:4.040008 +step:1049 train loss:4.031065 +step:1050 train loss:4.014136 +step:1051 train loss:4.115008 +step:1052 train loss:3.974329 +step:1053 train loss:4.048791 +step:1054 train loss:3.966407 +step:1055 train loss:3.920931 +step:1056 train loss:4.036979 +step:1057 train loss:3.944940 +step:1058 train loss:3.976720 +step:1059 train loss:3.974222 +step:1060 train loss:3.947297 +step:1061 train loss:3.939814 +step:1062 train loss:3.950033 +step:1063 train loss:3.960443 +step:1064 train loss:3.943889 +step:1065 train loss:3.961548 +step:1066 train loss:3.935920 +step:1067 train loss:3.986497 +step:1068 train loss:3.939990 +step:1069 train loss:3.938141 +step:1070 train loss:3.895923 +step:1071 train loss:3.992840 +step:1072 train loss:3.971749 +step:1073 train loss:3.987486 +step:1074 train 
loss:3.914174 +step:1075 train loss:3.974608 +step:1076 train loss:3.884451 +step:1077 train loss:3.977393 +step:1078 train loss:3.938695 +step:1079 train loss:3.944877 +step:1080 train loss:3.980692 +step:1081 train loss:3.951273 +step:1082 train loss:3.973163 +step:1083 train loss:3.962865 +step:1084 train loss:4.048649 +step:1085 train loss:3.944413 +step:1086 train loss:3.975886 +step:1087 train loss:3.941485 +step:1088 train loss:3.959278 +step:1089 train loss:3.887993 +step:1090 train loss:3.930676 +step:1091 train loss:3.900167 +step:1092 train loss:3.915990 +step:1093 train loss:3.953161 +step:1094 train loss:4.036287 +step:1095 train loss:3.883403 +step:1096 train loss:3.963192 +step:1097 train loss:3.934874 +step:1098 train loss:3.915257 +step:1099 train loss:3.922285 +step:1100 train loss:3.942881 +step:1101 train loss:3.879673 +step:1102 train loss:3.952751 +step:1103 train loss:3.961409 +step:1104 train loss:3.979374 +step:1105 train loss:3.945296 +step:1106 train loss:3.951925 +step:1107 train loss:3.883939 +step:1108 train loss:3.897624 +step:1109 train loss:3.936794 +step:1110 train loss:3.927130 +step:1111 train loss:3.880314 +step:1112 train loss:3.926569 +step:1113 train loss:3.895587 +step:1114 train loss:3.905015 +step:1115 train loss:3.918163 +step:1116 train loss:3.959699 +step:1117 train loss:3.903541 +step:1118 train loss:3.889790 +step:1119 train loss:3.917680 +step:1120 train loss:3.872717 +step:1121 train loss:3.931571 +step:1122 train loss:3.951693 +step:1123 train loss:3.922375 +step:1124 train loss:3.935104 +step:1125 train loss:3.912374 +step:1126 train loss:3.907477 +step:1127 train loss:3.876174 +step:1128 train loss:3.904721 +step:1129 train loss:3.850087 +step:1130 train loss:3.953514 +step:1131 train loss:3.904396 +step:1132 train loss:4.011772 +step:1133 train loss:3.974426 +step:1134 train loss:3.906939 +step:1135 train loss:3.921943 +step:1136 train loss:3.950566 +step:1137 train loss:3.880864 +step:1138 train loss:3.974774 +step:1139 train loss:3.906704 +step:1140 train loss:3.913599 +step:1141 train loss:3.888611 +step:1142 train loss:3.879109 +step:1143 train loss:3.885399 +step:1144 train loss:3.968457 +step:1145 train loss:3.936940 +step:1146 train loss:3.999548 +step:1147 train loss:3.923007 +step:1148 train loss:3.939657 +step:1149 train loss:3.917903 +step:1150 train loss:3.940454 +step:1151 train loss:3.859577 +step:1152 train loss:3.911730 +step:1153 train loss:3.870892 +step:1154 train loss:3.833951 +step:1155 train loss:3.886918 +step:1156 train loss:3.897177 +step:1157 train loss:3.795695 +step:1158 train loss:3.898779 +step:1159 train loss:3.860814 +step:1160 train loss:3.828297 +step:1161 train loss:3.924325 +step:1162 train loss:3.930115 +step:1163 train loss:3.900235 +step:1164 train loss:3.986841 +step:1165 train loss:3.878423 +step:1166 train loss:3.821037 +step:1167 train loss:3.878262 +step:1168 train loss:3.903805 +step:1169 train loss:3.939801 +step:1170 train loss:3.881920 +step:1171 train loss:3.904746 +step:1172 train loss:3.842161 +step:1173 train loss:3.853910 +step:1174 train loss:3.891304 +step:1175 train loss:3.952663 +step:1176 train loss:3.853072 +step:1177 train loss:3.864433 +step:1178 train loss:3.886409 +step:1179 train loss:3.877055 +step:1180 train loss:3.864336 +step:1181 train loss:3.877768 +step:1182 train loss:3.843292 +step:1183 train loss:3.834210 +step:1184 train loss:3.862497 +step:1185 train loss:3.861335 +step:1186 train loss:3.939313 +step:1187 train loss:3.840304 +step:1188 train loss:3.970093 
+step:1189 train loss:3.834878 +step:1190 train loss:3.869015 +step:1191 train loss:3.819268 +step:1192 train loss:3.839825 +step:1193 train loss:3.933964 +step:1194 train loss:3.906862 +step:1195 train loss:3.957619 +step:1196 train loss:3.847593 +step:1197 train loss:3.851432 +step:1198 train loss:3.808861 +step:1199 train loss:3.841226 +step:1200 train loss:3.859733 +step:1201 train loss:3.769615 +step:1202 train loss:3.939438 +step:1203 train loss:3.781530 +step:1204 train loss:3.842967 +step:1205 train loss:3.952747 +step:1206 train loss:3.872367 +step:1207 train loss:3.835607 +step:1208 train loss:3.855239 +step:1209 train loss:3.900742 +step:1210 train loss:3.838456 +step:1211 train loss:3.881900 +step:1212 train loss:3.842201 +step:1213 train loss:3.812304 +step:1214 train loss:3.870808 +step:1215 train loss:3.909927 +step:1216 train loss:3.845347 +step:1217 train loss:3.862902 +step:1218 train loss:3.883934 +step:1219 train loss:3.830616 +step:1220 train loss:3.971345 +step:1221 train loss:3.909583 +step:1222 train loss:3.845162 +step:1223 train loss:3.789073 +step:1224 train loss:3.851683 +step:1225 train loss:3.890382 +step:1226 train loss:3.825362 +step:1227 train loss:3.856534 +step:1228 train loss:3.889891 +step:1229 train loss:3.820781 +step:1230 train loss:3.798773 +step:1231 train loss:3.863661 +step:1232 train loss:3.816331 +step:1233 train loss:3.828167 +step:1234 train loss:3.830427 +step:1235 train loss:3.868656 +step:1236 train loss:3.812114 +step:1237 train loss:3.827796 +step:1238 train loss:3.828038 +step:1239 train loss:3.881013 +step:1240 train loss:3.825408 +step:1241 train loss:3.854964 +step:1242 train loss:3.827755 +step:1243 train loss:3.820897 +step:1244 train loss:3.851030 +step:1245 train loss:3.862187 +step:1246 train loss:3.834047 +step:1247 train loss:3.780863 +step:1248 train loss:3.815119 +step:1249 train loss:3.843311 +step:1250 validation loss:3.815516 +step:1250 train loss:3.805851 +step:1251 train loss:3.872620 +step:1252 train loss:3.865549 +step:1253 train loss:3.840657 +step:1254 train loss:3.861735 +step:1255 train loss:3.815292 +step:1256 train loss:3.836765 +step:1257 train loss:3.861866 +step:1258 train loss:3.779759 +step:1259 train loss:3.849547 +step:1260 train loss:3.812342 +step:1261 train loss:3.900316 +step:1262 train loss:3.794613 +step:1263 train loss:3.927664 +step:1264 train loss:3.859350 +step:1265 train loss:3.900428 +step:1266 train loss:3.864790 +step:1267 train loss:3.833459 +step:1268 train loss:3.866303 +step:1269 train loss:3.894796 +step:1270 train loss:3.725388 +step:1271 train loss:3.841577 +step:1272 train loss:3.826871 +step:1273 train loss:3.836217 +step:1274 train loss:3.861280 +step:1275 train loss:3.926383 +step:1276 train loss:3.779398 +step:1277 train loss:3.874936 +step:1278 train loss:3.817641 +step:1279 train loss:3.801029 +step:1280 train loss:3.891971 +step:1281 train loss:3.890696 +step:1282 train loss:3.840641 +step:1283 train loss:4.061312 +step:1284 train loss:3.947665 +step:1285 train loss:3.892617 +step:1286 train loss:3.822823 +step:1287 train loss:3.864940 +step:1288 train loss:3.873987 +step:1289 train loss:3.886769 +step:1290 train loss:3.865857 +step:1291 train loss:3.934230 +step:1292 train loss:3.825149 +step:1293 train loss:3.877192 +step:1294 train loss:3.856610 +step:1295 train loss:3.827053 +step:1296 train loss:3.821421 +step:1297 train loss:3.804724 +step:1298 train loss:3.861709 +step:1299 train loss:3.890582 +step:1300 train loss:3.877129 +step:1301 train loss:3.822411 +step:1302 
train loss:3.819834 +step:1303 train loss:3.831518 +step:1304 train loss:3.805318 +step:1305 train loss:3.829923 +step:1306 train loss:3.898905 +step:1307 train loss:3.765111 +step:1308 train loss:3.840287 +step:1309 train loss:3.846861 +step:1310 train loss:3.899040 +step:1311 train loss:3.783185 +step:1312 train loss:3.851552 +step:1313 train loss:3.838041 +step:1314 train loss:3.761122 +step:1315 train loss:3.808306 +step:1316 train loss:3.846741 +step:1317 train loss:3.826078 +step:1318 train loss:3.906044 +step:1319 train loss:3.917356 +step:1320 train loss:3.867838 +step:1321 train loss:3.842504 +step:1322 train loss:3.903624 +step:1323 train loss:3.785010 +step:1324 train loss:3.859156 +step:1325 train loss:3.855083 +step:1326 train loss:3.839197 +step:1327 train loss:3.788195 +step:1328 train loss:3.833057 +step:1329 train loss:3.860416 +step:1330 train loss:3.812673 +step:1331 train loss:3.782357 +step:1332 train loss:3.868244 +step:1333 train loss:3.828838 +step:1334 train loss:3.734640 +step:1335 train loss:3.789866 +step:1336 train loss:3.834708 +step:1337 train loss:3.903046 +step:1338 train loss:3.829996 +step:1339 train loss:3.808477 +step:1340 train loss:3.847865 +step:1341 train loss:3.821331 +step:1342 train loss:3.828689 +step:1343 train loss:3.802814 +step:1344 train loss:3.841110 +step:1345 train loss:3.840911 +step:1346 train loss:3.769838 +step:1347 train loss:3.863165 +step:1348 train loss:3.989293 +step:1349 train loss:3.785472 +step:1350 train loss:3.759729 +step:1351 train loss:3.900344 +step:1352 train loss:3.862920 +step:1353 train loss:3.858365 +step:1354 train loss:3.789517 +step:1355 train loss:3.844117 +step:1356 train loss:3.773483 +step:1357 train loss:3.841596 +step:1358 train loss:3.849688 +step:1359 train loss:3.796019 +step:1360 train loss:3.815139 +step:1361 train loss:3.858236 +step:1362 train loss:3.891097 +step:1363 train loss:3.845881 +step:1364 train loss:3.831895 +step:1365 train loss:3.759029 +step:1366 train loss:4.147465 +step:1367 train loss:3.879414 +step:1368 train loss:3.831673 +step:1369 train loss:3.858423 +step:1370 train loss:3.849354 +step:1371 train loss:3.849132 +step:1372 train loss:3.800470 +step:1373 train loss:3.775298 +step:1374 train loss:3.851912 +step:1375 train loss:3.842946 +step:1376 train loss:3.749602 +step:1377 train loss:3.881584 +step:1378 train loss:3.783353 +step:1379 train loss:3.830642 +step:1380 train loss:3.860326 +step:1381 train loss:3.781691 +step:1382 train loss:3.836478 +step:1383 train loss:3.777237 +step:1384 train loss:3.872374 +step:1385 train loss:3.862341 +step:1386 train loss:3.902467 +step:1387 train loss:3.774761 +step:1388 train loss:3.789396 +step:1389 train loss:3.865172 +step:1390 train loss:3.824247 +step:1391 train loss:3.799574 +step:1392 train loss:3.857686 +step:1393 train loss:3.903051 +step:1394 train loss:3.766298 +step:1395 train loss:3.855607 +step:1396 train loss:3.783995 +step:1397 train loss:3.821833 +step:1398 train loss:3.826552 +step:1399 train loss:3.799974 +step:1400 train loss:3.866806 +step:1401 train loss:3.766338 +step:1402 train loss:3.824242 +step:1403 train loss:3.797397 +step:1404 train loss:3.790848 +step:1405 train loss:3.851757 +step:1406 train loss:3.785378 +step:1407 train loss:3.771028 +step:1408 train loss:3.800829 +step:1409 train loss:3.785072 +step:1410 train loss:3.853516 +step:1411 train loss:3.831328 +step:1412 train loss:3.855168 +step:1413 train loss:3.834839 +step:1414 train loss:3.830207 +step:1415 train loss:3.800012 +step:1416 train loss:3.800116 
+step:1417 train loss:3.957178 +step:1418 train loss:3.796981 +step:1419 train loss:3.776065 +step:1420 train loss:3.796371 +step:1421 train loss:3.817355 +step:1422 train loss:3.819694 +step:1423 train loss:3.777475 +step:1424 train loss:3.824947 +step:1425 train loss:3.813246 +step:1426 train loss:3.798184 +step:1427 train loss:3.887519 +step:1428 train loss:3.837962 +step:1429 train loss:3.798987 +step:1430 train loss:3.856931 +step:1431 train loss:3.931099 +step:1432 train loss:3.795913 +step:1433 train loss:3.886906 +step:1434 train loss:3.780738 +step:1435 train loss:3.853471 +step:1436 train loss:3.815207 +step:1437 train loss:3.785620 +step:1438 train loss:3.943782 +step:1439 train loss:3.778739 +step:1440 train loss:3.794783 +step:1441 train loss:3.823555 +step:1442 train loss:3.720354 +step:1443 train loss:3.832115 +step:1444 train loss:3.794370 +step:1445 train loss:3.847788 +step:1446 train loss:3.778741 +step:1447 train loss:3.808353 +step:1448 train loss:3.819126 +step:1449 train loss:3.779572 +step:1450 train loss:3.851779 +step:1451 train loss:3.790120 +step:1452 train loss:3.847023 +step:1453 train loss:3.836550 +step:1454 train loss:3.753975 +step:1455 train loss:3.817464 +step:1456 train loss:3.731510 +step:1457 train loss:3.750053 +step:1458 train loss:3.797244 +step:1459 train loss:3.737696 +step:1460 train loss:3.780956 +step:1461 train loss:3.792417 +step:1462 train loss:3.750993 +step:1463 train loss:3.838188 +step:1464 train loss:3.792401 +step:1465 train loss:3.805095 +step:1466 train loss:3.804562 +step:1467 train loss:3.765965 +step:1468 train loss:3.809036 +step:1469 train loss:3.819214 +step:1470 train loss:3.761847 +step:1471 train loss:3.886834 +step:1472 train loss:3.773078 +step:1473 train loss:3.795641 +step:1474 train loss:3.789907 +step:1475 train loss:3.784083 +step:1476 train loss:3.789693 +step:1477 train loss:3.772561 +step:1478 train loss:3.821840 +step:1479 train loss:3.739264 +step:1480 train loss:3.800457 +step:1481 train loss:3.817076 +step:1482 train loss:3.768965 +step:1483 train loss:3.762800 +step:1484 train loss:3.820697 +step:1485 train loss:3.830179 +step:1486 train loss:3.818313 +step:1487 train loss:3.861570 +step:1488 train loss:3.817932 +step:1489 train loss:3.773149 +step:1490 train loss:3.813944 +step:1491 train loss:3.701818 +step:1492 train loss:3.814626 +step:1493 train loss:3.819928 +step:1494 train loss:3.710257 +step:1495 train loss:3.738961 +step:1496 train loss:3.818315 +step:1497 train loss:3.800882 +step:1498 train loss:3.780850 +step:1499 train loss:3.801925 +step:1500 validation loss:3.740139 total_sharp:9.4929e-03 L1_sharp:1.2123e-02 L2_sharp:4.7261e-03 L3_sharp:2.6877e-03 L4_sharp:1.8248e-03 L5_sharp:1.5701e-03 L6_sharp:1.9214e-03 L7_sharp:1.7768e-03 L8_sharp:1.3302e-03 L9_sharp:8.7541e-04 L10_sharp:5.5355e-04 L11_sharp:5.7487e-04 L12_sharp:5.5244e-04 total_fnorm:2.4042e+00 total_l1_linf:2.0567e+04 total_spectral:2.4042e+00 L1_fnorm:5.9478e-01 L2_fnorm:5.1951e-01 L3_fnorm:5.1991e-01 L4_fnorm:5.4646e-01 L5_fnorm:5.7987e-01 L6_fnorm:5.8362e-01 L7_fnorm:5.9412e-01 L8_fnorm:5.9599e-01 L9_fnorm:5.9842e-01 L10_fnorm:5.9977e-01 L11_fnorm:5.9985e-01 L12_fnorm:6.0266e-01 L1_l1linf:4.2909e-01 L2_l1linf:4.0862e-01 L3_l1linf:4.0776e-01 L4_l1linf:3.9501e-01 L5_l1linf:4.0687e-01 L6_l1linf:4.0822e-01 L7_l1linf:4.0988e-01 L8_l1linf:4.1238e-01 L9_l1linf:4.1104e-01 L10_l1linf:4.1050e-01 L11_l1linf:4.0303e-01 L12_l1linf:3.9772e-01 L1_spectral:1.2049e-02 L2_spectral:1.2040e-02 L3_spectral:1.2047e-02 L4_spectral:1.2049e-02 
L5_spectral:1.2047e-02 L6_spectral:1.2062e-02 L7_spectral:1.2043e-02 L8_spectral:1.2047e-02 L9_spectral:1.2044e-02 L10_spectral:1.2043e-02 L11_spectral:1.2042e-02 L12_spectral:1.2044e-02 v_norm:2.4042e+00 cos_v_-g_hvp:5.0184e-02 g_hvp_norm:6.1500e-01 cos_v_-g_t:5.5108e-02 g_t_norm:5.6323e-01 hv_norm:9.6992e-01 cos_v_hv:2.3530e-02 hg_norm:3.0955e+01 cos_g_hg:4.4997e-01 v_par:6.6620e-03 v_perp:2.4042e+00 L1_cos_v_neg_g:4.0373e-02 L1_v_norm:5.9478e-01 L2_cos_v_neg_g:4.7065e-02 L2_v_norm:5.1951e-01 L3_cos_v_neg_g:4.2657e-02 L3_v_norm:5.1991e-01 L4_cos_v_neg_g:4.6614e-02 L4_v_norm:5.4646e-01 L5_cos_v_neg_g:5.1805e-02 L5_v_norm:5.7987e-01 L6_cos_v_neg_g:5.2584e-02 L6_v_norm:5.8362e-01 L7_cos_v_neg_g:5.2412e-02 L7_v_norm:5.9412e-01 L8_cos_v_neg_g:5.2802e-02 L8_v_norm:5.9599e-01 L9_cos_v_neg_g:5.5899e-02 L9_v_norm:5.9842e-01 L10_cos_v_neg_g:6.2724e-02 L10_v_norm:5.9977e-01 L11_cos_v_neg_g:7.2148e-02 L11_v_norm:5.9985e-01 L12_cos_v_neg_g:9.6400e-02 L12_v_norm:6.0266e-01 +step:1500 train loss:3.772950 +step:1501 train loss:3.802799 +step:1502 train loss:3.759004 +step:1503 train loss:3.883704 +step:1504 train loss:3.808402 +step:1505 train loss:3.813749 +step:1506 train loss:3.792290 +step:1507 train loss:3.872199 +step:1508 train loss:3.810502 +step:1509 train loss:3.834527 +step:1510 train loss:3.809995 +step:1511 train loss:3.779110 +step:1512 train loss:3.745944 +step:1513 train loss:3.776790 +step:1514 train loss:3.833956 +step:1515 train loss:3.822742 +step:1516 train loss:3.750187 +step:1517 train loss:3.745195 +step:1518 train loss:3.765325 +step:1519 train loss:3.802921 +step:1520 train loss:3.794877 +step:1521 train loss:3.754992 +step:1522 train loss:3.803168 +step:1523 train loss:3.756108 +step:1524 train loss:3.742213 +step:1525 train loss:3.848659 +step:1526 train loss:3.816104 +step:1527 train loss:3.875036 +step:1528 train loss:3.876495 +step:1529 train loss:3.815207 +step:1530 train loss:3.776833 +step:1531 train loss:3.804903 +step:1532 train loss:3.859364 +step:1533 train loss:3.877305 +step:1534 train loss:3.814570 +step:1535 train loss:3.813502 +step:1536 train loss:3.751416 +step:1537 train loss:3.876235 +step:1538 train loss:3.745413 +step:1539 train loss:3.858035 +step:1540 train loss:3.827801 +step:1541 train loss:3.852560 +step:1542 train loss:3.766860 +step:1543 train loss:3.918817 +step:1544 train loss:3.924318 +step:1545 train loss:3.762296 +step:1546 train loss:3.827408 +step:1547 train loss:3.800191 +step:1548 train loss:3.770855 +step:1549 train loss:3.774394 +step:1550 train loss:3.746575 +step:1551 train loss:3.792063 +step:1552 train loss:3.815521 +step:1553 train loss:3.906738 +step:1554 train loss:3.785808 +step:1555 train loss:3.716272 +step:1556 train loss:3.770565 +step:1557 train loss:3.812082 +step:1558 train loss:3.806216 +step:1559 train loss:3.922713 +step:1560 train loss:3.850261 +step:1561 train loss:3.798991 +step:1562 train loss:3.841266 +step:1563 train loss:3.796266 +step:1564 train loss:3.758745 +step:1565 train loss:3.843640 +step:1566 train loss:3.763715 +step:1567 train loss:3.775537 +step:1568 train loss:4.063278 +step:1569 train loss:3.829892 +step:1570 train loss:4.016788 +step:1571 train loss:3.798620 +step:1572 train loss:3.792423 +step:1573 train loss:3.836898 +step:1574 train loss:3.757719 +step:1575 train loss:3.840229 +step:1576 train loss:3.764714 +step:1577 train loss:3.829293 +step:1578 train loss:3.900746 +step:1579 train loss:3.851716 +step:1580 train loss:3.829863 +step:1581 train loss:3.814290 +step:1582 train loss:3.769082 
+step:1583 train loss:3.884801 +step:1584 train loss:3.861435 +step:1585 train loss:3.761881 +step:1586 train loss:3.824622 +step:1587 train loss:3.837291 +step:1588 train loss:3.768326 +step:1589 train loss:3.796353 +step:1590 train loss:3.848292 +step:1591 train loss:3.793825 +step:1592 train loss:3.771861 +step:1593 train loss:3.860952 +step:1594 train loss:3.776111 +step:1595 train loss:3.759870 +step:1596 train loss:3.763793 +step:1597 train loss:3.768471 +step:1598 train loss:3.776626 +step:1599 train loss:3.726828 +step:1600 train loss:3.795462 +step:1601 train loss:3.801793 +step:1602 train loss:3.810534 +step:1603 train loss:3.737710 +step:1604 train loss:3.777392 +step:1605 train loss:3.809153 +step:1606 train loss:3.720474 +step:1607 train loss:3.726384 +step:1608 train loss:3.775760 +step:1609 train loss:3.800299 +step:1610 train loss:3.783728 +step:1611 train loss:3.764997 +step:1612 train loss:3.737481 +step:1613 train loss:3.811239 +step:1614 train loss:3.773349 +step:1615 train loss:3.774016 +step:1616 train loss:3.745672 +step:1617 train loss:3.805512 +step:1618 train loss:3.828598 +step:1619 train loss:3.785092 +step:1620 train loss:3.779571 +step:1621 train loss:3.786543 +step:1622 train loss:3.765522 +step:1623 train loss:3.802028 +step:1624 train loss:3.804333 +step:1625 train loss:3.864149 +step:1626 train loss:3.814052 +step:1627 train loss:3.854677 +step:1628 train loss:3.826377 +step:1629 train loss:3.743924 +step:1630 train loss:3.752595 +step:1631 train loss:3.878444 +step:1632 train loss:3.768504 +step:1633 train loss:3.840781 +step:1634 train loss:3.832643 +step:1635 train loss:3.729737 +step:1636 train loss:3.780135 +step:1637 train loss:3.743352 +step:1638 train loss:3.794200 +step:1639 train loss:3.886396 +step:1640 train loss:3.768076 +step:1641 train loss:3.850736 +step:1642 train loss:3.771799 +step:1643 train loss:3.842170 +step:1644 train loss:3.761480 +step:1645 train loss:3.828833 +step:1646 train loss:3.900337 +step:1647 train loss:3.774633 +step:1648 train loss:3.804357 +step:1649 train loss:3.776150 +step:1650 train loss:3.752136 +step:1651 train loss:3.751538 +step:1652 train loss:3.781583 +step:1653 train loss:3.773973 +step:1654 train loss:3.849488 +step:1655 train loss:3.808525 +step:1656 train loss:3.710055 +step:1657 train loss:3.787587 +step:1658 train loss:3.786593 +step:1659 train loss:3.750384 +step:1660 train loss:3.730418 +step:1661 train loss:3.781052 +step:1662 train loss:3.769855 +step:1663 train loss:3.761983 +step:1664 train loss:3.761696 +step:1665 train loss:3.788009 +step:1666 train loss:3.828759 +step:1667 train loss:3.788173 +step:1668 train loss:3.889893 +step:1669 train loss:3.767733 +step:1670 train loss:3.749876 +step:1671 train loss:3.725348 +step:1672 train loss:3.741359 +step:1673 train loss:3.676438 +step:1674 train loss:3.714833 +step:1675 train loss:3.741495 +step:1676 train loss:3.749072 +step:1677 train loss:3.822123 +step:1678 train loss:3.760927 +step:1679 train loss:3.775095 +step:1680 train loss:3.783545 +step:1681 train loss:3.793286 +step:1682 train loss:3.793833 +step:1683 train loss:3.727958 +step:1684 train loss:3.836909 +step:1685 train loss:3.814393 +step:1686 train loss:3.736634 +step:1687 train loss:3.715723 +step:1688 train loss:3.796169 +step:1689 train loss:3.794613 +step:1690 train loss:3.873802 +step:1691 train loss:3.776628 +step:1692 train loss:3.827563 +step:1693 train loss:3.739729 +step:1694 train loss:3.726971 +step:1695 train loss:3.772596 +step:1696 train loss:3.815210 +step:1697 train 
loss:3.910807 +step:1698 train loss:3.789506 +step:1699 train loss:3.706872 +step:1700 train loss:3.786685 +step:1701 train loss:3.736309 +step:1702 train loss:3.717183 +step:1703 train loss:3.774652 +step:1704 train loss:3.764534 +step:1705 train loss:3.731964 +step:1706 train loss:3.741683 +step:1707 train loss:3.809184 +step:1708 train loss:3.790884 +step:1709 train loss:3.760395 +step:1710 train loss:3.758737 +step:1711 train loss:3.780320 +step:1712 train loss:3.788973 +step:1713 train loss:3.727644 +step:1714 train loss:3.759729 +step:1715 train loss:3.722101 +step:1716 train loss:3.755057 +step:1717 train loss:3.691184 +step:1718 train loss:3.803293 +step:1719 train loss:3.750295 +step:1720 train loss:3.734887 +step:1721 train loss:3.741862 +step:1722 train loss:3.762786 +step:1723 train loss:3.746963 +step:1724 train loss:3.815958 +step:1725 train loss:3.762401 +step:1726 train loss:3.763917 +step:1727 train loss:3.814994 +step:1728 train loss:3.684509 +step:1729 train loss:3.711606 +step:1730 train loss:3.719918 +step:1731 train loss:3.755583 +step:1732 train loss:3.747207 +step:1733 train loss:3.824693 +step:1734 train loss:3.714837 +step:1735 train loss:3.817295 +step:1736 train loss:3.703503 +step:1737 train loss:3.782853 +step:1738 train loss:3.687844 +step:1739 train loss:3.840911 +step:1740 train loss:3.699632 +step:1741 train loss:3.709989 +step:1742 train loss:3.758611 +step:1743 train loss:3.854017 +step:1744 train loss:3.735298 +step:1745 train loss:3.717556 +step:1746 train loss:3.929504 +step:1747 train loss:3.732143 +step:1748 train loss:3.741168 +step:1749 train loss:3.710216 +step:1750 validation loss:3.694742 +step:1750 train loss:3.752190 +step:1751 train loss:3.693453 +step:1752 train loss:3.769615 +step:1753 train loss:3.768415 +step:1754 train loss:3.757449 +step:1755 train loss:3.791534 +step:1756 train loss:3.725690 +step:1757 train loss:3.742590 +step:1758 train loss:3.750777 +step:1759 train loss:3.740116 +step:1760 train loss:3.741043 +step:1761 train loss:3.717494 +step:1762 train loss:3.770445 +step:1763 train loss:3.695745 +step:1764 train loss:3.784293 +step:1765 train loss:3.715966 +step:1766 train loss:3.767982 +step:1767 train loss:3.716187 +step:1768 train loss:3.738105 +step:1769 train loss:3.700532 +step:1770 train loss:3.759192 +step:1771 train loss:3.737823 +step:1772 train loss:3.786557 +step:1773 train loss:3.728076 +step:1774 train loss:3.799240 +step:1775 train loss:3.743848 +step:1776 train loss:3.738145 +step:1777 train loss:3.713602 +step:1778 train loss:3.696405 +step:1779 train loss:3.772174 +step:1780 train loss:3.710919 +step:1781 train loss:3.763460 +step:1782 train loss:3.786911 +step:1783 train loss:3.701607 +step:1784 train loss:3.718704 +step:1785 train loss:3.800635 +step:1786 train loss:3.762073 +step:1787 train loss:3.736865 +step:1788 train loss:3.708025 +step:1789 train loss:3.712426 +step:1790 train loss:3.710774 +step:1791 train loss:3.769559 +step:1792 train loss:3.741869 +step:1793 train loss:3.733829 +step:1794 train loss:3.850923 +step:1795 train loss:3.671878 +step:1796 train loss:3.769335 +step:1797 train loss:3.744768 +step:1798 train loss:3.785843 +step:1799 train loss:3.693586 +step:1800 train loss:3.725802 +step:1801 train loss:3.717788 +step:1802 train loss:3.775573 +step:1803 train loss:3.714734 +step:1804 train loss:3.725289 +step:1805 train loss:3.774622 +step:1806 train loss:3.709766 +step:1807 train loss:3.713227 +step:1808 train loss:3.780280 +step:1809 train loss:3.760977 +step:1810 train loss:3.678432 
+step:1811 train loss:3.760412 +step:1812 train loss:3.704574 +step:1813 train loss:3.706311 +step:1814 train loss:3.782420 +step:1815 train loss:3.728236 +step:1816 train loss:3.711143 +step:1817 train loss:3.691395 +step:1818 train loss:3.722335 +step:1819 train loss:3.759834 +step:1820 train loss:3.688452 +step:1821 train loss:3.757885 +step:1822 train loss:3.694982 +step:1823 train loss:3.666508 +step:1824 train loss:3.814481 +step:1825 train loss:3.698518 +step:1826 train loss:3.709777 +step:1827 train loss:3.726838 +step:1828 train loss:3.675318 +step:1829 train loss:3.767440 +step:1830 train loss:3.736922 +step:1831 train loss:3.682294 +step:1832 train loss:3.706376 +step:1833 train loss:3.624157 +step:1834 train loss:3.682303 +step:1835 train loss:3.662642 +step:1836 train loss:3.718552 +step:1837 train loss:3.749297 +step:1838 train loss:3.723830 +step:1839 train loss:3.727483 +step:1840 train loss:3.714147 +step:1841 train loss:3.803824 +step:1842 train loss:3.720238 +step:1843 train loss:3.772550 +step:1844 train loss:3.724495 +step:1845 train loss:3.820352 +step:1846 train loss:3.730821 +step:1847 train loss:3.706985 +step:1848 train loss:3.756773 +step:1849 train loss:3.684700 +step:1850 train loss:3.835929 +step:1851 train loss:3.676211 +step:1852 train loss:3.705232 +step:1853 train loss:3.709343 +step:1854 train loss:3.750551 +step:1855 train loss:3.671200 +step:1856 train loss:3.734439 +step:1857 train loss:3.675955 +step:1858 train loss:3.700749 +step:1859 train loss:3.712655 +step:1860 train loss:3.863383 +step:1861 train loss:3.739309 +step:1862 train loss:3.735359 +step:1863 train loss:3.812351 +step:1864 train loss:3.700298 +step:1865 train loss:3.692590 +step:1866 train loss:3.673752 +step:1867 train loss:3.832186 +step:1868 train loss:3.665605 +step:1869 train loss:3.730872 +step:1870 train loss:3.758574 +step:1871 train loss:3.671415 +step:1872 train loss:3.662109 +step:1873 train loss:3.726409 +step:1874 train loss:3.675754 +step:1875 train loss:3.720898 +step:1876 train loss:3.689251 +step:1877 train loss:3.692600 +step:1878 train loss:3.727607 +step:1879 train loss:3.695232 +step:1880 train loss:3.698453 +step:1881 train loss:3.708000 +step:1882 train loss:3.639999 +step:1883 train loss:3.686581 +step:1884 train loss:3.762255 +step:1885 train loss:3.755879 +step:1886 train loss:3.719782 +step:1887 train loss:3.737474 +step:1888 train loss:3.653688 +step:1889 train loss:3.684679 +step:1890 train loss:3.661746 +step:1891 train loss:3.742214 +step:1892 train loss:3.731455 +step:1893 train loss:3.686624 +step:1894 train loss:3.703495 +step:1895 train loss:3.727413 +step:1896 train loss:3.731672 +step:1897 train loss:3.704486 +step:1898 train loss:3.744455 +step:1899 train loss:3.761297 +step:1900 train loss:3.705317 +step:1901 train loss:3.703799 +step:1902 train loss:3.767604 +step:1903 train loss:3.668539 +step:1904 train loss:3.675086 +step:1905 train loss:3.704628 +step:1906 train loss:3.718690 +step:1907 train loss:3.693003 +step:1908 train loss:3.690151 +step:1909 train loss:3.737786 +step:1910 train loss:3.679148 +step:1911 train loss:3.723517 +step:1912 train loss:3.782127 +step:1913 train loss:3.747218 +step:1914 train loss:3.735456 +step:1915 train loss:3.787520 +step:1916 train loss:3.732668 +step:1917 train loss:3.736109 +step:1918 train loss:3.714668 +step:1919 train loss:3.662834 +step:1920 train loss:3.666209 +step:1921 train loss:3.704997 +step:1922 train loss:3.774518 +step:1923 train loss:3.665620 +step:1924 train loss:3.728864 +step:1925 train 
loss:3.696118 +step:1926 train loss:3.725236 +step:1927 train loss:3.741201 +step:1928 train loss:3.712927 +step:1929 train loss:3.789006 +step:1930 train loss:3.691354 +step:1931 train loss:3.793104 +step:1932 train loss:3.762384 +step:1933 train loss:3.724849 +step:1934 train loss:3.631546 +step:1935 train loss:3.710070 +step:1936 train loss:3.735441 +step:1937 train loss:3.747230 +step:1938 train loss:3.672336 +step:1939 train loss:3.725077 +step:1940 train loss:3.727912 +step:1941 train loss:3.720014 +step:1942 train loss:3.686275 +step:1943 train loss:3.693437 +step:1944 train loss:3.742309 +step:1945 train loss:3.640989 +step:1946 train loss:3.730014 +step:1947 train loss:3.743268 +step:1948 train loss:3.720791 +step:1949 train loss:3.673526 +step:1950 train loss:3.684992 +step:1951 train loss:3.687968 +step:1952 train loss:3.679714 +step:1953 train loss:3.731506 +step:1954 train loss:3.701679 +step:1955 train loss:3.709254 +step:1956 train loss:3.720714 +step:1957 train loss:3.768850 +step:1958 train loss:3.751205 +step:1959 train loss:3.732933 +step:1960 train loss:3.726286 +step:1961 train loss:3.706245 +step:1962 train loss:3.763962 +step:1963 train loss:3.685415 +step:1964 train loss:3.834520 +step:1965 train loss:3.694425 +step:1966 train loss:3.704310 +step:1967 train loss:3.681698 +step:1968 train loss:3.736378 +step:1969 train loss:3.660115 +step:1970 train loss:3.778761 +step:1971 train loss:3.684582 +step:1972 train loss:3.742457 +step:1973 train loss:3.720061 +step:1974 train loss:3.656344 +step:1975 train loss:3.736998 +step:1976 train loss:3.660103 +step:1977 train loss:3.846020 +step:1978 train loss:3.735091 +step:1979 train loss:3.725698 +step:1980 train loss:3.708270 +step:1981 train loss:3.704630 +step:1982 train loss:3.653572 +step:1983 train loss:3.730621 +step:1984 train loss:3.715230 +step:1985 train loss:3.741325 +step:1986 train loss:3.766101 +step:1987 train loss:3.728694 +step:1988 train loss:3.653980 +step:1989 train loss:3.745358 +step:1990 train loss:3.710721 +step:1991 train loss:3.711183 +step:1992 train loss:3.714270 +step:1993 train loss:3.666726 +step:1994 train loss:3.684795 +step:1995 train loss:3.648470 +step:1996 train loss:3.664962 +step:1997 train loss:3.682930 +step:1998 train loss:3.678480 +step:1999 train loss:3.688252 +step:2000 validation loss:3.646748 total_sharp:8.1633e-03 L1_sharp:9.2763e-03 L2_sharp:3.0769e-03 L3_sharp:2.3015e-03 L4_sharp:1.3351e-03 L5_sharp:1.3758e-03 L6_sharp:1.8053e-03 L7_sharp:1.9522e-03 L8_sharp:1.7810e-03 L9_sharp:1.0591e-03 L10_sharp:6.1867e-04 L11_sharp:6.5500e-04 L12_sharp:1.1464e-03 total_fnorm:2.3962e+00 total_l1_linf:2.0498e+04 total_spectral:2.3962e+00 L1_fnorm:5.8845e-01 L2_fnorm:5.1165e-01 L3_fnorm:5.1084e-01 L4_fnorm:5.4952e-01 L5_fnorm:5.8154e-01 L6_fnorm:5.8451e-01 L7_fnorm:5.9683e-01 L8_fnorm:5.9664e-01 L9_fnorm:5.9728e-01 L10_fnorm:5.9767e-01 L11_fnorm:5.9781e-01 L12_fnorm:6.0342e-01 L1_l1linf:4.3536e-01 L2_l1linf:4.0476e-01 L3_l1linf:3.9682e-01 L4_l1linf:3.9769e-01 L5_l1linf:4.0909e-01 L6_l1linf:4.1233e-01 L7_l1linf:4.1447e-01 L8_l1linf:4.1289e-01 L9_l1linf:4.1619e-01 L10_l1linf:4.0909e-01 L11_l1linf:4.0621e-01 L12_l1linf:3.9945e-01 L1_spectral:1.2053e-02 L2_spectral:1.2049e-02 L3_spectral:1.2040e-02 L4_spectral:1.2045e-02 L5_spectral:1.2044e-02 L6_spectral:1.2063e-02 L7_spectral:1.2067e-02 L8_spectral:1.2059e-02 L9_spectral:1.2044e-02 L10_spectral:1.2044e-02 L11_spectral:1.2046e-02 L12_spectral:1.2049e-02 v_norm:2.3962e+00 cos_v_-g_hvp:4.0396e-02 g_hvp_norm:6.4091e-01 cos_v_-g_t:4.4844e-02 
g_t_norm:5.8052e-01 hv_norm:9.1880e-01 cos_v_hv:2.1289e-02 hg_norm:2.8505e+01 cos_g_hg:6.0528e-01 v_par:6.3231e-03 v_perp:2.3962e+00 L1_cos_v_neg_g:2.9077e-02 L1_v_norm:5.8845e-01 L2_cos_v_neg_g:4.0692e-02 L2_v_norm:5.1165e-01 L3_cos_v_neg_g:3.5945e-02 L3_v_norm:5.1084e-01 L4_cos_v_neg_g:3.7740e-02 L4_v_norm:5.4952e-01 L5_cos_v_neg_g:4.4163e-02 L5_v_norm:5.8154e-01 L6_cos_v_neg_g:4.5032e-02 L6_v_norm:5.8451e-01 L7_cos_v_neg_g:4.4498e-02 L7_v_norm:5.9683e-01 L8_cos_v_neg_g:4.5098e-02 L8_v_norm:5.9664e-01 L9_cos_v_neg_g:4.8938e-02 L9_v_norm:5.9728e-01 L10_cos_v_neg_g:5.2983e-02 L10_v_norm:5.9767e-01 L11_cos_v_neg_g:6.0764e-02 L11_v_norm:5.9781e-01 L12_cos_v_neg_g:8.6779e-02 L12_v_norm:6.0342e-01 +step:2000 train loss:3.692533 +step:2001 train loss:3.709105 +step:2002 train loss:3.682483 +step:2003 train loss:3.674420 +step:2004 train loss:3.720597 +step:2005 train loss:3.729885 +step:2006 train loss:3.695668 +step:2007 train loss:3.704792 +step:2008 train loss:3.714008 +step:2009 train loss:3.677979 +step:2010 train loss:3.659937 +step:2011 train loss:3.696858 +step:2012 train loss:3.827581 +step:2013 train loss:3.688046 +step:2014 train loss:3.704614 +step:2015 train loss:3.716831 +step:2016 train loss:3.694263 +step:2017 train loss:3.752208 +step:2018 train loss:3.686328 +step:2019 train loss:3.732177 +step:2020 train loss:3.726588 +step:2021 train loss:3.711755 +step:2022 train loss:3.744430 +step:2023 train loss:3.664666 +step:2024 train loss:3.726876 +step:2025 train loss:3.729137 +step:2026 train loss:3.792121 +step:2027 train loss:3.661942 +step:2028 train loss:3.695415 +step:2029 train loss:3.668908 +step:2030 train loss:3.717191 +step:2031 train loss:3.730613 +step:2032 train loss:3.710355 +step:2033 train loss:3.736622 +step:2034 train loss:3.693549 +step:2035 train loss:3.658814 +step:2036 train loss:3.728058 +step:2037 train loss:3.681677 +step:2038 train loss:3.688406 +step:2039 train loss:3.689560 +step:2040 train loss:3.681630 +step:2041 train loss:3.684377 +step:2042 train loss:3.710354 +step:2043 train loss:3.644166 +step:2044 train loss:3.685340 +step:2045 train loss:3.676735 +step:2046 train loss:3.679641 +step:2047 train loss:3.793948 +step:2048 train loss:3.639598 +step:2049 train loss:3.731688 +step:2050 train loss:3.718033 +step:2051 train loss:3.689419 +step:2052 train loss:3.626487 +step:2053 train loss:3.655042 +step:2054 train loss:3.688205 +step:2055 train loss:3.682524 +step:2056 train loss:3.666421 +step:2057 train loss:3.683934 +step:2058 train loss:3.588349 +step:2059 train loss:3.661651 +step:2060 train loss:3.624577 +step:2061 train loss:3.722689 +step:2062 train loss:3.703761 +step:2063 train loss:3.698747 +step:2064 train loss:3.698377 +step:2065 train loss:3.670218 +step:2066 train loss:3.687693 +step:2067 train loss:3.636771 +step:2068 train loss:3.688473 +step:2069 train loss:3.709734 +step:2070 train loss:3.695430 +step:2071 train loss:3.675628 +step:2072 train loss:3.665005 +step:2073 train loss:3.739422 +step:2074 train loss:3.695951 +step:2075 train loss:3.731720 +step:2076 train loss:3.638562 +step:2077 train loss:3.659651 +step:2078 train loss:3.719699 +step:2079 train loss:3.669608 +step:2080 train loss:3.688482 +step:2081 train loss:3.662375 +step:2082 train loss:3.701152 +step:2083 train loss:3.711973 +step:2084 train loss:3.660615 +step:2085 train loss:3.681667 +step:2086 train loss:3.646412 +step:2087 train loss:3.657163 +step:2088 train loss:3.654146 +step:2089 train loss:3.736539 +step:2090 train loss:3.667651 +step:2091 train loss:3.675570 
+step:2092 train loss:3.692702 +step:2093 train loss:3.687647 +step:2094 train loss:3.794494 +step:2095 train loss:3.799258 +step:2096 train loss:3.706144 +step:2097 train loss:3.683387 +step:2098 train loss:3.707699 +step:2099 train loss:3.665874 +step:2100 train loss:3.663529 +step:2101 train loss:3.666869 +step:2102 train loss:3.664578 +step:2103 train loss:3.691876 +step:2104 train loss:3.631217 +step:2105 train loss:3.831734 +step:2106 train loss:3.763149 +step:2107 train loss:3.730503 +step:2108 train loss:3.754087 +step:2109 train loss:3.670215 +step:2110 train loss:3.717730 +step:2111 train loss:3.711701 +step:2112 train loss:3.692986 +step:2113 train loss:3.681938 +step:2114 train loss:3.724775 +step:2115 train loss:3.690074 +step:2116 train loss:3.704114 +step:2117 train loss:3.716695 +step:2118 train loss:3.722819 +step:2119 train loss:3.624899 +step:2120 train loss:3.712217 +step:2121 train loss:3.669371 +step:2122 train loss:3.652562 +step:2123 train loss:3.753148 +step:2124 train loss:3.685923 +step:2125 train loss:3.662873 +step:2126 train loss:3.784956 +step:2127 train loss:3.634053 +step:2128 train loss:3.770218 +step:2129 train loss:3.682435 +step:2130 train loss:3.732925 +step:2131 train loss:3.639623 +step:2132 train loss:3.629433 +step:2133 train loss:3.640750 +step:2134 train loss:3.643033 +step:2135 train loss:3.675006 +step:2136 train loss:3.642440 +step:2137 train loss:3.604452 +step:2138 train loss:3.679339 +step:2139 train loss:3.654456 +step:2140 train loss:3.564469 +step:2141 train loss:3.719631 +step:2142 train loss:3.696852 +step:2143 train loss:3.767826 +step:2144 train loss:3.713682 +step:2145 train loss:3.682349 +step:2146 train loss:3.930110 +step:2147 train loss:3.670889 +step:2148 train loss:3.709778 +step:2149 train loss:3.649952 +step:2150 train loss:3.645212 +step:2151 train loss:3.751248 +step:2152 train loss:3.666749 +step:2153 train loss:3.668325 +step:2154 train loss:3.657065 +step:2155 train loss:3.733002 +step:2156 train loss:3.700577 +step:2157 train loss:3.677822 +step:2158 train loss:3.664532 +step:2159 train loss:3.627733 +step:2160 train loss:3.740221 +step:2161 train loss:3.697094 +step:2162 train loss:3.646941 +step:2163 train loss:3.696703 +step:2164 train loss:3.630614 +step:2165 train loss:3.694355 +step:2166 train loss:3.636446 +step:2167 train loss:3.686310 +step:2168 train loss:3.707826 +step:2169 train loss:3.677213 +step:2170 train loss:3.689976 +step:2171 train loss:3.685139 +step:2172 train loss:3.669024 +step:2173 train loss:3.666924 +step:2174 train loss:3.858210 +step:2175 train loss:3.707287 +step:2176 train loss:3.674613 +step:2177 train loss:3.685579 +step:2178 train loss:3.703918 +step:2179 train loss:3.620539 +step:2180 train loss:3.641071 +step:2181 train loss:3.626838 +step:2182 train loss:3.640266 +step:2183 train loss:3.672150 +step:2184 train loss:3.682711 +step:2185 train loss:3.687835 +step:2186 train loss:3.717367 +step:2187 train loss:3.717459 +step:2188 train loss:3.688335 +step:2189 train loss:3.676667 +step:2190 train loss:3.692051 +step:2191 train loss:3.664685 +step:2192 train loss:3.664427 +step:2193 train loss:3.669000 +step:2194 train loss:3.690660 +step:2195 train loss:3.694380 +step:2196 train loss:3.683702 +step:2197 train loss:3.698503 +step:2198 train loss:3.682952 +step:2199 train loss:3.700659 +step:2200 train loss:3.680867 +step:2201 train loss:3.671565 +step:2202 train loss:3.677078 +step:2203 train loss:3.644773 +step:2204 train loss:3.596111 +step:2205 train loss:3.703161 +step:2206 train 
loss:3.651551 +step:2207 train loss:3.650669 +step:2208 train loss:3.664291 +step:2209 train loss:3.629179 +step:2210 train loss:3.652652 +step:2211 train loss:3.614093 +step:2212 train loss:3.718622 +step:2213 train loss:3.694211 +step:2214 train loss:3.642662 +step:2215 train loss:3.701685 +step:2216 train loss:3.679229 +step:2217 train loss:3.673145 +step:2218 train loss:3.635443 +step:2219 train loss:3.628861 +step:2220 train loss:3.669145 +step:2221 train loss:3.666165 +step:2222 train loss:3.662136 +step:2223 train loss:3.630218 +step:2224 train loss:3.726242 +step:2225 train loss:3.646465 +step:2226 train loss:3.658793 +step:2227 train loss:3.684046 +step:2228 train loss:3.720088 +step:2229 train loss:3.615286 +step:2230 train loss:3.727869 +step:2231 train loss:3.679986 +step:2232 train loss:3.694080 +step:2233 train loss:3.664201 +step:2234 train loss:3.716794 +step:2235 train loss:3.665711 +step:2236 train loss:3.610721 +step:2237 train loss:3.693983 +step:2238 train loss:3.633720 +step:2239 train loss:3.709370 +step:2240 train loss:3.697834 +step:2241 train loss:3.680685 +step:2242 train loss:3.741534 +step:2243 train loss:3.662719 +step:2244 train loss:3.643845 +step:2245 train loss:3.808502 +step:2246 train loss:3.673702 +step:2247 train loss:3.647092 +step:2248 train loss:3.634463 +step:2249 train loss:3.733263 +step:2250 validation loss:3.616199 +step:2250 train loss:3.651065 +step:2251 train loss:3.761108 +step:2252 train loss:3.715575 +step:2253 train loss:3.669963 +step:2254 train loss:3.662053 +step:2255 train loss:3.647467 +step:2256 train loss:3.645203 +step:2257 train loss:3.622585 +step:2258 train loss:3.614618 +step:2259 train loss:3.713272 +step:2260 train loss:3.583697 +step:2261 train loss:3.682703 +step:2262 train loss:3.690598 +step:2263 train loss:3.666240 +step:2264 train loss:3.696028 +step:2265 train loss:3.626769 +step:2266 train loss:3.835964 +step:2267 train loss:3.688026 +step:2268 train loss:3.695504 +step:2269 train loss:3.657277 +step:2270 train loss:3.550907 +step:2271 train loss:3.628533 +step:2272 train loss:3.649819 +step:2273 train loss:3.695726 +step:2274 train loss:3.717588 +step:2275 train loss:3.648266 +step:2276 train loss:3.704594 +step:2277 train loss:3.616878 +step:2278 train loss:3.694402 +step:2279 train loss:3.604031 +step:2280 train loss:3.615824 +step:2281 train loss:3.648247 +step:2282 train loss:3.631730 +step:2283 train loss:3.656772 +step:2284 train loss:3.631946 +step:2285 train loss:3.663506 +step:2286 train loss:3.703194 +step:2287 train loss:3.665417 +step:2288 train loss:3.637210 +step:2289 train loss:3.717815 +step:2290 train loss:3.716078 +step:2291 train loss:3.797999 +step:2292 train loss:3.632607 +step:2293 train loss:3.679741 +step:2294 train loss:3.705125 +step:2295 train loss:3.632891 +step:2296 train loss:3.634257 +step:2297 train loss:3.672262 +step:2298 train loss:3.682917 +step:2299 train loss:3.704430 +step:2300 train loss:3.635247 +step:2301 train loss:3.618895 +step:2302 train loss:3.743585 +step:2303 train loss:3.688718 +step:2304 train loss:3.635553 +step:2305 train loss:3.642438 +step:2306 train loss:3.662341 +step:2307 train loss:3.640808 +step:2308 train loss:3.687509 +step:2309 train loss:3.701685 +step:2310 train loss:3.673215 +step:2311 train loss:3.649615 +step:2312 train loss:3.671356 +step:2313 train loss:3.686526 +step:2314 train loss:3.721285 +step:2315 train loss:3.705407 +step:2316 train loss:3.684051 +step:2317 train loss:3.608313 +step:2318 train loss:3.655929 +step:2319 train loss:3.673243 
+step:2320 train loss:3.664204 +step:2321 train loss:3.650341 +step:2322 train loss:3.677868 +step:2323 train loss:3.648617 +step:2324 train loss:3.711133 +step:2325 train loss:3.697361 +step:2326 train loss:3.629850 +step:2327 train loss:3.650609 +step:2328 train loss:3.654361 +step:2329 train loss:3.655837 +step:2330 train loss:3.652703 +step:2331 train loss:3.692564 +step:2332 train loss:3.672163 +step:2333 train loss:3.600944 +step:2334 train loss:3.705933 +step:2335 train loss:3.738641 +step:2336 train loss:3.675430 +step:2337 train loss:3.682948 +step:2338 train loss:3.665175 +step:2339 train loss:3.658353 +step:2340 train loss:3.641316 +step:2341 train loss:3.696978 +step:2342 train loss:3.654855 +step:2343 train loss:3.668731 +step:2344 train loss:3.657833 +step:2345 train loss:3.620514 +step:2346 train loss:3.633704 +step:2347 train loss:3.731637 +step:2348 train loss:3.706633 +step:2349 train loss:3.644608 +step:2350 train loss:3.679503 +step:2351 train loss:3.689033 +step:2352 train loss:3.663330 +step:2353 train loss:3.582633 +step:2354 train loss:3.637331 +step:2355 train loss:3.684047 +step:2356 train loss:3.602096 +step:2357 train loss:3.702241 +step:2358 train loss:3.694354 +step:2359 train loss:3.661670 +step:2360 train loss:3.669796 +step:2361 train loss:3.695263 +step:2362 train loss:3.622305 +step:2363 train loss:3.636087 +step:2364 train loss:3.672364 +step:2365 train loss:3.687754 +step:2366 train loss:3.606657 +step:2367 train loss:3.691674 +step:2368 train loss:3.622877 +step:2369 train loss:3.600080 +step:2370 train loss:3.667479 +step:2371 train loss:3.654083 +step:2372 train loss:3.649430 +step:2373 train loss:3.728532 +step:2374 train loss:3.674999 +step:2375 train loss:3.649595 +step:2376 train loss:3.706170 +step:2377 train loss:3.687138 +step:2378 train loss:3.820340 +step:2379 train loss:3.735204 +step:2380 train loss:3.624128 +step:2381 train loss:3.664653 +step:2382 train loss:3.672652 +step:2383 train loss:3.676209 +step:2384 train loss:3.695713 +step:2385 train loss:3.695504 +step:2386 train loss:3.748559 +step:2387 train loss:3.673051 +step:2388 train loss:3.673654 +step:2389 train loss:3.649381 +step:2390 train loss:3.663314 +step:2391 train loss:3.678635 +step:2392 train loss:3.661292 +step:2393 train loss:3.644893 +step:2394 train loss:3.644499 +step:2395 train loss:3.739981 +step:2396 train loss:3.600210 +step:2397 train loss:3.657154 +step:2398 train loss:3.584290 +step:2399 train loss:3.709564 +step:2400 train loss:3.637713 +step:2401 train loss:3.656965 +step:2402 train loss:3.707916 +step:2403 train loss:3.718481 +step:2404 train loss:3.669347 +step:2405 train loss:3.644910 +step:2406 train loss:3.651823 +step:2407 train loss:3.661110 +step:2408 train loss:3.690206 +step:2409 train loss:3.599728 +step:2410 train loss:3.646612 +step:2411 train loss:3.635002 +step:2412 train loss:3.588713 +step:2413 train loss:3.687318 +step:2414 train loss:3.692407 +step:2415 train loss:3.652385 +step:2416 train loss:3.620004 +step:2417 train loss:3.739107 +step:2418 train loss:3.616124 +step:2419 train loss:3.630652 +step:2420 train loss:3.628345 +step:2421 train loss:3.643255 +step:2422 train loss:3.679046 +step:2423 train loss:3.656840 +step:2424 train loss:3.644703 +step:2425 train loss:3.683967 +step:2426 train loss:3.629878 +step:2427 train loss:3.705735 +step:2428 train loss:3.686056 +step:2429 train loss:3.638991 +step:2430 train loss:3.594484 +step:2431 train loss:3.613544 +step:2432 train loss:3.624297 +step:2433 train loss:3.660403 +step:2434 train 
loss:3.653021 +step:2435 train loss:3.805473 +step:2436 train loss:3.647905 +step:2437 train loss:3.633247 +step:2438 train loss:3.649481 +step:2439 train loss:3.627765 +step:2440 train loss:3.600745 +step:2441 train loss:3.613903 +step:2442 train loss:3.671306 +step:2443 train loss:3.663542 +step:2444 train loss:3.732739 +step:2445 train loss:3.663611 +step:2446 train loss:3.648805 +step:2447 train loss:3.628192 +step:2448 train loss:3.640070 +step:2449 train loss:3.675667 +step:2450 train loss:3.636999 +step:2451 train loss:3.634953 +step:2452 train loss:3.666471 +step:2453 train loss:3.642133 +step:2454 train loss:3.640730 +step:2455 train loss:3.693743 +step:2456 train loss:3.644144 +step:2457 train loss:3.664436 +step:2458 train loss:3.683236 +step:2459 train loss:3.643713 +step:2460 train loss:3.648217 +step:2461 train loss:3.660934 +step:2462 train loss:3.662560 +step:2463 train loss:3.644789 +step:2464 train loss:3.736336 +step:2465 train loss:3.782758 +step:2466 train loss:3.723872 +step:2467 train loss:3.670091 +step:2468 train loss:3.665600 +step:2469 train loss:3.666829 +step:2470 train loss:3.649049 +step:2471 train loss:3.656242 +step:2472 train loss:3.675912 +step:2473 train loss:3.640841 +step:2474 train loss:3.673141 +step:2475 train loss:3.701013 +step:2476 train loss:3.751822 +step:2477 train loss:3.681732 +step:2478 train loss:3.663271 +step:2479 train loss:3.625842 +step:2480 train loss:3.656454 +step:2481 train loss:3.632525 +step:2482 train loss:3.623530 +step:2483 train loss:3.675821 +step:2484 train loss:3.710086 +step:2485 train loss:3.742332 +step:2486 train loss:3.636040 +step:2487 train loss:3.649534 +step:2488 train loss:3.672689 +step:2489 train loss:3.600188 +step:2490 train loss:3.648777 +step:2491 train loss:3.613632 +step:2492 train loss:3.607071 +step:2493 train loss:3.652491 +step:2494 train loss:3.661298 +step:2495 train loss:3.637970 +step:2496 train loss:3.677904 +step:2497 train loss:3.686565 +step:2498 train loss:3.720836 +step:2499 train loss:3.659523 +step:2500 validation loss:3.591554 total_sharp:6.6391e-03 L1_sharp:1.0337e-02 L2_sharp:4.1904e-03 L3_sharp:2.9734e-03 L4_sharp:1.2736e-03 L5_sharp:1.0987e-03 L6_sharp:1.4652e-03 L7_sharp:1.5932e-03 L8_sharp:1.2785e-03 L9_sharp:8.2405e-04 L10_sharp:5.1729e-04 L11_sharp:4.5189e-04 L12_sharp:3.5392e-04 total_fnorm:2.3985e+00 total_l1_linf:2.0519e+04 total_spectral:2.3985e+00 L1_fnorm:5.8640e-01 L2_fnorm:4.9333e-01 L3_fnorm:4.7877e-01 L4_fnorm:5.5022e-01 L5_fnorm:5.8486e-01 L6_fnorm:5.8725e-01 L7_fnorm:5.9830e-01 L8_fnorm:5.9776e-01 L9_fnorm:5.9788e-01 L10_fnorm:5.9765e-01 L11_fnorm:5.9697e-01 L12_fnorm:6.0347e-01 L1_l1linf:4.4299e-01 L2_l1linf:4.1398e-01 L3_l1linf:3.9314e-01 L4_l1linf:4.0018e-01 L5_l1linf:4.0709e-01 L6_l1linf:4.1012e-01 L7_l1linf:4.0874e-01 L8_l1linf:4.1322e-01 L9_l1linf:4.1331e-01 L10_l1linf:4.1096e-01 L11_l1linf:4.0678e-01 L12_l1linf:3.9581e-01 L1_spectral:1.2051e-02 L2_spectral:1.2070e-02 L3_spectral:1.5225e-02 L4_spectral:1.2047e-02 L5_spectral:1.2046e-02 L6_spectral:1.2048e-02 L7_spectral:1.2069e-02 L8_spectral:1.2056e-02 L9_spectral:1.2044e-02 L10_spectral:1.2044e-02 L11_spectral:1.2048e-02 L12_spectral:1.2043e-02 v_norm:2.3985e+00 cos_v_-g_hvp:3.8091e-02 g_hvp_norm:6.2686e-01 cos_v_-g_t:4.2959e-02 g_t_norm:5.6127e-01 hv_norm:8.4963e-01 cos_v_hv:1.8742e-02 hg_norm:3.1973e+01 cos_g_hg:5.4769e-01 v_par:6.2479e-03 v_perp:2.3985e+00 L1_cos_v_neg_g:2.9897e-02 L1_v_norm:5.8640e-01 L2_cos_v_neg_g:3.9459e-02 L2_v_norm:4.9333e-01 L3_cos_v_neg_g:3.3811e-02 L3_v_norm:4.7877e-01 
L4_cos_v_neg_g:3.4065e-02 L4_v_norm:5.5022e-01 L5_cos_v_neg_g:3.8375e-02 L5_v_norm:5.8486e-01 L6_cos_v_neg_g:3.9152e-02 L6_v_norm:5.8725e-01 L7_cos_v_neg_g:4.0128e-02 L7_v_norm:5.9830e-01 L8_cos_v_neg_g:4.0512e-02 L8_v_norm:5.9776e-01 L9_cos_v_neg_g:4.2038e-02 L9_v_norm:5.9788e-01 L10_cos_v_neg_g:4.9176e-02 L10_v_norm:5.9765e-01 L11_cos_v_neg_g:5.8286e-02 L11_v_norm:5.9697e-01 L12_cos_v_neg_g:8.7098e-02 L12_v_norm:6.0347e-01 +step:2500 train loss:3.653180 +step:2501 train loss:3.568283 +step:2502 train loss:3.747344 +step:2503 train loss:3.658685 +step:2504 train loss:3.678278 +step:2505 train loss:3.644463 +step:2506 train loss:3.676827 +step:2507 train loss:3.602343 +step:2508 train loss:3.661633 +step:2509 train loss:3.606451 +step:2510 train loss:3.638576 +step:2511 train loss:3.604650 +step:2512 train loss:3.697895 +step:2513 train loss:3.630374 +step:2514 train loss:3.659712 +step:2515 train loss:3.666809 +step:2516 train loss:3.628400 +step:2517 train loss:3.577260 +step:2518 train loss:3.621742 +step:2519 train loss:3.685628 +step:2520 train loss:3.610001 +step:2521 train loss:3.661036 +step:2522 train loss:3.662094 +step:2523 train loss:3.645757 +step:2524 train loss:3.681347 +step:2525 train loss:3.603783 +step:2526 train loss:3.641066 +step:2527 train loss:3.604362 +step:2528 train loss:3.724314 +step:2529 train loss:3.614533 +step:2530 train loss:3.640827 +step:2531 train loss:3.649953 +step:2532 train loss:3.640317 +step:2533 train loss:3.716941 +step:2534 train loss:3.732503 +step:2535 train loss:3.605874 +step:2536 train loss:3.647870 +step:2537 train loss:3.592596 +step:2538 train loss:3.673832 +step:2539 train loss:3.595873 +step:2540 train loss:3.643947 +step:2541 train loss:3.624671 +step:2542 train loss:3.629367 +step:2543 train loss:3.644333 +step:2544 train loss:3.638596 +step:2545 train loss:3.600717 +step:2546 train loss:3.641439 +step:2547 train loss:3.665486 +step:2548 train loss:3.651160 +step:2549 train loss:3.566725 +step:2550 train loss:3.654804 +step:2551 train loss:3.615605 +step:2552 train loss:3.670235 +step:2553 train loss:3.603966 +step:2554 train loss:3.716554 +step:2555 train loss:3.592673 +step:2556 train loss:3.637625 +step:2557 train loss:3.618236 +step:2558 train loss:3.656892 +step:2559 train loss:3.647974 +step:2560 train loss:3.653978 +step:2561 train loss:3.609699 +step:2562 train loss:3.588001 +step:2563 train loss:3.688060 +step:2564 train loss:3.659257 +step:2565 train loss:3.691405 +step:2566 train loss:3.611931 +step:2567 train loss:3.639222 +step:2568 train loss:3.558777 +step:2569 train loss:3.653507 +step:2570 train loss:3.593491 +step:2571 train loss:3.644518 +step:2572 train loss:3.561868 +step:2573 train loss:3.584495 +step:2574 train loss:3.546928 +step:2575 train loss:3.612654 +step:2576 train loss:3.621168 +step:2577 train loss:3.663291 +step:2578 train loss:3.600387 +step:2579 train loss:3.663024 +step:2580 train loss:3.640193 +step:2581 train loss:3.654985 +step:2582 train loss:3.658157 +step:2583 train loss:3.651736 +step:2584 train loss:3.636955 +step:2585 train loss:3.568707 +step:2586 train loss:3.638678 +step:2587 train loss:3.549547 +step:2588 train loss:3.582666 +step:2589 train loss:3.614841 +step:2590 train loss:3.600688 +step:2591 train loss:3.606168 +step:2592 train loss:3.653917 +step:2593 train loss:3.654531 +step:2594 train loss:3.657841 +step:2595 train loss:3.633841 +step:2596 train loss:3.663175 +step:2597 train loss:3.619884 +step:2598 train loss:3.664725 +step:2599 train loss:3.610782 +step:2600 train 
loss:3.626662 +step:2601 train loss:3.634815 +step:2602 train loss:3.684482 +step:2603 train loss:3.632755 +step:2604 train loss:3.617144 +step:2605 train loss:3.598243 +step:2606 train loss:3.719107 +step:2607 train loss:3.621917 +step:2608 train loss:3.668787 +step:2609 train loss:3.673224 +step:2610 train loss:3.635324 +step:2611 train loss:3.586356 +step:2612 train loss:3.623356 +step:2613 train loss:3.606174 +step:2614 train loss:3.667672 +step:2615 train loss:3.738241 +step:2616 train loss:3.669719 +step:2617 train loss:3.636708 +step:2618 train loss:3.644820 +step:2619 train loss:3.654467 +step:2620 train loss:3.697474 +step:2621 train loss:3.633543 +step:2622 train loss:3.669370 +step:2623 train loss:3.608253 +step:2624 train loss:3.609298 +step:2625 train loss:3.652408 +step:2626 train loss:3.623831 +step:2627 train loss:3.621759 +step:2628 train loss:3.736232 +step:2629 train loss:3.660048 +step:2630 train loss:3.603170 +step:2631 train loss:3.671641 +step:2632 train loss:3.626473 +step:2633 train loss:3.651638 +step:2634 train loss:3.619383 +step:2635 train loss:3.621899 +step:2636 train loss:3.568478 +step:2637 train loss:3.573974 +step:2638 train loss:3.564417 +step:2639 train loss:3.645880 +step:2640 train loss:3.654756 +step:2641 train loss:3.564094 +step:2642 train loss:3.597131 +step:2643 train loss:3.671631 +step:2644 train loss:3.755549 +step:2645 train loss:3.637519 +step:2646 train loss:3.604181 +step:2647 train loss:3.591172 +step:2648 train loss:3.708356 +step:2649 train loss:3.698589 +step:2650 train loss:3.655587 +step:2651 train loss:3.711160 +step:2652 train loss:3.724081 +step:2653 train loss:3.620595 +step:2654 train loss:3.679666 +step:2655 train loss:3.670635 +step:2656 train loss:3.597159 +step:2657 train loss:3.587445 +step:2658 train loss:3.565147 +step:2659 train loss:3.588890 +step:2660 train loss:3.649021 +step:2661 train loss:3.606140 +step:2662 train loss:3.590085 +step:2663 train loss:3.714484 +step:2664 train loss:3.611960 +step:2665 train loss:3.655525 +step:2666 train loss:3.659206 +step:2667 train loss:3.714934 +step:2668 train loss:3.628212 +step:2669 train loss:3.601466 +step:2670 train loss:3.594324 +step:2671 train loss:3.645765 +step:2672 train loss:3.613561 +step:2673 train loss:3.637994 +step:2674 train loss:3.705737 +step:2675 train loss:3.692342 +step:2676 train loss:3.566460 +step:2677 train loss:3.685277 +step:2678 train loss:3.634634 +step:2679 train loss:3.683578 +step:2680 train loss:3.637424 +step:2681 train loss:3.677696 +step:2682 train loss:3.609412 +step:2683 train loss:3.560646 +step:2684 train loss:3.652023 +step:2685 train loss:3.653841 +step:2686 train loss:3.621234 +step:2687 train loss:3.658067 +step:2688 train loss:3.585616 +step:2689 train loss:3.656424 +step:2690 train loss:3.665665 +step:2691 train loss:3.565799 +step:2692 train loss:3.671822 +step:2693 train loss:3.603756 +step:2694 train loss:3.588710 +step:2695 train loss:3.663839 +step:2696 train loss:3.657764 +step:2697 train loss:3.588466 +step:2698 train loss:3.672722 +step:2699 train loss:3.627917 +step:2700 train loss:3.565512 +step:2701 train loss:3.588768 +step:2702 train loss:3.579457 +step:2703 train loss:3.669203 +step:2704 train loss:3.577053 +step:2705 train loss:3.712538 +step:2706 train loss:3.544091 +step:2707 train loss:3.590863 +step:2708 train loss:3.607045 +step:2709 train loss:3.657489 +step:2710 train loss:3.689933 +step:2711 train loss:3.606881 +step:2712 train loss:3.553151 +step:2713 train loss:3.591960 +step:2714 train loss:3.637331 
+step:2715 train loss:3.575731 +step:2716 train loss:3.621921 +step:2717 train loss:3.611903 +step:2718 train loss:3.624349 +step:2719 train loss:3.583975 +step:2720 train loss:3.640123 +step:2721 train loss:3.659107 +step:2722 train loss:3.582401 +step:2723 train loss:3.625945 +step:2724 train loss:3.584140 +step:2725 train loss:3.581094 +step:2726 train loss:3.611291 +step:2727 train loss:3.547740 +step:2728 train loss:3.607756 +step:2729 train loss:3.572701 +step:2730 train loss:3.620076 +step:2731 train loss:3.594747 +step:2732 train loss:3.622302 +step:2733 train loss:3.636914 +step:2734 train loss:3.577492 +step:2735 train loss:3.568809 +step:2736 train loss:3.628903 +step:2737 train loss:3.551376 +step:2738 train loss:3.559870 +step:2739 train loss:3.617745 +step:2740 train loss:3.595529 +step:2741 train loss:3.543291 +step:2742 train loss:3.617739 +step:2743 train loss:3.647214 +step:2744 train loss:3.615666 +step:2745 train loss:3.582875 +step:2746 train loss:3.619932 +step:2747 train loss:3.596920 +step:2748 train loss:3.593356 +step:2749 train loss:3.569387 +step:2750 validation loss:3.563063 +step:2750 train loss:3.633186 +step:2751 train loss:3.612457 +step:2752 train loss:3.609080 +step:2753 train loss:3.631875 +step:2754 train loss:3.638271 +step:2755 train loss:3.583218 +step:2756 train loss:3.605843 +step:2757 train loss:3.627794 +step:2758 train loss:3.602916 +step:2759 train loss:3.604145 +step:2760 train loss:3.626219 +step:2761 train loss:3.583978 +step:2762 train loss:3.591564 +step:2763 train loss:3.615365 +step:2764 train loss:3.655947 +step:2765 train loss:3.610946 +step:2766 train loss:3.624675 +step:2767 train loss:3.638844 +step:2768 train loss:3.595373 +step:2769 train loss:3.574368 +step:2770 train loss:3.599057 +step:2771 train loss:3.649159 +step:2772 train loss:3.719807 +step:2773 train loss:3.689627 +step:2774 train loss:3.548707 +step:2775 train loss:3.610267 +step:2776 train loss:3.609334 +step:2777 train loss:3.646421 +step:2778 train loss:3.667248 +step:2779 train loss:3.618798 +step:2780 train loss:3.618156 +step:2781 train loss:3.591059 +step:2782 train loss:3.610795 +step:2783 train loss:3.598580 +step:2784 train loss:3.686916 +step:2785 train loss:3.595252 +step:2786 train loss:3.566758 +step:2787 train loss:3.672032 +step:2788 train loss:3.603252 +step:2789 train loss:3.621741 +step:2790 train loss:3.597482 +step:2791 train loss:3.612022 +step:2792 train loss:3.598872 +step:2793 train loss:3.606478 +step:2794 train loss:3.574188 +step:2795 train loss:3.580338 +step:2796 train loss:3.624711 +step:2797 train loss:3.606343 +step:2798 train loss:3.600188 +step:2799 train loss:3.577992 +step:2800 train loss:3.630615 +step:2801 train loss:3.586423 +step:2802 train loss:3.639416 +step:2803 train loss:3.644794 +step:2804 train loss:3.578672 +step:2805 train loss:3.686072 +step:2806 train loss:3.643816 +step:2807 train loss:3.570641 +step:2808 train loss:3.592681 +step:2809 train loss:3.605457 +step:2810 train loss:3.621530 +step:2811 train loss:3.550472 +step:2812 train loss:3.630636 +step:2813 train loss:3.659835 +step:2814 train loss:3.570959 +step:2815 train loss:3.604213 +step:2816 train loss:3.549127 +step:2817 train loss:3.622229 +step:2818 train loss:3.593603 +step:2819 train loss:3.541690 +step:2820 train loss:3.605690 +step:2821 train loss:3.703292 +step:2822 train loss:3.642984 +step:2823 train loss:3.599012 +step:2824 train loss:3.637535 +step:2825 train loss:3.566898 +step:2826 train loss:3.584520 +step:2827 train loss:3.578979 +step:2828 
train loss:3.550511 +step:2829 train loss:3.611227 +step:2830 train loss:3.546011 +step:2831 train loss:3.648706 +step:2832 train loss:3.626324 +step:2833 train loss:3.613936 +step:2834 train loss:3.617213 +step:2835 train loss:3.584374 +step:2836 train loss:3.611436 +step:2837 train loss:3.551801 +step:2838 train loss:3.578335 +step:2839 train loss:3.547586 +step:2840 train loss:3.658327 +step:2841 train loss:3.604859 +step:2842 train loss:3.599214 +step:2843 train loss:3.636206 +step:2844 train loss:3.556845 +step:2845 train loss:3.617586 +step:2846 train loss:3.574855 +step:2847 train loss:3.620081 +step:2848 train loss:3.612442 +step:2849 train loss:3.636587 +step:2850 train loss:3.702325 +step:2851 train loss:3.625159 +step:2852 train loss:3.654755 +step:2853 train loss:3.619212 +step:2854 train loss:3.598539 +step:2855 train loss:3.615798 +step:2856 train loss:3.730528 +step:2857 train loss:3.594938 +step:2858 train loss:3.610952 +step:2859 train loss:3.579239 +step:2860 train loss:3.609839 +step:2861 train loss:3.723685 +step:2862 train loss:3.622082 +step:2863 train loss:3.642655 +step:2864 train loss:3.647949 +step:2865 train loss:3.621205 +step:2866 train loss:3.634678 +step:2867 train loss:3.664614 +step:2868 train loss:3.593701 +step:2869 train loss:3.651383 +step:2870 train loss:3.688407 +step:2871 train loss:3.601464 +step:2872 train loss:3.619893 +step:2873 train loss:3.612814 +step:2874 train loss:3.616591 +step:2875 train loss:3.629760 +step:2876 train loss:3.628513 +step:2877 train loss:3.651874 +step:2878 train loss:3.619466 +step:2879 train loss:3.595881 +step:2880 train loss:3.678619 +step:2881 train loss:3.626817 +step:2882 train loss:3.619048 +step:2883 train loss:3.666615 +step:2884 train loss:3.662856 +step:2885 train loss:3.621546 +step:2886 train loss:3.609860 +step:2887 train loss:3.652672 +step:2888 train loss:3.671036 +step:2889 train loss:3.642333 +step:2890 train loss:3.650937 +step:2891 train loss:3.633630 +step:2892 train loss:3.619311 +step:2893 train loss:3.607227 +step:2894 train loss:3.656462 +step:2895 train loss:3.605258 +step:2896 train loss:3.630805 +step:2897 train loss:3.645491 +step:2898 train loss:3.685137 +step:2899 train loss:3.618967 +step:2900 train loss:3.607181 +step:2901 train loss:3.671479 +step:2902 train loss:3.550740 +step:2903 train loss:3.686393 +step:2904 train loss:3.663067 +step:2905 train loss:3.638917 +step:2906 train loss:3.612458 +step:2907 train loss:3.668958 +step:2908 train loss:3.606023 +step:2909 train loss:3.644257 +step:2910 train loss:3.684591 +step:2911 train loss:3.569486 +step:2912 train loss:3.602923 +step:2913 train loss:3.640278 +step:2914 train loss:3.597607 +step:2915 train loss:3.619078 +step:2916 train loss:3.575835 +step:2917 train loss:3.599093 +step:2918 train loss:3.667444 +step:2919 train loss:3.656266 +step:2920 train loss:3.625992 +step:2921 train loss:3.616519 +step:2922 train loss:3.587823 +step:2923 train loss:3.611672 +step:2924 train loss:3.606304 +step:2925 train loss:3.651874 +step:2926 train loss:3.631759 +step:2927 train loss:3.573957 +step:2928 train loss:3.637191 +step:2929 train loss:3.590213 +step:2930 train loss:3.580037 +step:2931 train loss:3.610628 +step:2932 train loss:3.643666 +step:2933 train loss:3.717115 +step:2934 train loss:3.621181 +step:2935 train loss:3.593019 +step:2936 train loss:3.615940 +step:2937 train loss:3.632775 +step:2938 train loss:3.636804 +step:2939 train loss:3.750340 +step:2940 train loss:3.652315 +step:2941 train loss:3.682293 +step:2942 train loss:3.610298 
+step:2943 train loss:3.637963 +step:2944 train loss:3.628053 +step:2945 train loss:3.635174 +step:2946 train loss:3.616129 +step:2947 train loss:3.590803 +step:2948 train loss:3.583320 +step:2949 train loss:3.600938 +step:2950 train loss:3.678938 +step:2951 train loss:3.631781 +step:2952 train loss:3.631752 +step:2953 train loss:3.622321 +step:2954 train loss:3.632860 +step:2955 train loss:3.704939 +step:2956 train loss:3.611423 +step:2957 train loss:3.623866 +step:2958 train loss:3.656822 +step:2959 train loss:3.608134 +step:2960 train loss:3.635507 +step:2961 train loss:3.605580 +step:2962 train loss:3.617671 +step:2963 train loss:3.594816 +step:2964 train loss:3.640226 +step:2965 train loss:3.681017 +step:2966 train loss:3.560475 +step:2967 train loss:3.628668 +step:2968 train loss:3.549527 +step:2969 train loss:3.646098 +step:2970 train loss:3.596711 +step:2971 train loss:3.574465 +step:2972 train loss:3.543977 +step:2973 train loss:3.632604 +step:2974 train loss:3.553319 +step:2975 train loss:3.568130 +step:2976 train loss:3.592256 +step:2977 train loss:3.583817 +step:2978 train loss:3.602256 +step:2979 train loss:3.557351 +step:2980 train loss:3.592226 +step:2981 train loss:3.630662 +step:2982 train loss:3.551478 +step:2983 train loss:3.587140 +step:2984 train loss:3.601235 +step:2985 train loss:3.593722 +step:2986 train loss:3.620478 +step:2987 train loss:3.581210 +step:2988 train loss:3.608300 +step:2989 train loss:3.624869 +step:2990 train loss:3.566488 +step:2991 train loss:3.625214 +step:2992 train loss:3.552921 +step:2993 train loss:3.540398 +step:2994 train loss:3.631256 +step:2995 train loss:3.570503 +step:2996 train loss:3.570658 +step:2997 train loss:3.570380 +step:2998 train loss:3.574843 +step:2999 train loss:3.556639 +step:3000 validation loss:3.548152 total_sharp:8.9624e-03 L1_sharp:2.6818e-02 L2_sharp:9.0769e-03 L3_sharp:1.9179e-03 L4_sharp:1.2048e-03 L5_sharp:1.1265e-03 L6_sharp:1.4938e-03 L7_sharp:1.3397e-03 L8_sharp:1.1517e-03 L9_sharp:8.3280e-04 L10_sharp:4.8488e-04 L11_sharp:4.8187e-04 L12_sharp:4.1993e-04 total_fnorm:2.3853e+00 total_l1_linf:2.0390e+04 total_spectral:2.3853e+00 L1_fnorm:5.7708e-01 L2_fnorm:4.7115e-01 L3_fnorm:4.4422e-01 L4_fnorm:5.5144e-01 L5_fnorm:5.8467e-01 L6_fnorm:5.8923e-01 L7_fnorm:5.9867e-01 L8_fnorm:5.9946e-01 L9_fnorm:5.9766e-01 L10_fnorm:5.9766e-01 L11_fnorm:5.9408e-01 L12_fnorm:6.0201e-01 L1_l1linf:4.4135e-01 L2_l1linf:4.1911e-01 L3_l1linf:4.1059e-01 L4_l1linf:4.0124e-01 L5_l1linf:4.0857e-01 L6_l1linf:4.0985e-01 L7_l1linf:4.1073e-01 L8_l1linf:4.1411e-01 L9_l1linf:4.1498e-01 L10_l1linf:4.1097e-01 L11_l1linf:4.0795e-01 L12_l1linf:3.9411e-01 L1_spectral:1.2051e-02 L2_spectral:1.2062e-02 L3_spectral:1.5687e-02 L4_spectral:1.2041e-02 L5_spectral:1.2048e-02 L6_spectral:1.2044e-02 L7_spectral:1.2055e-02 L8_spectral:1.2066e-02 L9_spectral:1.2047e-02 L10_spectral:1.2043e-02 L11_spectral:1.2046e-02 L12_spectral:1.2044e-02 v_norm:2.3853e+00 cos_v_-g_hvp:2.8201e-02 g_hvp_norm:8.7129e-01 cos_v_-g_t:3.1815e-02 g_t_norm:7.6151e-01 hv_norm:1.4234e+00 cos_v_hv:1.5019e-02 hg_norm:9.1542e+01 cos_g_hg:7.1676e-01 v_par:6.6558e-03 v_perp:2.3853e+00 L1_cos_v_neg_g:2.7237e-02 L1_v_norm:5.7708e-01 L2_cos_v_neg_g:3.4997e-02 L2_v_norm:4.7115e-01 L3_cos_v_neg_g:2.7813e-02 L3_v_norm:4.4422e-01 L4_cos_v_neg_g:3.2415e-02 L4_v_norm:5.5144e-01 L5_cos_v_neg_g:3.5132e-02 L5_v_norm:5.8467e-01 L6_cos_v_neg_g:3.4507e-02 L6_v_norm:5.8923e-01 L7_cos_v_neg_g:3.4350e-02 L7_v_norm:5.9867e-01 L8_cos_v_neg_g:3.4423e-02 L8_v_norm:5.9946e-01 L9_cos_v_neg_g:3.6259e-02 
L9_v_norm:5.9766e-01 L10_cos_v_neg_g:3.8223e-02 L10_v_norm:5.9766e-01 L11_cos_v_neg_g:4.5805e-02 L11_v_norm:5.9408e-01 L12_cos_v_neg_g:7.7596e-02 L12_v_norm:6.0201e-01 +step:3000 train loss:3.528898 +step:3001 train loss:3.596698 +step:3002 train loss:3.639840 +step:3003 train loss:3.605237 +step:3004 train loss:3.619078 +step:3005 train loss:3.607733 +step:3006 train loss:3.627776 +step:3007 train loss:3.662678 +step:3008 train loss:3.631588 +step:3009 train loss:3.520993 +step:3010 train loss:3.605533 +step:3011 train loss:3.589903 +step:3012 train loss:3.557802 +step:3013 train loss:3.577762 +step:3014 train loss:3.541918 +step:3015 train loss:3.588023 +step:3016 train loss:3.586773 +step:3017 train loss:3.660956 +step:3018 train loss:3.616646 +step:3019 train loss:3.539995 +step:3020 train loss:3.601465 +step:3021 train loss:3.593696 +step:3022 train loss:3.561037 +step:3023 train loss:3.559839 +step:3024 train loss:3.580565 +step:3025 train loss:3.611204 +step:3026 train loss:3.613748 +step:3027 train loss:3.550632 +step:3028 train loss:3.626435 +step:3029 train loss:3.564473 +step:3030 train loss:3.629042 +step:3031 train loss:3.557527 +step:3032 train loss:3.574421 +step:3033 train loss:3.669670 +step:3034 train loss:3.533025 +step:3035 train loss:3.634247 +step:3036 train loss:3.578345 +step:3037 train loss:3.543847 +step:3038 train loss:3.600555 +step:3039 train loss:3.538952 +step:3040 train loss:3.599663 +step:3041 train loss:3.605397 +step:3042 train loss:3.561136 +step:3043 train loss:3.583563 +step:3044 train loss:3.512809 +step:3045 train loss:3.604240 +step:3046 train loss:3.680175 +step:3047 train loss:3.653561 +step:3048 train loss:3.606781 +step:3049 train loss:3.631111 +step:3050 train loss:3.581615 +step:3051 train loss:3.590971 +step:3052 train loss:3.603690 +step:3053 train loss:3.599712 +step:3054 train loss:3.553970 +step:3055 train loss:3.524117 +step:3056 train loss:3.600418 +step:3057 train loss:3.647984 +step:3058 train loss:3.633758 +step:3059 train loss:3.627079 +step:3060 train loss:3.612416 +step:3061 train loss:3.586345 +step:3062 train loss:3.591162 +step:3063 train loss:3.538078 +step:3064 train loss:3.613324 +step:3065 train loss:3.533159 +step:3066 train loss:3.580807 +step:3067 train loss:3.562477 +step:3068 train loss:3.485005 +step:3069 train loss:3.626839 +step:3070 train loss:3.575748 +step:3071 train loss:3.617137 +step:3072 train loss:3.591915 +step:3073 train loss:3.830128 +step:3074 train loss:3.603048 +step:3075 train loss:3.541332 +step:3076 train loss:3.618095 +step:3077 train loss:3.520536 +step:3078 train loss:3.590295 +step:3079 train loss:3.638774 +step:3080 train loss:3.533063 +step:3081 train loss:3.650908 +step:3082 train loss:3.544968 +step:3083 train loss:3.600008 +step:3084 train loss:3.577349 +step:3085 train loss:3.571353 +step:3086 train loss:3.672389 +step:3087 train loss:3.580992 +step:3088 train loss:3.575573 +step:3089 train loss:3.628967 +step:3090 train loss:3.525983 +step:3091 train loss:3.599564 +step:3092 train loss:3.521408 +step:3093 train loss:3.587339 +step:3094 train loss:3.581157 +step:3095 train loss:3.566720 +step:3096 train loss:3.562305 +step:3097 train loss:3.501216 +step:3098 train loss:3.638848 +step:3099 train loss:3.615056 +step:3100 train loss:3.554726 +step:3101 train loss:3.541961 +step:3102 train loss:3.636507 +step:3103 train loss:3.603641 +step:3104 train loss:3.599072 +step:3105 train loss:3.575373 +step:3106 train loss:3.580531 +step:3107 train loss:3.554644 +step:3108 train loss:3.609229 
+step:3109 train loss:3.560164 +step:3110 train loss:3.601161 +step:3111 train loss:3.639202 +step:3112 train loss:3.556720 +step:3113 train loss:3.614285 +step:3114 train loss:3.555434 +step:3115 train loss:3.559021 +step:3116 train loss:3.610012 +step:3117 train loss:3.601301 +step:3118 train loss:3.574141 +step:3119 train loss:3.506509 +step:3120 train loss:3.599622 +step:3121 train loss:3.574010 +step:3122 train loss:3.605372 +step:3123 train loss:3.566712 +step:3124 train loss:3.602520 +step:3125 train loss:3.563148 +step:3126 train loss:3.521666 +step:3127 train loss:3.574366 +step:3128 train loss:3.571101 +step:3129 train loss:3.546094 +step:3130 train loss:3.550525 +step:3131 train loss:3.564912 +step:3132 train loss:3.628039 +step:3133 train loss:3.549799 +step:3134 train loss:3.609375 +step:3135 train loss:3.561481 +step:3136 train loss:3.564767 +step:3137 train loss:3.606985 +step:3138 train loss:3.530513 +step:3139 train loss:3.634532 +step:3140 train loss:3.519907 +step:3141 train loss:3.636998 +step:3142 train loss:3.550838 +step:3143 train loss:3.572017 +step:3144 train loss:3.561416 +step:3145 train loss:3.563849 +step:3146 train loss:3.549157 +step:3147 train loss:3.521529 +step:3148 train loss:3.615799 +step:3149 train loss:3.544617 +step:3150 train loss:3.592400 +step:3151 train loss:3.595476 +step:3152 train loss:3.555162 +step:3153 train loss:3.566832 +step:3154 train loss:3.558472 +step:3155 train loss:3.562977 +step:3156 train loss:3.634829 +step:3157 train loss:3.640956 +step:3158 train loss:3.613583 +step:3159 train loss:3.587091 +step:3160 train loss:3.595118 +step:3161 train loss:3.677435 +step:3162 train loss:3.650578 +step:3163 train loss:3.660563 +step:3164 train loss:3.649026 +step:3165 train loss:3.591266 +step:3166 train loss:3.578885 +step:3167 train loss:3.562661 +step:3168 train loss:3.670356 +step:3169 train loss:3.571621 +step:3170 train loss:3.628732 +step:3171 train loss:3.643290 +step:3172 train loss:3.619059 +step:3173 train loss:3.640022 +step:3174 train loss:3.637113 +step:3175 train loss:3.593905 +step:3176 train loss:3.573150 +step:3177 train loss:3.531085 +step:3178 train loss:3.634135 +step:3179 train loss:3.585091 +step:3180 train loss:3.572852 +step:3181 train loss:3.616912 +step:3182 train loss:3.617350 +step:3183 train loss:3.620726 +step:3184 train loss:3.620434 +step:3185 train loss:3.563361 +step:3186 train loss:3.641213 +step:3187 train loss:3.540000 +step:3188 train loss:3.598679 +step:3189 train loss:3.742443 +step:3190 train loss:3.580575 +step:3191 train loss:3.577664 +step:3192 train loss:3.573534 +step:3193 train loss:3.597796 +step:3194 train loss:3.579463 +step:3195 train loss:3.673965 +step:3196 train loss:3.604030 +step:3197 train loss:3.540504 +step:3198 train loss:3.611279 +step:3199 train loss:3.598278 +step:3200 train loss:3.566029 +step:3201 train loss:3.621760 +step:3202 train loss:3.531792 +step:3203 train loss:3.643470 +step:3204 train loss:3.590730 +step:3205 train loss:3.612903 +step:3206 train loss:3.631742 +step:3207 train loss:3.722967 +step:3208 train loss:3.679843 +step:3209 train loss:3.581378 +step:3210 train loss:3.623728 +step:3211 train loss:3.588585 +step:3212 train loss:3.614472 +step:3213 train loss:3.640165 +step:3214 train loss:3.641047 +step:3215 train loss:3.570396 +step:3216 train loss:3.583240 +step:3217 train loss:3.611936 +step:3218 train loss:3.613108 +step:3219 train loss:3.573732 +step:3220 train loss:3.620870 +step:3221 train loss:3.614687 +step:3222 train loss:3.554740 +step:3223 train 
loss:3.658417 +step:3224 train loss:3.570507 +step:3225 train loss:3.604484 +step:3226 train loss:3.566929 +step:3227 train loss:3.626349 +step:3228 train loss:3.590809 +step:3229 train loss:3.558812 +step:3230 train loss:3.566221 +step:3231 train loss:3.588798 +step:3232 train loss:3.592937 +step:3233 train loss:3.561997 +step:3234 train loss:3.568410 +step:3235 train loss:3.683605 +step:3236 train loss:3.617653 +step:3237 train loss:3.587290 +step:3238 train loss:3.626615 +step:3239 train loss:3.582119 +step:3240 train loss:3.594983 +step:3241 train loss:3.547423 +step:3242 train loss:3.597665 +step:3243 train loss:3.624626 +step:3244 train loss:3.574555 +step:3245 train loss:3.572710 +step:3246 train loss:3.585964 +step:3247 train loss:3.649937 +step:3248 train loss:3.584156 +step:3249 train loss:3.572101 +step:3250 validation loss:3.532331 +step:3250 train loss:3.615587 +step:3251 train loss:3.613945 +step:3252 train loss:3.576250 +step:3253 train loss:3.559015 +step:3254 train loss:3.581466 +step:3255 train loss:3.597518 +step:3256 train loss:3.562163 +step:3257 train loss:3.673008 +step:3258 train loss:3.676568 +step:3259 train loss:3.613719 +step:3260 train loss:3.586175 +step:3261 train loss:3.580856 +step:3262 train loss:3.550302 +step:3263 train loss:3.592251 +step:3264 train loss:3.622787 +step:3265 train loss:3.571513 +step:3266 train loss:3.530312 +step:3267 train loss:3.585636 +step:3268 train loss:3.570468 +step:3269 train loss:3.609687 +step:3270 train loss:3.635615 +step:3271 train loss:3.597258 +step:3272 train loss:3.578376 +step:3273 train loss:3.624832 +step:3274 train loss:3.728051 +step:3275 train loss:3.613760 +step:3276 train loss:3.561730 +step:3277 train loss:3.602230 +step:3278 train loss:3.603402 +step:3279 train loss:3.599711 +step:3280 train loss:3.581129 +step:3281 train loss:3.603754 +step:3282 train loss:3.586036 +step:3283 train loss:3.514093 +step:3284 train loss:3.545045 +step:3285 train loss:3.578677 +step:3286 train loss:3.614615 +step:3287 train loss:3.598316 +step:3288 train loss:3.604813 +step:3289 train loss:3.604802 +step:3290 train loss:3.632105 +step:3291 train loss:3.550536 +step:3292 train loss:3.620813 +step:3293 train loss:3.570394 +step:3294 train loss:3.631295 +step:3295 train loss:3.558877 +step:3296 train loss:3.656552 +step:3297 train loss:3.597363 +step:3298 train loss:3.619774 +step:3299 train loss:3.551197 +step:3300 train loss:3.577975 +step:3301 train loss:3.594834 +step:3302 train loss:3.682455 +step:3303 train loss:3.608963 +step:3304 train loss:3.588873 +step:3305 train loss:3.568922 +step:3306 train loss:3.570608 +step:3307 train loss:3.577046 +step:3308 train loss:3.668177 +step:3309 train loss:3.603210 +step:3310 train loss:3.581180 +step:3311 train loss:3.635474 +step:3312 train loss:3.665272 +step:3313 train loss:3.556771 +step:3314 train loss:3.629881 +step:3315 train loss:3.570038 +step:3316 train loss:3.626055 +step:3317 train loss:3.623740 +step:3318 train loss:3.622993 +step:3319 train loss:3.621957 +step:3320 train loss:3.576027 +step:3321 train loss:3.556334 +step:3322 train loss:3.588827 +step:3323 train loss:3.547426 +step:3324 train loss:3.564012 +step:3325 train loss:3.664270 +step:3326 train loss:3.524713 +step:3327 train loss:3.591391 +step:3328 train loss:3.586275 +step:3329 train loss:3.542958 +step:3330 train loss:3.522659 +step:3331 train loss:3.660655 +step:3332 train loss:3.628492 +step:3333 train loss:3.582375 +step:3334 train loss:3.600711 +step:3335 train loss:3.658550 +step:3336 train loss:3.615964 
+step:3337 train loss:3.717539 +step:3338 train loss:3.561635 +step:3339 train loss:3.652761 +step:3340 train loss:3.655098 +step:3341 train loss:3.540944 +step:3342 train loss:3.576936 +step:3343 train loss:3.582492 +step:3344 train loss:3.509548 +step:3345 train loss:3.617555 +step:3346 train loss:3.622209 +step:3347 train loss:3.599526 +step:3348 train loss:3.586219 +step:3349 train loss:3.563682 +step:3350 train loss:3.619359 +step:3351 train loss:3.593342 +step:3352 train loss:3.608477 +step:3353 train loss:3.559867 +step:3354 train loss:3.607225 +step:3355 train loss:3.551272 +step:3356 train loss:3.571920 +step:3357 train loss:3.645462 +step:3358 train loss:3.561876 +step:3359 train loss:3.525568 +step:3360 train loss:3.614962 +step:3361 train loss:3.564472 +step:3362 train loss:3.619157 +step:3363 train loss:3.578564 +step:3364 train loss:3.568183 +step:3365 train loss:3.587024 +step:3366 train loss:3.585065 +step:3367 train loss:3.545582 +step:3368 train loss:3.555944 +step:3369 train loss:3.513746 +step:3370 train loss:3.587028 +step:3371 train loss:3.581622 +step:3372 train loss:3.586141 +step:3373 train loss:3.623608 +step:3374 train loss:3.588612 +step:3375 train loss:3.600488 +step:3376 train loss:3.532834 +step:3377 train loss:3.546169 +step:3378 train loss:3.520829 +step:3379 train loss:3.567161 +step:3380 train loss:3.604775 +step:3381 train loss:3.596690 +step:3382 train loss:3.525779 +step:3383 train loss:3.612897 +step:3384 train loss:3.572160 +step:3385 train loss:3.560054 +step:3386 train loss:3.604528 +step:3387 train loss:3.579787 +step:3388 train loss:3.606853 +step:3389 train loss:3.518689 +step:3390 train loss:3.576949 +step:3391 train loss:3.617480 +step:3392 train loss:3.565854 +step:3393 train loss:3.526563 +step:3394 train loss:3.574278 +step:3395 train loss:3.577742 +step:3396 train loss:3.605856 +step:3397 train loss:3.716779 +step:3398 train loss:3.509386 +step:3399 train loss:3.570131 +step:3400 train loss:3.540248 +step:3401 train loss:3.584193 +step:3402 train loss:3.579178 +step:3403 train loss:3.659872 +step:3404 train loss:3.565560 +step:3405 train loss:3.613645 +step:3406 train loss:3.559694 +step:3407 train loss:3.601449 +step:3408 train loss:3.623413 +step:3409 train loss:3.639931 +step:3410 train loss:3.661091 +step:3411 train loss:3.598895 +step:3412 train loss:3.571831 +step:3413 train loss:3.528443 +step:3414 train loss:3.552801 +step:3415 train loss:3.573755 +step:3416 train loss:3.651353 +step:3417 train loss:3.588610 +step:3418 train loss:3.576712 +step:3419 train loss:3.556575 +step:3420 train loss:3.647512 +step:3421 train loss:3.631754 +step:3422 train loss:3.594571 +step:3423 train loss:3.552232 +step:3424 train loss:3.617606 +step:3425 train loss:3.597880 +step:3426 train loss:3.630339 +step:3427 train loss:3.601351 +step:3428 train loss:3.541457 +step:3429 train loss:3.605277 +step:3430 train loss:3.624672 +step:3431 train loss:3.560646 +step:3432 train loss:3.577044 +step:3433 train loss:3.605503 +step:3434 train loss:3.560768 +step:3435 train loss:3.517964 +step:3436 train loss:3.572365 +step:3437 train loss:3.592373 +step:3438 train loss:3.585572 +step:3439 train loss:3.541632 +step:3440 train loss:3.588178 +step:3441 train loss:3.612384 +step:3442 train loss:3.532032 +step:3443 train loss:3.518878 +step:3444 train loss:3.532365 +step:3445 train loss:3.540077 +step:3446 train loss:3.566699 +step:3447 train loss:3.541156 +step:3448 train loss:3.560734 +step:3449 train loss:3.586253 +step:3450 train loss:3.609075 +step:3451 train 
loss:3.574012 +step:3452 train loss:3.514876 +step:3453 train loss:3.556052 +step:3454 train loss:3.600349 +step:3455 train loss:3.562000 +step:3456 train loss:3.551726 +step:3457 train loss:3.551266 +step:3458 train loss:3.574604 +step:3459 train loss:3.810884 +step:3460 train loss:3.572531 +step:3461 train loss:3.580980 +step:3462 train loss:3.560493 +step:3463 train loss:3.638662 +step:3464 train loss:3.597642 +step:3465 train loss:3.587035 +step:3466 train loss:3.520064 +step:3467 train loss:3.567988 +step:3468 train loss:3.546338 +step:3469 train loss:3.580993 +step:3470 train loss:3.527598 +step:3471 train loss:3.591593 +step:3472 train loss:3.583994 +step:3473 train loss:3.622079 +step:3474 train loss:3.569935 +step:3475 train loss:3.540535 +step:3476 train loss:3.630239 +step:3477 train loss:3.563820 +step:3478 train loss:3.635252 +step:3479 train loss:3.527750 +step:3480 train loss:3.668134 +step:3481 train loss:3.575143 +step:3482 train loss:3.597948 +step:3483 train loss:3.561606 +step:3484 train loss:3.572696 +step:3485 train loss:3.596110 +step:3486 train loss:3.557725 +step:3487 train loss:3.651028 +step:3488 train loss:3.514373 +step:3489 train loss:3.551990 +step:3490 train loss:3.567750 +step:3491 train loss:3.524771 +step:3492 train loss:3.568687 +step:3493 train loss:3.719513 +step:3494 train loss:3.561624 +step:3495 train loss:3.562768 +step:3496 train loss:3.503474 +step:3497 train loss:3.523145 +step:3498 train loss:3.585957 +step:3499 train loss:3.556685 +step:3500 validation loss:3.514195 total_sharp:5.8596e-03 L1_sharp:1.2046e-02 L2_sharp:3.5329e-03 L3_sharp:2.2351e-03 L4_sharp:1.0759e-03 L5_sharp:1.0405e-03 L6_sharp:1.2513e-03 L7_sharp:1.1574e-03 L8_sharp:9.7596e-04 L9_sharp:6.4363e-04 L10_sharp:4.3581e-04 L11_sharp:4.6148e-04 L12_sharp:3.9326e-04 total_fnorm:2.3784e+00 total_l1_linf:2.0329e+04 total_spectral:2.3784e+00 L1_fnorm:5.7171e-01 L2_fnorm:4.7738e-01 L3_fnorm:4.2614e-01 L4_fnorm:5.5437e-01 L5_fnorm:5.8503e-01 L6_fnorm:5.8953e-01 L7_fnorm:6.0000e-01 L8_fnorm:6.0044e-01 L9_fnorm:5.9875e-01 L10_fnorm:5.9719e-01 L11_fnorm:5.9212e-01 L12_fnorm:6.0176e-01 L1_l1linf:4.4554e-01 L2_l1linf:4.2698e-01 L3_l1linf:5.0308e-01 L4_l1linf:4.0571e-01 L5_l1linf:4.0725e-01 L6_l1linf:4.1223e-01 L7_l1linf:4.1518e-01 L8_l1linf:4.1637e-01 L9_l1linf:4.1125e-01 L10_l1linf:4.1590e-01 L11_l1linf:4.0981e-01 L12_l1linf:3.9618e-01 L1_spectral:1.2053e-02 L2_spectral:1.2052e-02 L3_spectral:1.9951e-02 L4_spectral:1.2045e-02 L5_spectral:1.2053e-02 L6_spectral:1.2045e-02 L7_spectral:1.2057e-02 L8_spectral:1.2049e-02 L9_spectral:1.2068e-02 L10_spectral:1.2046e-02 L11_spectral:1.2047e-02 L12_spectral:1.2045e-02 v_norm:2.3784e+00 cos_v_-g_hvp:3.2695e-02 g_hvp_norm:6.3216e-01 cos_v_-g_t:3.7830e-02 g_t_norm:5.4860e-01 hv_norm:1.0122e+00 cos_v_hv:1.3768e-02 hg_norm:5.6290e+01 cos_g_hg:5.1530e-01 v_par:6.3750e-03 v_perp:2.3784e+00 L1_cos_v_neg_g:2.6371e-02 L1_v_norm:5.7171e-01 L2_cos_v_neg_g:3.3454e-02 L2_v_norm:4.7738e-01 L3_cos_v_neg_g:2.8330e-02 L3_v_norm:4.2614e-01 L4_cos_v_neg_g:2.8919e-02 L4_v_norm:5.5437e-01 L5_cos_v_neg_g:3.3711e-02 L5_v_norm:5.8503e-01 L6_cos_v_neg_g:3.3618e-02 L6_v_norm:5.8953e-01 L7_cos_v_neg_g:3.2320e-02 L7_v_norm:6.0000e-01 L8_cos_v_neg_g:3.3393e-02 L8_v_norm:6.0044e-01 L9_cos_v_neg_g:3.2946e-02 L9_v_norm:5.9875e-01 L10_cos_v_neg_g:3.7693e-02 L10_v_norm:5.9719e-01 L11_cos_v_neg_g:4.5024e-02 L11_v_norm:5.9212e-01 L12_cos_v_neg_g:7.2436e-02 L12_v_norm:6.0176e-01 +step:3500 train loss:3.552141 +step:3501 train loss:3.571008 +step:3502 train loss:3.527273 +step:3503 train 
loss:3.551873 +step:3504 train loss:3.567696 +step:3505 train loss:3.547971 +step:3506 train loss:3.581944 +step:3507 train loss:3.548445 +step:3508 train loss:3.547070 +step:3509 train loss:3.597132 +step:3510 train loss:3.521240 +step:3511 train loss:3.604787 +step:3512 train loss:3.641453 +step:3513 train loss:3.614271 +step:3514 train loss:3.678058 +step:3515 train loss:3.531101 +step:3516 train loss:3.565613 +step:3517 train loss:3.553010 +step:3518 train loss:3.575361 +step:3519 train loss:3.560526 +step:3520 train loss:3.534145 +step:3521 train loss:3.593964 +step:3522 train loss:3.555330 +step:3523 train loss:3.523956 +step:3524 train loss:3.552744 +step:3525 train loss:3.522908 +step:3526 train loss:3.547741 +step:3527 train loss:3.599050 +step:3528 train loss:3.579106 +step:3529 train loss:3.528665 +step:3530 train loss:3.498164 +step:3531 train loss:3.589691 +step:3532 train loss:3.539291 +step:3533 train loss:3.527891 +step:3534 train loss:3.554270 +step:3535 train loss:3.569153 +step:3536 train loss:3.559429 +step:3537 train loss:3.585605 +step:3538 train loss:3.539985 +step:3539 train loss:3.561002 +step:3540 train loss:3.559748 +step:3541 train loss:3.584229 +step:3542 train loss:3.565566 +step:3543 train loss:3.583322 +step:3544 train loss:3.497073 +step:3545 train loss:3.554293 +step:3546 train loss:3.501071 +step:3547 train loss:3.506908 +step:3548 train loss:3.540558 +step:3549 train loss:3.542827 +step:3550 train loss:3.522581 +step:3551 train loss:3.601221 +step:3552 train loss:3.582959 +step:3553 train loss:3.564153 +step:3554 train loss:3.644138 +step:3555 train loss:3.533602 +step:3556 train loss:3.522939 +step:3557 train loss:3.553599 +step:3558 train loss:3.540023 +step:3559 train loss:3.591471 +step:3560 train loss:3.651629 +step:3561 train loss:3.566268 +step:3562 train loss:3.569350 +step:3563 train loss:3.662643 +step:3564 train loss:3.504920 +step:3565 train loss:3.530031 +step:3566 train loss:3.565464 +step:3567 train loss:3.613886 +step:3568 train loss:3.550769 +step:3569 train loss:3.549499 +step:3570 train loss:3.566732 +step:3571 train loss:3.546862 +step:3572 train loss:3.622178 +step:3573 train loss:3.563944 +step:3574 train loss:3.570318 +step:3575 train loss:3.521899 +step:3576 train loss:3.539471 +step:3577 train loss:3.549994 +step:3578 train loss:3.572691 +step:3579 train loss:3.483781 +step:3580 train loss:3.537806 +step:3581 train loss:3.527623 +step:3582 train loss:3.490463 +step:3583 train loss:3.546389 +step:3584 train loss:3.535235 +step:3585 train loss:3.564746 +step:3586 train loss:3.525981 +step:3587 train loss:3.536819 +step:3588 train loss:3.520483 +step:3589 train loss:3.546607 +step:3590 train loss:3.580107 +step:3591 train loss:3.567614 +step:3592 train loss:3.606857 +step:3593 train loss:3.572578 +step:3594 train loss:3.515739 +step:3595 train loss:3.622199 +step:3596 train loss:3.575307 +step:3597 train loss:3.503313 +step:3598 train loss:3.590399 +step:3599 train loss:3.544509 +step:3600 train loss:3.530276 +step:3601 train loss:3.541559 +step:3602 train loss:3.528606 +step:3603 train loss:3.489498 +step:3604 train loss:3.618312 +step:3605 train loss:3.532147 +step:3606 train loss:3.605452 +step:3607 train loss:3.625650 +step:3608 train loss:3.566949 +step:3609 train loss:3.701467 +step:3610 train loss:3.587475 +step:3611 train loss:3.549827 +step:3612 train loss:3.538171 +step:3613 train loss:3.504891 +step:3614 train loss:3.525611 +step:3615 train loss:3.551164 +step:3616 train loss:3.544882 +step:3617 train loss:3.492035 
+step:3618 train loss:3.542799 +step:3619 train loss:3.532646 +step:3620 train loss:3.557007 +step:3621 train loss:3.640196 +step:3622 train loss:3.513817 +step:3623 train loss:3.461614 +step:3624 train loss:3.597787 +step:3625 train loss:3.559543 +step:3626 train loss:3.474996 +step:3627 train loss:3.571153 +step:3628 train loss:3.562754 +step:3629 train loss:3.542015 +step:3630 train loss:3.534562 +step:3631 train loss:3.538579 +step:3632 train loss:3.523613 +step:3633 train loss:3.563000 +step:3634 train loss:3.535185 +step:3635 train loss:3.588635 +step:3636 train loss:3.591670 +step:3637 train loss:3.670825 +step:3638 train loss:3.500594 +step:3639 train loss:3.568783 +step:3640 train loss:3.559352 +step:3641 train loss:3.551952 +step:3642 train loss:3.543417 +step:3643 train loss:3.544279 +step:3644 train loss:3.553101 +step:3645 train loss:3.552915 +step:3646 train loss:3.544893 +step:3647 train loss:3.532391 +step:3648 train loss:3.582833 +step:3649 train loss:3.602869 +step:3650 train loss:3.522183 +step:3651 train loss:3.592418 +step:3652 train loss:3.532576 +step:3653 train loss:3.537311 +step:3654 train loss:3.527986 +step:3655 train loss:3.500139 +step:3656 train loss:3.549085 +step:3657 train loss:3.517344 +step:3658 train loss:3.615882 +step:3659 train loss:3.547651 +step:3660 train loss:3.500839 +step:3661 train loss:3.527739 +step:3662 train loss:3.556763 +step:3663 train loss:3.582298 +step:3664 train loss:3.545832 +step:3665 train loss:3.529942 +step:3666 train loss:3.529539 +step:3667 train loss:3.537117 +step:3668 train loss:3.532799 +step:3669 train loss:3.588619 +step:3670 train loss:3.562033 +step:3671 train loss:3.574232 +step:3672 train loss:3.576217 +step:3673 train loss:3.524427 +step:3674 train loss:3.536996 +step:3675 train loss:3.558025 +step:3676 train loss:3.508779 +step:3677 train loss:3.496233 +step:3678 train loss:3.598142 +step:3679 train loss:3.563039 +step:3680 train loss:3.559089 +step:3681 train loss:3.601398 +step:3682 train loss:3.493548 +step:3683 train loss:3.498846 +step:3684 train loss:3.545743 +step:3685 train loss:3.567178 +step:3686 train loss:3.547843 +step:3687 train loss:3.938084 +step:3688 train loss:3.589113 +step:3689 train loss:3.536231 +step:3690 train loss:3.535296 +step:3691 train loss:3.574741 +step:3692 train loss:3.567785 +step:3693 train loss:3.526052 +step:3694 train loss:3.547951 +step:3695 train loss:3.492134 +step:3696 train loss:3.494567 +step:3697 train loss:3.553756 +step:3698 train loss:3.528289 +step:3699 train loss:3.573730 +step:3700 train loss:3.608501 +step:3701 train loss:3.525740 +step:3702 train loss:3.526323 +step:3703 train loss:3.548695 +step:3704 train loss:3.544898 +step:3705 train loss:3.493572 +step:3706 train loss:3.514772 +step:3707 train loss:3.542920 +step:3708 train loss:3.531292 +step:3709 train loss:3.467402 +step:3710 train loss:3.532336 +step:3711 train loss:3.518623 +step:3712 train loss:3.530139 +step:3713 train loss:3.519937 +step:3714 train loss:3.489700 +step:3715 train loss:3.507732 +step:3716 train loss:3.556188 +step:3717 train loss:3.536193 +step:3718 train loss:3.578888 +step:3719 train loss:3.486473 +step:3720 train loss:3.608176 +step:3721 train loss:3.617956 +step:3722 train loss:3.519639 +step:3723 train loss:3.502458 +step:3724 train loss:3.681984 +step:3725 train loss:3.567201 +step:3726 train loss:3.589397 +step:3727 train loss:3.545214 +step:3728 train loss:3.554970 +step:3729 train loss:3.659772 +step:3730 train loss:3.867816 +step:3731 train loss:3.546395 +step:3732 train 
loss:3.558784 +step:3733 train loss:3.644754 +step:3734 train loss:3.601270 +step:3735 train loss:3.591834 +step:3736 train loss:3.593436 +step:3737 train loss:3.559602 +step:3738 train loss:3.569393 +step:3739 train loss:3.542080 +step:3740 train loss:3.561690 +step:3741 train loss:3.631991 +step:3742 train loss:3.551294 +step:3743 train loss:3.588293 +step:3744 train loss:3.480004 +step:3745 train loss:3.538433 +step:3746 train loss:3.566489 +step:3747 train loss:3.585165 +step:3748 train loss:3.644776 +step:3749 train loss:3.545336 +step:3750 validation loss:3.516703 +step:3750 train loss:3.534309 +step:3751 train loss:3.587650 +step:3752 train loss:3.572475 +step:3753 train loss:3.511493 +step:3754 train loss:3.561815 +step:3755 train loss:3.525070 +step:3756 train loss:3.540065 +step:3757 train loss:3.504624 +step:3758 train loss:3.484971 +step:3759 train loss:3.589127 +step:3760 train loss:3.600925 +step:3761 train loss:3.522844 +step:3762 train loss:3.576853 +step:3763 train loss:3.513309 +step:3764 train loss:3.539013 +step:3765 train loss:3.568620 +step:3766 train loss:3.533603 +step:3767 train loss:3.548743 +step:3768 train loss:3.555786 +step:3769 train loss:3.597276 +step:3770 train loss:3.587514 +step:3771 train loss:3.482018 +step:3772 train loss:3.596731 +step:3773 train loss:3.533728 +step:3774 train loss:3.607365 +step:3775 train loss:3.596889 +step:3776 train loss:3.565281 +step:3777 train loss:3.648250 +step:3778 train loss:3.557978 +step:3779 train loss:3.606394 +step:3780 train loss:3.567883 +step:3781 train loss:3.489350 +step:3782 train loss:3.600270 +step:3783 train loss:3.565293 +step:3784 train loss:3.514688 +step:3785 train loss:3.598886 +step:3786 train loss:3.558503 +step:3787 train loss:3.583242 +step:3788 train loss:3.557981 +step:3789 train loss:3.575672 +step:3790 train loss:3.562045 +step:3791 train loss:3.515240 +step:3792 train loss:3.605731 +step:3793 train loss:3.524251 +step:3794 train loss:3.551251 +step:3795 train loss:3.552944 +step:3796 train loss:3.573294 +step:3797 train loss:3.565222 +step:3798 train loss:3.523297 +step:3799 train loss:3.539449 +step:3800 train loss:3.570885 +step:3801 train loss:3.558210 +step:3802 train loss:3.558127 +step:3803 train loss:3.511307 +step:3804 train loss:3.590183 +step:3805 train loss:3.478169 +step:3806 train loss:3.565969 +step:3807 train loss:3.552660 +step:3808 train loss:3.527397 +step:3809 train loss:3.595987 +step:3810 train loss:3.600911 +step:3811 train loss:3.555687 +step:3812 train loss:3.561318 +step:3813 train loss:3.563637 +step:3814 train loss:3.528941 +step:3815 train loss:3.559166 +step:3816 train loss:3.539807 +step:3817 train loss:3.547308 +step:3818 train loss:3.504432 +step:3819 train loss:3.484679 +step:3820 train loss:3.558516 +step:3821 train loss:3.643201 +step:3822 train loss:3.599636 +step:3823 train loss:3.525060 +step:3824 train loss:3.564431 +step:3825 train loss:3.564967 +step:3826 train loss:3.524890 +step:3827 train loss:3.528531 +step:3828 train loss:3.546196 +step:3829 train loss:3.518676 +step:3830 train loss:3.590561 +step:3831 train loss:3.521823 +step:3832 train loss:3.520281 +step:3833 train loss:3.537188 +step:3834 train loss:3.511705 +step:3835 train loss:3.473045 +step:3836 train loss:3.580544 +step:3837 train loss:3.518772 +step:3838 train loss:3.516387 +step:3839 train loss:3.531547 +step:3840 train loss:3.565628 +step:3841 train loss:3.514117 +step:3842 train loss:3.589159 +step:3843 train loss:3.573526 +step:3844 train loss:3.522787 +step:3845 train loss:3.493392 
+step:3846 train loss:3.504510 +step:3847 train loss:3.589335 +step:3848 train loss:3.613703 +step:3849 train loss:3.520318 +step:3850 train loss:3.532790 +step:3851 train loss:3.517306 +step:3852 train loss:3.571290 +step:3853 train loss:3.538673 +step:3854 train loss:3.542685 +step:3855 train loss:3.521068 +step:3856 train loss:3.563090 +step:3857 train loss:3.599368 +step:3858 train loss:3.511169 +step:3859 train loss:3.578391 +step:3860 train loss:3.536457 +step:3861 train loss:3.552177 +step:3862 train loss:3.514799 +step:3863 train loss:3.579767 +step:3864 train loss:3.531139 +step:3865 train loss:3.530540 +step:3866 train loss:3.533069 +step:3867 train loss:3.561412 +step:3868 train loss:3.636580 +step:3869 train loss:3.533897 +step:3870 train loss:3.564515 +step:3871 train loss:3.509991 +step:3872 train loss:3.595833 +step:3873 train loss:3.516446 +step:3874 train loss:3.497815 +step:3875 train loss:3.585911 +step:3876 train loss:3.516547 +step:3877 train loss:3.541304 +step:3878 train loss:3.548949 +step:3879 train loss:3.557415 +step:3880 train loss:3.535275 +step:3881 train loss:3.573358 +step:3882 train loss:3.526192 +step:3883 train loss:3.533206 +step:3884 train loss:3.517069 +step:3885 train loss:3.618202 +step:3886 train loss:3.622045 +step:3887 train loss:3.540742 +step:3888 train loss:3.484717 +step:3889 train loss:3.540807 +step:3890 train loss:3.467173 +step:3891 train loss:3.524909 +step:3892 train loss:3.564156 +step:3893 train loss:3.528350 +step:3894 train loss:3.540046 +step:3895 train loss:3.525462 +step:3896 train loss:3.511522 +step:3897 train loss:3.562506 +step:3898 train loss:3.620438 +step:3899 train loss:3.566729 +step:3900 train loss:3.558136 +step:3901 train loss:3.584755 +step:3902 train loss:3.519847 +step:3903 train loss:3.525115 +step:3904 train loss:3.531034 +step:3905 train loss:3.559221 +step:3906 train loss:3.529950 +step:3907 train loss:3.561738 +step:3908 train loss:3.526096 +step:3909 train loss:3.589391 +step:3910 train loss:3.583423 +step:3911 train loss:3.574110 +step:3912 train loss:3.607792 +step:3913 train loss:3.644388 +step:3914 train loss:3.485534 +step:3915 train loss:3.582271 +step:3916 train loss:3.547150 +step:3917 train loss:3.528857 +step:3918 train loss:3.557859 +step:3919 train loss:3.545497 +step:3920 train loss:3.634713 +step:3921 train loss:3.550308 +step:3922 train loss:3.605534 +step:3923 train loss:3.500559 +step:3924 train loss:3.625594 +step:3925 train loss:3.575491 +step:3926 train loss:3.570391 +step:3927 train loss:3.546401 +step:3928 train loss:3.497154 +step:3929 train loss:3.602912 +step:3930 train loss:3.651833 +step:3931 train loss:3.574939 +step:3932 train loss:3.613041 +step:3933 train loss:3.586714 +step:3934 train loss:3.614578 +step:3935 train loss:3.546643 +step:3936 train loss:3.482335 +step:3937 train loss:3.465789 +step:3938 train loss:3.594463 +step:3939 train loss:3.567806 +step:3940 train loss:3.555594 +step:3941 train loss:3.523520 +step:3942 train loss:3.594843 +step:3943 train loss:3.609617 +step:3944 train loss:3.559886 +step:3945 train loss:3.577536 +step:3946 train loss:3.543632 +step:3947 train loss:3.567029 +step:3948 train loss:3.555544 +step:3949 train loss:3.565086 +step:3950 train loss:3.567303 +step:3951 train loss:3.544893 +step:3952 train loss:3.660346 +step:3953 train loss:3.562533 +step:3954 train loss:3.594006 +step:3955 train loss:3.552520 +step:3956 train loss:3.595163 +step:3957 train loss:3.530207 +step:3958 train loss:3.570415 +step:3959 train loss:3.512892 +step:3960 train 
loss:3.583485 +step:3961 train loss:3.539071 +step:3962 train loss:3.544753 +step:3963 train loss:3.538558 +step:3964 train loss:3.523909 +step:3965 train loss:3.529173 +step:3966 train loss:3.591113 +step:3967 train loss:3.514784 +step:3968 train loss:3.561002 +step:3969 train loss:3.527352 +step:3970 train loss:3.559187 +step:3971 train loss:3.562892 +step:3972 train loss:3.591033 +step:3973 train loss:3.515864 +step:3974 train loss:3.581717 +step:3975 train loss:3.508416 +step:3976 train loss:3.583519 +step:3977 train loss:3.601386 +step:3978 train loss:3.550870 +step:3979 train loss:3.509869 +step:3980 train loss:3.556715 +step:3981 train loss:3.537601 +step:3982 train loss:3.538937 +step:3983 train loss:3.609282 +step:3984 train loss:3.528938 +step:3985 train loss:3.566221 +step:3986 train loss:3.559993 +step:3987 train loss:3.523849 +step:3988 train loss:3.544200 +step:3989 train loss:3.525708 +step:3990 train loss:3.552188 +step:3991 train loss:3.552132 +step:3992 train loss:3.562382 +step:3993 train loss:3.648197 +step:3994 train loss:3.485767 +step:3995 train loss:3.555972 +step:3996 train loss:3.605967 +step:3997 train loss:3.547140 +step:3998 train loss:3.641116 +step:3999 train loss:3.576383 +step:4000 validation loss:3.501455 total_sharp:5.4736e-03 L1_sharp:6.1486e-03 L2_sharp:1.6774e-03 L3_sharp:3.3678e-03 L4_sharp:1.6085e-03 L5_sharp:1.1083e-03 L6_sharp:1.5003e-03 L7_sharp:1.5237e-03 L8_sharp:1.3087e-03 L9_sharp:8.6758e-04 L10_sharp:5.0882e-04 L11_sharp:5.7333e-04 L12_sharp:4.6771e-04 total_fnorm:2.3710e+00 total_l1_linf:2.0241e+04 total_spectral:2.3710e+00 L1_fnorm:5.6853e-01 L2_fnorm:4.6048e-01 L3_fnorm:4.0747e-01 L4_fnorm:5.4619e-01 L5_fnorm:5.8532e-01 L6_fnorm:5.8926e-01 L7_fnorm:5.9946e-01 L8_fnorm:6.0089e-01 L9_fnorm:5.9748e-01 L10_fnorm:5.9706e-01 L11_fnorm:5.8755e-01 L12_fnorm:6.0094e-01 L1_l1linf:4.3669e-01 L2_l1linf:4.4028e-01 L3_l1linf:6.1088e-01 L4_l1linf:4.2184e-01 L5_l1linf:4.0582e-01 L6_l1linf:4.1216e-01 L7_l1linf:4.1052e-01 L8_l1linf:4.1357e-01 L9_l1linf:4.0915e-01 L10_l1linf:4.1296e-01 L11_l1linf:4.0888e-01 L12_l1linf:4.0517e-01 L1_spectral:1.2045e-02 L2_spectral:1.2040e-02 L3_spectral:2.2564e-02 L4_spectral:1.2043e-02 L5_spectral:1.2047e-02 L6_spectral:1.2053e-02 L7_spectral:1.2046e-02 L8_spectral:1.2048e-02 L9_spectral:1.2051e-02 L10_spectral:1.2052e-02 L11_spectral:1.2041e-02 L12_spectral:1.2045e-02 v_norm:2.3710e+00 cos_v_-g_hvp:3.0362e-02 g_hvp_norm:6.5796e-01 cos_v_-g_t:3.5323e-02 g_t_norm:6.2782e-01 hv_norm:7.4584e-01 cos_v_hv:1.7400e-02 hg_norm:3.4051e+01 cos_g_hg:6.1044e-01 v_par:5.8959e-03 v_perp:2.3710e+00 L1_cos_v_neg_g:1.6918e-02 L1_v_norm:5.6853e-01 L2_cos_v_neg_g:2.6029e-02 L2_v_norm:4.6048e-01 L3_cos_v_neg_g:2.7059e-02 L3_v_norm:4.0747e-01 L4_cos_v_neg_g:2.9693e-02 L4_v_norm:5.4619e-01 L5_cos_v_neg_g:3.3615e-02 L5_v_norm:5.8532e-01 L6_cos_v_neg_g:3.3595e-02 L6_v_norm:5.8926e-01 L7_cos_v_neg_g:3.3674e-02 L7_v_norm:5.9946e-01 L8_cos_v_neg_g:3.3087e-02 L8_v_norm:6.0089e-01 L9_cos_v_neg_g:3.3160e-02 L9_v_norm:5.9748e-01 L10_cos_v_neg_g:3.6538e-02 L10_v_norm:5.9706e-01 L11_cos_v_neg_g:4.6046e-02 L11_v_norm:5.8755e-01 L12_cos_v_neg_g:7.8679e-02 L12_v_norm:6.0094e-01 +step:4000 train loss:3.572560 +step:4001 train loss:3.703653 +step:4002 train loss:3.564040 +step:4003 train loss:3.579208 +step:4004 train loss:3.595126 +step:4005 train loss:3.560208 +step:4006 train loss:3.564165 +step:4007 train loss:3.577772 +step:4008 train loss:3.547225 +step:4009 train loss:3.596157 +step:4010 train loss:3.627363 +step:4011 train loss:3.581656 +step:4012 train 
loss:3.524541 +step:4013 train loss:3.536737 +step:4014 train loss:3.563134 +step:4015 train loss:3.537591 +step:4016 train loss:3.543779 +step:4017 train loss:3.560502 +step:4018 train loss:3.603245 +step:4019 train loss:3.526657 +step:4020 train loss:3.563390 +step:4021 train loss:3.528202 +step:4022 train loss:3.585848 +step:4023 train loss:3.499906 +step:4024 train loss:3.539767 +step:4025 train loss:3.548401 +step:4026 train loss:3.574127 +step:4027 train loss:3.522609 +step:4028 train loss:3.570382 +step:4029 train loss:3.595691 +step:4030 train loss:3.594051 +step:4031 train loss:3.674034 +step:4032 train loss:3.523245 +step:4033 train loss:3.619131 +step:4034 train loss:3.567438 +step:4035 train loss:3.599754 +step:4036 train loss:3.522119 +step:4037 train loss:3.540969 +step:4038 train loss:3.536989 +step:4039 train loss:3.584513 +step:4040 train loss:3.538760 +step:4041 train loss:3.532011 +step:4042 train loss:3.531572 +step:4043 train loss:3.540077 +step:4044 train loss:3.584424 +step:4045 train loss:3.580702 +step:4046 train loss:3.606298 +step:4047 train loss:3.556839 +step:4048 train loss:3.631918 +step:4049 train loss:3.605268 +step:4050 train loss:3.555446 +step:4051 train loss:3.583097 +step:4052 train loss:3.632333 +step:4053 train loss:3.552846 +step:4054 train loss:3.563662 +step:4055 train loss:3.558067 +step:4056 train loss:3.531811 +step:4057 train loss:3.580033 +step:4058 train loss:3.606601 +step:4059 train loss:3.558095 +step:4060 train loss:3.582319 +step:4061 train loss:3.570127 +step:4062 train loss:3.545954 +step:4063 train loss:3.578796 +step:4064 train loss:3.558388 +step:4065 train loss:3.598845 +step:4066 train loss:3.546176 +step:4067 train loss:3.749645 +step:4068 train loss:3.470531 +step:4069 train loss:3.553109 +step:4070 train loss:3.543299 +step:4071 train loss:3.555304 +step:4072 train loss:3.538376 +step:4073 train loss:3.580998 +step:4074 train loss:3.504243 +step:4075 train loss:3.580573 +step:4076 train loss:3.552372 +step:4077 train loss:3.559040 +step:4078 train loss:3.522583 +step:4079 train loss:3.565494 +step:4080 train loss:3.707164 +step:4081 train loss:3.669915 +step:4082 train loss:3.667432 +step:4083 train loss:3.552875 +step:4084 train loss:3.572491 +step:4085 train loss:3.553430 +step:4086 train loss:3.518503 +step:4087 train loss:3.496764 +step:4088 train loss:3.538596 +step:4089 train loss:3.544876 +step:4090 train loss:3.572171 +step:4091 train loss:3.491098 +step:4092 train loss:3.548798 +step:4093 train loss:3.517370 +step:4094 train loss:3.536871 +step:4095 train loss:3.608780 +step:4096 train loss:3.614051 +step:4097 train loss:3.550543 +step:4098 train loss:3.552263 +step:4099 train loss:3.582318 +step:4100 train loss:3.596190 +step:4101 train loss:3.597521 +step:4102 train loss:3.487106 +step:4103 train loss:3.537714 +step:4104 train loss:3.491147 +step:4105 train loss:3.578148 +step:4106 train loss:3.507404 +step:4107 train loss:3.552942 +step:4108 train loss:3.491470 +step:4109 train loss:3.633065 +step:4110 train loss:3.520901 +step:4111 train loss:3.535861 +step:4112 train loss:3.666286 +step:4113 train loss:3.459465 +step:4114 train loss:3.563838 +step:4115 train loss:3.506107 +step:4116 train loss:3.608872 +step:4117 train loss:3.549016 +step:4118 train loss:3.514444 +step:4119 train loss:3.590271 +step:4120 train loss:3.521641 +step:4121 train loss:3.499222 +step:4122 train loss:3.501653 +step:4123 train loss:3.542098 +step:4124 train loss:3.493992 +step:4125 train loss:3.505784 +step:4126 train loss:3.628036 
+step:4127 train loss:3.496660 +step:4128 train loss:3.519896 +step:4129 train loss:3.515098 +step:4130 train loss:3.556078 +step:4131 train loss:3.549772 +step:4132 train loss:3.554226 +step:4133 train loss:3.525743 +step:4134 train loss:3.523194 +step:4135 train loss:3.584234 +step:4136 train loss:3.509303 +step:4137 train loss:3.506178 +step:4138 train loss:3.537873 +step:4139 train loss:3.482199 +step:4140 train loss:3.518368 +step:4141 train loss:3.566099 +step:4142 train loss:3.476940 +step:4143 train loss:3.582668 +step:4144 train loss:3.497847 +step:4145 train loss:3.532615 +step:4146 train loss:3.573031 +step:4147 train loss:3.520118 +step:4148 train loss:3.552893 +step:4149 train loss:3.506824 +step:4150 train loss:3.567865 +step:4151 train loss:3.535901 +step:4152 train loss:3.503813 +step:4153 train loss:3.511360 +step:4154 train loss:3.574647 +step:4155 train loss:3.680721 +step:4156 train loss:3.561245 +step:4157 train loss:3.516559 +step:4158 train loss:3.500333 +step:4159 train loss:3.510113 +step:4160 train loss:3.546266 +step:4161 train loss:3.569033 +step:4162 train loss:3.549369 +step:4163 train loss:3.536702 +step:4164 train loss:3.555458 +step:4165 train loss:3.515506 +step:4166 train loss:3.610906 +step:4167 train loss:3.582366 +step:4168 train loss:3.556927 +step:4169 train loss:3.521937 +step:4170 train loss:3.499686 +step:4171 train loss:3.494117 +step:4172 train loss:3.499492 +step:4173 train loss:3.536924 +step:4174 train loss:3.507195 +step:4175 train loss:3.498713 +step:4176 train loss:3.601114 +step:4177 train loss:3.498203 +step:4178 train loss:3.553351 +step:4179 train loss:3.527946 +step:4180 train loss:3.499534 +step:4181 train loss:3.540087 +step:4182 train loss:3.462972 +step:4183 train loss:3.483185 +step:4184 train loss:3.508439 +step:4185 train loss:3.542442 +step:4186 train loss:3.563802 +step:4187 train loss:3.508194 +step:4188 train loss:3.518154 +step:4189 train loss:3.595982 +step:4190 train loss:3.568483 +step:4191 train loss:3.483739 +step:4192 train loss:3.504127 +step:4193 train loss:3.502747 +step:4194 train loss:3.455124 +step:4195 train loss:3.556878 +step:4196 train loss:3.584291 +step:4197 train loss:3.466028 +step:4198 train loss:3.534028 +step:4199 train loss:3.449622 +step:4200 train loss:3.552221 +step:4201 train loss:3.520428 +step:4202 train loss:3.532691 +step:4203 train loss:3.546425 +step:4204 train loss:3.507854 +step:4205 train loss:3.551864 +step:4206 train loss:3.520749 +step:4207 train loss:3.536408 +step:4208 train loss:3.529677 +step:4209 train loss:3.521251 +step:4210 train loss:3.566322 +step:4211 train loss:3.612954 +step:4212 train loss:3.626138 +step:4213 train loss:3.484122 +step:4214 train loss:3.533223 +step:4215 train loss:3.494531 +step:4216 train loss:3.485535 +step:4217 train loss:3.470219 +step:4218 train loss:3.495552 +step:4219 train loss:3.468422 +step:4220 train loss:3.526097 +step:4221 train loss:3.523547 +step:4222 train loss:3.535330 +step:4223 train loss:3.500204 +step:4224 train loss:3.516060 +step:4225 train loss:3.488240 +step:4226 train loss:3.524308 +step:4227 train loss:3.550616 +step:4228 train loss:3.494914 +step:4229 train loss:3.499488 +step:4230 train loss:3.456587 +step:4231 train loss:3.509055 +step:4232 train loss:3.478586 +step:4233 train loss:3.534396 +step:4234 train loss:3.498586 +step:4235 train loss:3.523837 +step:4236 train loss:3.574155 +step:4237 train loss:3.533226 +step:4238 train loss:3.506314 +step:4239 train loss:3.572000 +step:4240 train loss:3.482423 +step:4241 train 
loss:3.577360 +step:4242 train loss:3.534993 +step:4243 train loss:3.503568 +step:4244 train loss:3.504497 +step:4245 train loss:3.518737 +step:4246 train loss:3.536865 +step:4247 train loss:3.544799 +step:4248 train loss:3.581911 +step:4249 train loss:3.510634 +step:4250 validation loss:3.494664 +step:4250 train loss:3.508204 +step:4251 train loss:3.507020 +step:4252 train loss:3.529609 +step:4253 train loss:3.527426 +step:4254 train loss:3.582585 +step:4255 train loss:3.534765 +step:4256 train loss:3.529536 +step:4257 train loss:3.513947 +step:4258 train loss:3.567551 +step:4259 train loss:3.562888 +step:4260 train loss:3.517004 +step:4261 train loss:3.540192 +step:4262 train loss:3.508771 +step:4263 train loss:3.520068 +step:4264 train loss:3.505413 +step:4265 train loss:3.494689 +step:4266 train loss:3.527636 +step:4267 train loss:3.465585 +step:4268 train loss:3.520606 +step:4269 train loss:3.458424 +step:4270 train loss:3.544801 +step:4271 train loss:3.572570 +step:4272 train loss:3.526273 +step:4273 train loss:3.511510 +step:4274 train loss:3.457058 +step:4275 train loss:3.552693 +step:4276 train loss:3.522394 +step:4277 train loss:3.588074 +step:4278 train loss:3.495532 +step:4279 train loss:3.552318 +step:4280 train loss:3.629154 +step:4281 train loss:3.653732 +step:4282 train loss:3.505731 +step:4283 train loss:3.527238 +step:4284 train loss:3.560703 +step:4285 train loss:3.562915 +step:4286 train loss:3.493065 +step:4287 train loss:3.534283 +step:4288 train loss:3.515969 +step:4289 train loss:3.614563 +step:4290 train loss:3.485471 +step:4291 train loss:3.505873 +step:4292 train loss:3.495467 +step:4293 train loss:3.514056 +step:4294 train loss:3.520611 +step:4295 train loss:3.520915 +step:4296 train loss:3.475165 +step:4297 train loss:3.525504 +step:4298 train loss:3.540438 +step:4299 train loss:3.509200 +step:4300 train loss:3.577685 +step:4301 train loss:3.609583 +step:4302 train loss:3.598425 +step:4303 train loss:3.539867 +step:4304 train loss:3.518297 +step:4305 train loss:3.598155 +step:4306 train loss:3.528438 +step:4307 train loss:3.562293 +step:4308 train loss:3.645141 +step:4309 train loss:3.563815 +step:4310 train loss:3.529974 +step:4311 train loss:3.559082 +step:4312 train loss:3.566458 +step:4313 train loss:3.568375 +step:4314 train loss:3.603004 +step:4315 train loss:3.651754 +step:4316 train loss:3.620916 +step:4317 train loss:3.576009 +step:4318 train loss:3.617515 +step:4319 train loss:3.610090 +step:4320 train loss:3.594716 +step:4321 train loss:3.691003 +step:4322 train loss:3.541169 +step:4323 train loss:3.612667 +step:4324 train loss:3.648256 +step:4325 train loss:3.578891 +step:4326 train loss:3.580020 +step:4327 train loss:3.532248 +step:4328 train loss:3.522618 +step:4329 train loss:3.530580 +step:4330 train loss:3.578410 +step:4331 train loss:3.526911 +step:4332 train loss:3.453603 +step:4333 train loss:3.540557 +step:4334 train loss:3.573797 +step:4335 train loss:3.525269 +step:4336 train loss:3.588568 +step:4337 train loss:3.538809 +step:4338 train loss:3.570162 +step:4339 train loss:3.525004 +step:4340 train loss:3.543335 +step:4341 train loss:3.540707 +step:4342 train loss:3.520447 +step:4343 train loss:3.575591 +step:4344 train loss:3.617184 +step:4345 train loss:3.688033 +step:4346 train loss:3.561405 +step:4347 train loss:3.537829 +step:4348 train loss:3.576279 +step:4349 train loss:3.548322 +step:4350 train loss:3.578457 +step:4351 train loss:3.537594 +step:4352 train loss:3.601765 +step:4353 train loss:3.597773 +step:4354 train loss:3.532581 
+step:4355 train loss:3.527365 +step:4356 train loss:3.558027 +step:4357 train loss:3.508233 +step:4358 train loss:3.563036 +step:4359 train loss:3.592100 +step:4360 train loss:3.578521 +step:4361 train loss:3.542066 +step:4362 train loss:3.531484 +step:4363 train loss:3.551237 +step:4364 train loss:3.594945 +step:4365 train loss:3.589060 +step:4366 train loss:3.519568 +step:4367 train loss:3.537874 +step:4368 train loss:3.550708 +step:4369 train loss:3.475522 +step:4370 train loss:3.649525 +step:4371 train loss:3.711324 +step:4372 train loss:3.535249 +step:4373 train loss:3.557521 +step:4374 train loss:3.591777 +step:4375 train loss:3.573617 +step:4376 train loss:3.462559 +step:4377 train loss:3.541058 +step:4378 train loss:3.504045 +step:4379 train loss:3.552200 +step:4380 train loss:3.588815 +step:4381 train loss:3.546974 +step:4382 train loss:3.564185 +step:4383 train loss:3.554211 +step:4384 train loss:3.552721 +step:4385 train loss:3.565795 +step:4386 train loss:3.553960 +step:4387 train loss:3.521041 +step:4388 train loss:3.540446 +step:4389 train loss:3.577949 +step:4390 train loss:3.579487 +step:4391 train loss:3.542943 +step:4392 train loss:3.562951 +step:4393 train loss:3.671509 +step:4394 train loss:3.546282 +step:4395 train loss:3.576044 +step:4396 train loss:3.556219 +step:4397 train loss:3.526985 +step:4398 train loss:3.566094 +step:4399 train loss:3.637580 +step:4400 train loss:3.609468 +step:4401 train loss:3.523860 +step:4402 train loss:3.545950 +step:4403 train loss:3.564710 +step:4404 train loss:3.634064 +step:4405 train loss:3.588577 +step:4406 train loss:3.628362 +step:4407 train loss:3.551084 +step:4408 train loss:3.491723 +step:4409 train loss:3.589981 +step:4410 train loss:3.656122 +step:4411 train loss:3.559200 +step:4412 train loss:3.565320 +step:4413 train loss:3.523376 +step:4414 train loss:3.558624 +step:4415 train loss:3.565524 +step:4416 train loss:3.678044 +step:4417 train loss:3.550843 +step:4418 train loss:3.460032 +step:4419 train loss:3.644487 +step:4420 train loss:3.556801 +step:4421 train loss:3.580510 +step:4422 train loss:3.580182 +step:4423 train loss:3.529978 +step:4424 train loss:3.535342 +step:4425 train loss:3.543009 +step:4426 train loss:3.587113 +step:4427 train loss:3.570118 +step:4428 train loss:3.599273 +step:4429 train loss:3.684349 +step:4430 train loss:3.583995 +step:4431 train loss:3.535848 +step:4432 train loss:3.526481 +step:4433 train loss:3.629798 +step:4434 train loss:3.674979 +step:4435 train loss:3.544319 +step:4436 train loss:3.553089 +step:4437 train loss:3.519138 +step:4438 train loss:3.508002 +step:4439 train loss:3.547050 +step:4440 train loss:3.530779 +step:4441 train loss:3.562614 +step:4442 train loss:3.522397 +step:4443 train loss:3.541697 +step:4444 train loss:3.556955 +step:4445 train loss:3.561578 +step:4446 train loss:3.537498 +step:4447 train loss:3.844450 +step:4448 train loss:3.602121 +step:4449 train loss:3.574903 +step:4450 train loss:3.568644 +step:4451 train loss:3.593089 +step:4452 train loss:3.534254 +step:4453 train loss:3.534334 +step:4454 train loss:3.523530 +step:4455 train loss:3.548650 +step:4456 train loss:3.542966 +step:4457 train loss:3.510859 +step:4458 train loss:3.569980 +step:4459 train loss:3.578551 +step:4460 train loss:3.504641 +step:4461 train loss:3.557342 +step:4462 train loss:3.570634 +step:4463 train loss:3.572091 +step:4464 train loss:3.560743 +step:4465 train loss:3.549896 +step:4466 train loss:3.540043 +step:4467 train loss:3.478049 +step:4468 train loss:3.545211 +step:4469 train 
loss:3.601250 +step:4470 train loss:3.548056 +step:4471 train loss:3.556380 +step:4472 train loss:3.568710 +step:4473 train loss:3.562022 +step:4474 train loss:3.532765 +step:4475 train loss:3.544887 +step:4476 train loss:3.568725 +step:4477 train loss:3.515976 +step:4478 train loss:3.533660 +step:4479 train loss:3.534703 +step:4480 train loss:3.535899 +step:4481 train loss:3.576276 +step:4482 train loss:3.567078 +step:4483 train loss:3.602139 +step:4484 train loss:3.586777 +step:4485 train loss:3.609328 +step:4486 train loss:3.555423 +step:4487 train loss:3.580906 +step:4488 train loss:3.570159 +step:4489 train loss:3.548320 +step:4490 train loss:3.563099 +step:4491 train loss:3.555369 +step:4492 train loss:3.564182 +step:4493 train loss:3.615283 +step:4494 train loss:3.546118 +step:4495 train loss:3.581325 +step:4496 train loss:3.595631 +step:4497 train loss:3.580988 +step:4498 train loss:3.510055 +step:4499 train loss:3.573001 +step:4500 validation loss:3.494839 total_sharp:1.8459e-02 L1_sharp:1.9431e-02 L2_sharp:8.1960e-02 L3_sharp:5.7705e-02 L4_sharp:3.0880e-03 L5_sharp:1.4337e-03 L6_sharp:1.7028e-03 L7_sharp:1.6287e-03 L8_sharp:1.4335e-03 L9_sharp:9.0849e-04 L10_sharp:5.0299e-04 L11_sharp:5.7375e-04 L12_sharp:4.2932e-04 total_fnorm:2.3326e+00 total_l1_linf:1.9792e+04 total_spectral:2.3326e+00 L1_fnorm:5.5119e-01 L2_fnorm:3.5869e-01 L3_fnorm:3.6309e-01 L4_fnorm:5.2177e-01 L5_fnorm:5.8253e-01 L6_fnorm:5.8941e-01 L7_fnorm:5.9888e-01 L8_fnorm:6.0110e-01 L9_fnorm:5.9662e-01 L10_fnorm:5.9619e-01 L11_fnorm:5.8313e-01 L12_fnorm:6.0007e-01 L1_l1linf:4.2368e-01 L2_l1linf:4.2129e-01 L3_l1linf:4.5768e-01 L4_l1linf:4.1086e-01 L5_l1linf:4.0250e-01 L6_l1linf:4.0665e-01 L7_l1linf:4.0845e-01 L8_l1linf:4.0729e-01 L9_l1linf:4.0683e-01 L10_l1linf:4.1249e-01 L11_l1linf:4.1204e-01 L12_l1linf:3.9884e-01 L1_spectral:1.2040e-02 L2_spectral:1.3773e-02 L3_spectral:1.8818e-02 L4_spectral:1.2039e-02 L5_spectral:1.2048e-02 L6_spectral:1.2047e-02 L7_spectral:1.2054e-02 L8_spectral:1.2056e-02 L9_spectral:1.2048e-02 L10_spectral:1.2071e-02 L11_spectral:1.2045e-02 L12_spectral:1.2044e-02 v_norm:2.3326e+00 cos_v_-g_hvp:2.4937e-02 g_hvp_norm:1.1498e+00 cos_v_-g_t:2.6640e-02 g_t_norm:1.1839e+00 hv_norm:3.2009e+00 cos_v_hv:1.3452e-02 hg_norm:5.8819e+02 cos_g_hg:2.6966e-01 v_par:5.5525e-03 v_perp:2.3326e+00 L1_cos_v_neg_g:2.0170e-02 L1_v_norm:5.5119e-01 L2_cos_v_neg_g:5.1827e-02 L2_v_norm:3.5869e-01 L3_cos_v_neg_g:4.7559e-02 L3_v_norm:3.6309e-01 L4_cos_v_neg_g:3.4992e-02 L4_v_norm:5.2177e-01 L5_cos_v_neg_g:3.2322e-02 L5_v_norm:5.8253e-01 L6_cos_v_neg_g:3.2546e-02 L6_v_norm:5.8941e-01 L7_cos_v_neg_g:3.1286e-02 L7_v_norm:5.9888e-01 L8_cos_v_neg_g:3.0478e-02 L8_v_norm:6.0111e-01 L9_cos_v_neg_g:3.1331e-02 L9_v_norm:5.9662e-01 L10_cos_v_neg_g:3.3798e-02 L10_v_norm:5.9619e-01 L11_cos_v_neg_g:4.3354e-02 L11_v_norm:5.8313e-01 L12_cos_v_neg_g:7.8125e-02 L12_v_norm:6.0007e-01 +step:4500 train loss:3.608284 +step:4501 train loss:3.575484 +step:4502 train loss:3.515235 +step:4503 train loss:3.556680 +step:4504 train loss:3.583476 +step:4505 train loss:3.597982 +step:4506 train loss:3.570577 +step:4507 train loss:3.606101 +step:4508 train loss:3.532795 +step:4509 train loss:3.558189 +step:4510 train loss:3.569165 +step:4511 train loss:3.571496 +step:4512 train loss:3.629244 +step:4513 train loss:3.585714 +step:4514 train loss:3.525643 +step:4515 train loss:3.585543 +step:4516 train loss:3.531195 +step:4517 train loss:3.535326 +step:4518 train loss:3.534765 +step:4519 train loss:3.603974 +step:4520 train loss:3.561539 +step:4521 train 
loss:3.575150 +step:4522 train loss:3.575301 +step:4523 train loss:3.564639 +step:4524 train loss:3.541054 +step:4525 train loss:3.611035 +step:4526 train loss:3.631248 +step:4527 train loss:3.615344 +step:4528 train loss:3.588554 +step:4529 train loss:3.622964 +step:4530 train loss:3.534924 +step:4531 train loss:3.512467 +step:4532 train loss:3.552154 +step:4533 train loss:3.564850 +step:4534 train loss:3.551775 +step:4535 train loss:3.556887 +step:4536 train loss:3.560422 +step:4537 train loss:3.530507 +step:4538 train loss:3.517784 +step:4539 train loss:3.621961 +step:4540 train loss:3.542488 +step:4541 train loss:3.565350 +step:4542 train loss:3.611291 +step:4543 train loss:3.519540 +step:4544 train loss:3.547957 +step:4545 train loss:3.656401 +step:4546 train loss:3.577891 +step:4547 train loss:3.621763 +step:4548 train loss:3.539357 +step:4549 train loss:3.546435 +step:4550 train loss:3.619773 +step:4551 train loss:3.622111 +step:4552 train loss:3.549958 +step:4553 train loss:3.573810 +step:4554 train loss:3.516158 +step:4555 train loss:3.554742 +step:4556 train loss:3.551990 +step:4557 train loss:3.538040 +step:4558 train loss:3.598950 +step:4559 train loss:3.564507 +step:4560 train loss:3.585717 +step:4561 train loss:3.539889 +step:4562 train loss:3.550648 +step:4563 train loss:3.559751 +step:4564 train loss:3.615717 +step:4565 train loss:3.545291 +step:4566 train loss:3.625035 +step:4567 train loss:3.560647 +step:4568 train loss:3.573128 +step:4569 train loss:3.655994 +step:4570 train loss:3.553643 +step:4571 train loss:3.513277 +step:4572 train loss:3.546961 +step:4573 train loss:3.578348 +step:4574 train loss:3.588133 +step:4575 train loss:3.608757 +step:4576 train loss:3.545330 +step:4577 train loss:3.552677 +step:4578 train loss:3.572886 +step:4579 train loss:3.565634 +step:4580 train loss:3.609580 +step:4581 train loss:3.646883 +step:4582 train loss:3.585640 +step:4583 train loss:3.558510 +step:4584 train loss:3.528902 +step:4585 train loss:3.683251 +step:4586 train loss:3.533209 +step:4587 train loss:3.528097 +step:4588 train loss:3.513657 +step:4589 train loss:3.611899 +step:4590 train loss:3.536195 +step:4591 train loss:3.517875 +step:4592 train loss:3.504267 +step:4593 train loss:3.485154 +step:4594 train loss:3.521109 +step:4595 train loss:3.561588 +step:4596 train loss:3.551544 +step:4597 train loss:3.546751 +step:4598 train loss:3.497115 +step:4599 train loss:3.556715 +step:4600 train loss:3.591316 +step:4601 train loss:3.521949 +step:4602 train loss:3.591824 +step:4603 train loss:3.561944 +step:4604 train loss:3.574511 +step:4605 train loss:3.539969 +step:4606 train loss:3.589685 +step:4607 train loss:3.526584 +step:4608 train loss:3.521525 +step:4609 train loss:3.564753 +step:4610 train loss:3.614779 +step:4611 train loss:3.567241 +step:4612 train loss:3.545152 +step:4613 train loss:3.483349 +step:4614 train loss:3.556976 +step:4615 train loss:3.515990 +step:4616 train loss:3.569670 +step:4617 train loss:3.516366 +step:4618 train loss:3.575115 +step:4619 train loss:3.586078 +step:4620 train loss:3.518686 +step:4621 train loss:3.528607 +step:4622 train loss:3.510917 +step:4623 train loss:3.514431 +step:4624 train loss:3.544061 +step:4625 train loss:3.579853 +step:4626 train loss:3.545134 +step:4627 train loss:3.528842 +step:4628 train loss:3.579077 +step:4629 train loss:3.537520 +step:4630 train loss:3.534825 +step:4631 train loss:3.576485 +step:4632 train loss:3.493913 +step:4633 train loss:3.598114 +step:4634 train loss:3.497155 +step:4635 train loss:3.542233 
+step:4636 train loss:3.600160 +step:4637 train loss:3.590477 +step:4638 train loss:3.555946 +step:4639 train loss:3.531642 +step:4640 train loss:3.510849 +step:4641 train loss:3.551501 +step:4642 train loss:3.528451 +step:4643 train loss:3.535763 +step:4644 train loss:3.575849 +step:4645 train loss:3.570347 +step:4646 train loss:3.472824 +step:4647 train loss:3.535916 +step:4648 train loss:3.467546 +step:4649 train loss:3.467516 +step:4650 train loss:3.544787 +step:4651 train loss:3.541815 +step:4652 train loss:3.508839 +step:4653 train loss:3.529423 +step:4654 train loss:3.515682 +step:4655 train loss:3.533665 +step:4656 train loss:3.579942 +step:4657 train loss:3.511597 +step:4658 train loss:3.541450 +step:4659 train loss:3.493563 +step:4660 train loss:3.575841 +step:4661 train loss:3.611935 +step:4662 train loss:3.570315 +step:4663 train loss:3.508146 +step:4664 train loss:3.521789 +step:4665 train loss:3.498036 +step:4666 train loss:3.522014 +step:4667 train loss:3.577486 +step:4668 train loss:3.570111 +step:4669 train loss:3.543101 +step:4670 train loss:3.485048 +step:4671 train loss:3.577464 +step:4672 train loss:3.596031 +step:4673 train loss:3.542536 +step:4674 train loss:3.537402 +step:4675 train loss:3.536511 +step:4676 train loss:3.540169 +step:4677 train loss:3.513307 +step:4678 train loss:3.556270 +step:4679 train loss:3.557395 +step:4680 train loss:3.555727 +step:4681 train loss:3.509141 +step:4682 train loss:3.563043 +step:4683 train loss:3.543307 +step:4684 train loss:3.583041 +step:4685 train loss:3.549966 +step:4686 train loss:3.555982 +step:4687 train loss:3.562503 +step:4688 train loss:3.525922 +step:4689 train loss:3.576673 +step:4690 train loss:3.560023 +step:4691 train loss:3.594829 +step:4692 train loss:3.554642 +step:4693 train loss:3.538670 +step:4694 train loss:3.560671 +step:4695 train loss:3.560447 +step:4696 train loss:3.546389 +step:4697 train loss:3.568473 +step:4698 train loss:3.525259 +step:4699 train loss:3.512144 +step:4700 train loss:3.520781 +step:4701 train loss:3.555429 +step:4702 train loss:3.543885 +step:4703 train loss:3.588597 +step:4704 train loss:3.630536 +step:4705 train loss:3.647355 +step:4706 train loss:3.600880 +step:4707 train loss:3.596524 +step:4708 train loss:3.550536 +step:4709 train loss:3.565645 +step:4710 train loss:3.498815 +step:4711 train loss:3.540531 +step:4712 train loss:3.559662 +step:4713 train loss:3.551419 +step:4714 train loss:3.537160 +step:4715 train loss:3.521108 +step:4716 train loss:3.565212 +step:4717 train loss:3.497765 +step:4718 train loss:3.570377 +step:4719 train loss:3.552407 +step:4720 train loss:3.536403 +step:4721 train loss:3.592243 +step:4722 train loss:3.525615 +step:4723 train loss:3.562812 +step:4724 train loss:3.495601 +step:4725 train loss:3.535240 +step:4726 train loss:3.570768 +step:4727 train loss:3.561929 +step:4728 train loss:3.520887 +step:4729 train loss:3.554155 +step:4730 train loss:3.591652 +step:4731 train loss:3.544356 +step:4732 train loss:3.572250 +step:4733 train loss:3.660836 +step:4734 train loss:3.538160 +step:4735 train loss:3.495377 +step:4736 train loss:3.553691 +step:4737 train loss:3.622342 +step:4738 train loss:3.568456 +step:4739 train loss:3.544144 +step:4740 train loss:3.528141 +step:4741 train loss:3.585112 +step:4742 train loss:3.592290 +step:4743 train loss:3.596265 +step:4744 train loss:3.563075 +step:4745 train loss:3.522080 +step:4746 train loss:3.575387 +step:4747 train loss:3.589352 +step:4748 train loss:3.577630 +step:4749 train loss:3.532190 +step:4750 
validation loss:3.479490 +step:4750 train loss:3.551812 +step:4751 train loss:3.622764 +step:4752 train loss:3.591661 +step:4753 train loss:3.577639 +step:4754 train loss:3.575408 +step:4755 train loss:3.526035 +step:4756 train loss:3.500863 +step:4757 train loss:3.540233 +step:4758 train loss:3.535678 +step:4759 train loss:3.521407 +step:4760 train loss:3.557446 +step:4761 train loss:3.566980 +step:4762 train loss:3.657373 +step:4763 train loss:3.491760 +step:4764 train loss:3.576984 +step:4765 train loss:3.661399 +step:4766 train loss:3.642273 +step:4767 train loss:3.534575 +step:4768 train loss:3.543314 +step:4769 train loss:3.525478 +step:4770 train loss:3.542359 +step:4771 train loss:3.520549 +step:4772 train loss:3.502121 +step:4773 train loss:3.558896 +step:4774 train loss:3.522494 +step:4775 train loss:3.541624 +step:4776 train loss:3.522928 +step:4777 train loss:3.541075 +step:4778 train loss:3.520294 +step:4779 train loss:3.556975 +step:4780 train loss:3.550389 +step:4781 train loss:3.567366 +step:4782 train loss:3.649864 +step:4783 train loss:3.532239 +step:4784 train loss:3.536950 +step:4785 train loss:3.528613 +step:4786 train loss:3.578469 +step:4787 train loss:3.519546 +step:4788 train loss:3.521793 +step:4789 train loss:3.526641 +step:4790 train loss:3.548873 +step:4791 train loss:3.584752 +step:4792 train loss:3.530047 +step:4793 train loss:3.509857 +step:4794 train loss:3.460306 +step:4795 train loss:3.515934 +step:4796 train loss:3.542660 +step:4797 train loss:3.565645 +step:4798 train loss:3.579382 +step:4799 train loss:3.510291 +step:4800 train loss:3.547032 +step:4801 train loss:3.552938 +step:4802 train loss:3.500203 +step:4803 train loss:3.531361 +step:4804 train loss:3.602057 +step:4805 train loss:3.568048 +step:4806 train loss:3.529168 +step:4807 train loss:3.581600 +step:4808 train loss:3.516226 +step:4809 train loss:3.546704 +step:4810 train loss:3.529172 +step:4811 train loss:3.565478 +step:4812 train loss:3.638138 +step:4813 train loss:3.694782 +step:4814 train loss:3.564456 +step:4815 train loss:3.564055 +step:4816 train loss:3.541438 +step:4817 train loss:3.494632 +step:4818 train loss:3.498322 +step:4819 train loss:3.599546 +step:4820 train loss:3.579132 +step:4821 train loss:3.581644 +step:4822 train loss:3.614615 +step:4823 train loss:3.566303 +step:4824 train loss:3.514829 +step:4825 train loss:3.510441 +step:4826 train loss:3.549456 +step:4827 train loss:3.522849 +step:4828 train loss:3.516095 +step:4829 train loss:3.580598 +step:4830 train loss:3.578620 +step:4831 train loss:3.527356 +step:4832 train loss:3.578989 +step:4833 train loss:3.547255 +step:4834 train loss:3.518340 +step:4835 train loss:3.529143 +step:4836 train loss:3.549316 +step:4837 train loss:3.605448 +step:4838 train loss:3.692386 +step:4839 train loss:3.545308 +step:4840 train loss:3.518091 +step:4841 train loss:3.566892 +step:4842 train loss:3.516765 +step:4843 train loss:3.498852 +step:4844 train loss:3.540258 +step:4845 train loss:3.536474 +step:4846 train loss:3.522546 +step:4847 train loss:3.626961 +step:4848 train loss:3.556043 +step:4849 train loss:3.533730 +step:4850 train loss:3.528146 +step:4851 train loss:3.527125 +step:4852 train loss:3.507287 +step:4853 train loss:3.529834 +step:4854 train loss:3.555167 +step:4855 train loss:3.581142 +step:4856 train loss:3.639370 +step:4857 train loss:3.591105 +step:4858 train loss:3.555546 +step:4859 train loss:3.506057 +step:4860 train loss:3.571770 +step:4861 train loss:3.469501 +step:4862 train loss:3.576794 +step:4863 train 
loss:3.563789 +step:4864 train loss:3.548977 +step:4865 train loss:3.519514 +step:4866 train loss:3.507600 +step:4867 train loss:3.571679 +step:4868 train loss:3.540994 +step:4869 train loss:3.492607 +step:4870 train loss:3.554908 +step:4871 train loss:3.505664 +step:4872 train loss:3.528039 +step:4873 train loss:3.584743 +step:4874 train loss:3.540939 +step:4875 train loss:3.572126 +step:4876 train loss:3.571781 +step:4877 train loss:3.570102 +step:4878 train loss:3.541877 +step:4879 train loss:3.552646 +step:4880 train loss:3.556085 +step:4881 train loss:3.490987 +step:4882 train loss:3.517316 +step:4883 train loss:3.555454 +step:4884 train loss:3.514919 +step:4885 train loss:3.557567 +step:4886 train loss:3.540365 +step:4887 train loss:3.628054 +step:4888 train loss:3.546294 +step:4889 train loss:3.574946 +step:4890 train loss:3.499429 +step:4891 train loss:3.534724 +step:4892 train loss:3.567736 +step:4893 train loss:3.559160 +step:4894 train loss:3.533611 +step:4895 train loss:3.598143 +step:4896 train loss:3.472804 +step:4897 train loss:3.515540 +step:4898 train loss:3.616742 +step:4899 train loss:3.604807 +step:4900 train loss:3.568246 +step:4901 train loss:3.487548 +step:4902 train loss:3.544880 +step:4903 train loss:3.595933 +step:4904 train loss:3.535910 +step:4905 train loss:3.534065 +step:4906 train loss:3.551193 +step:4907 train loss:3.540166 +step:4908 train loss:3.520447 +step:4909 train loss:3.565375 +step:4910 train loss:3.532980 +step:4911 train loss:3.520151 +step:4912 train loss:3.546910 +step:4913 train loss:3.538212 +step:4914 train loss:3.515204 +step:4915 train loss:3.565294 +step:4916 train loss:3.584692 +step:4917 train loss:3.528608 +step:4918 train loss:3.581446 +step:4919 train loss:3.544751 +step:4920 train loss:3.579772 +step:4921 train loss:3.546122 +step:4922 train loss:3.579174 +step:4923 train loss:3.486142 +step:4924 train loss:3.506423 +step:4925 train loss:3.487267 +step:4926 train loss:3.571566 +step:4927 train loss:3.576283 +step:4928 train loss:3.512889 +step:4929 train loss:3.575831 +step:4930 train loss:3.522298 +step:4931 train loss:3.536495 +step:4932 train loss:3.555942 +step:4933 train loss:3.512707 +step:4934 train loss:3.607580 +step:4935 train loss:3.532711 +step:4936 train loss:3.486353 +step:4937 train loss:3.538290 +step:4938 train loss:3.529970 +step:4939 train loss:3.534518 +step:4940 train loss:3.526486 +step:4941 train loss:3.511045 +step:4942 train loss:3.606594 +step:4943 train loss:3.522255 +step:4944 train loss:3.538930 +step:4945 train loss:3.524073 +step:4946 train loss:3.561196 +step:4947 train loss:3.567330 +step:4948 train loss:3.528774 +step:4949 train loss:3.518270 +step:4950 train loss:3.660968 +step:4951 train loss:3.563728 +step:4952 train loss:3.570803 +step:4953 train loss:3.542855 +step:4954 train loss:3.558862 +step:4955 train loss:3.547210 +step:4956 train loss:3.553632 +step:4957 train loss:3.563480 +step:4958 train loss:3.527417 +step:4959 train loss:3.491684 +step:4960 train loss:3.545666 +step:4961 train loss:3.540818 +step:4962 train loss:3.503223 +step:4963 train loss:3.581663 +step:4964 train loss:3.543314 +step:4965 train loss:3.507418 +step:4966 train loss:3.605359 +step:4967 train loss:3.619757 +step:4968 train loss:3.513440 +step:4969 train loss:3.520539 +step:4970 train loss:3.554823 +step:4971 train loss:3.499003 +step:4972 train loss:3.582178 +step:4973 train loss:3.576636 +step:4974 train loss:3.537446 +step:4975 train loss:3.567768 +step:4976 train loss:3.534992 +step:4977 train loss:3.493368 
+step:4978 train loss:3.548551 +step:4979 train loss:3.547386 +step:4980 train loss:3.493347 +step:4981 train loss:3.471751 +step:4982 train loss:3.738307 +step:4983 train loss:3.580318 +step:4984 train loss:3.547027 +step:4985 train loss:3.502595 +step:4986 train loss:3.492440 +step:4987 train loss:3.563806 +step:4988 train loss:3.545047 +step:4989 train loss:3.552835 +step:4990 train loss:3.521406 +step:4991 train loss:3.493578 +step:4992 train loss:3.539765 +step:4993 train loss:3.501793 +step:4994 train loss:3.528664 +step:4995 train loss:3.546485 +step:4996 train loss:3.546989 +step:4997 train loss:3.583270 +step:4998 train loss:3.515334 +step:4999 train loss:3.513932 +step:5000 validation loss:3.486055 total_sharp:7.7294e-03 L1_sharp:1.1693e-02 L2_sharp:1.2497e-02 L3_sharp:9.9015e-03 L4_sharp:1.7561e-03 L5_sharp:9.4294e-04 L6_sharp:1.2050e-03 L7_sharp:1.2729e-03 L8_sharp:1.1537e-03 L9_sharp:8.2978e-04 L10_sharp:4.7086e-04 L11_sharp:5.6904e-04 L12_sharp:4.3753e-04 total_fnorm:2.3543e+00 total_l1_linf:2.0060e+04 total_spectral:2.3543e+00 L1_fnorm:5.5123e-01 L2_fnorm:4.0839e-01 L3_fnorm:4.2177e-01 L4_fnorm:5.2985e-01 L5_fnorm:5.8235e-01 L6_fnorm:5.9004e-01 L7_fnorm:5.9950e-01 L8_fnorm:5.9990e-01 L9_fnorm:5.9595e-01 L10_fnorm:5.9610e-01 L11_fnorm:5.7988e-01 L12_fnorm:5.9942e-01 L1_l1linf:4.2758e-01 L2_l1linf:3.7644e-01 L3_l1linf:4.6361e-01 L4_l1linf:4.0082e-01 L5_l1linf:4.0426e-01 L6_l1linf:4.0702e-01 L7_l1linf:4.0689e-01 L8_l1linf:4.0549e-01 L9_l1linf:4.0565e-01 L10_l1linf:4.0954e-01 L11_l1linf:4.1738e-01 L12_l1linf:4.1740e-01 L1_spectral:1.2043e-02 L2_spectral:1.2033e-02 L3_spectral:1.9072e-02 L4_spectral:1.2043e-02 L5_spectral:1.2044e-02 L6_spectral:1.2048e-02 L7_spectral:1.2051e-02 L8_spectral:1.2056e-02 L9_spectral:1.2047e-02 L10_spectral:1.2058e-02 L11_spectral:1.2045e-02 L12_spectral:1.2044e-02 v_norm:2.3543e+00 cos_v_-g_hvp:2.5974e-02 g_hvp_norm:7.8968e-01 cos_v_-g_t:3.0242e-02 g_t_norm:7.8876e-01 hv_norm:1.7265e+00 cos_v_hv:1.0540e-02 hg_norm:6.6646e+02 cos_g_hg:9.7018e-02 v_par:6.7527e-03 v_perp:2.3543e+00 L1_cos_v_neg_g:1.5494e-02 L1_v_norm:5.5123e-01 L2_cos_v_neg_g:2.7124e-02 L2_v_norm:4.0839e-01 L3_cos_v_neg_g:3.6802e-02 L3_v_norm:4.2177e-01 L4_cos_v_neg_g:2.8238e-02 L4_v_norm:5.2985e-01 L5_cos_v_neg_g:2.8839e-02 L5_v_norm:5.8235e-01 L6_cos_v_neg_g:3.1065e-02 L6_v_norm:5.9004e-01 L7_cos_v_neg_g:2.9503e-02 L7_v_norm:5.9950e-01 L8_cos_v_neg_g:3.0753e-02 L8_v_norm:5.9990e-01 L9_cos_v_neg_g:3.0889e-02 L9_v_norm:5.9595e-01 L10_cos_v_neg_g:3.4537e-02 L10_v_norm:5.9610e-01 L11_cos_v_neg_g:4.3607e-02 L11_v_norm:5.7988e-01 L12_cos_v_neg_g:7.3506e-02 L12_v_norm:5.9942e-01 +step:5000 train loss:3.569846 +step:5001 train loss:3.625298 +step:5002 train loss:3.511419 +step:5003 train loss:3.469653 +step:5004 train loss:3.533720 +step:5005 train loss:3.526206 +step:5006 train loss:3.489655 +step:5007 train loss:3.506825 +step:5008 train loss:3.491580 +step:5009 train loss:3.583500 +step:5010 train loss:3.569913 +step:5011 train loss:3.537260 +step:5012 train loss:3.557558 +step:5013 train loss:3.555206 +step:5014 train loss:3.630646 +step:5015 train loss:3.511930 +step:5016 train loss:3.464788 +step:5017 train loss:3.493428 +step:5018 train loss:3.470011 +step:5019 train loss:3.567626 +step:5020 train loss:3.532926 +step:5021 train loss:3.507889 +step:5022 train loss:3.594321 +step:5023 train loss:3.520440 +step:5024 train loss:3.487775 +step:5025 train loss:3.518791 +step:5026 train loss:3.512592 +step:5027 train loss:3.597618 +step:5028 train loss:3.536848 +step:5029 train loss:3.559467 
+step:5030 train loss:3.565084 +step:5031 train loss:3.564646 +step:5032 train loss:3.542351 +step:5033 train loss:3.482396 +step:5034 train loss:3.539715 +step:5035 train loss:3.512229 +step:5036 train loss:3.517622 +step:5037 train loss:3.556966 +step:5038 train loss:3.597783 +step:5039 train loss:3.472603 +step:5040 train loss:3.527205 +step:5041 train loss:3.598974 +step:5042 train loss:3.547260 +step:5043 train loss:3.582102 +step:5044 train loss:3.526778 +step:5045 train loss:3.541841 +step:5046 train loss:3.584386 +step:5047 train loss:3.652726 +step:5048 train loss:3.538501 +step:5049 train loss:3.561579 +step:5050 train loss:3.524801 +step:5051 train loss:3.591356 +step:5052 train loss:3.556523 +step:5053 train loss:3.583212 +step:5054 train loss:3.537078 +step:5055 train loss:3.539403 +step:5056 train loss:3.527610 +step:5057 train loss:3.545660 +step:5058 train loss:3.514770 +step:5059 train loss:3.498431 +step:5060 train loss:3.632558 +step:5061 train loss:3.539090 +step:5062 train loss:3.661303 +step:5063 train loss:3.529128 +step:5064 train loss:3.569995 +step:5065 train loss:3.626317 +step:5066 train loss:3.614750 +step:5067 train loss:3.582017 +step:5068 train loss:3.650439 +step:5069 train loss:3.579911 +step:5070 train loss:3.610243 +step:5071 train loss:3.638933 +step:5072 train loss:3.605188 +step:5073 train loss:3.562400 +step:5074 train loss:3.662405 +step:5075 train loss:3.695460 +step:5076 train loss:3.611648 +step:5077 train loss:3.570652 +step:5078 train loss:3.524907 +step:5079 train loss:3.557166 +step:5080 train loss:3.590061 +step:5081 train loss:3.584662 +step:5082 train loss:3.621884 +step:5083 train loss:3.601357 +step:5084 train loss:3.612638 +step:5085 train loss:3.587730 +step:5086 train loss:3.621898 +step:5087 train loss:3.587978 +step:5088 train loss:3.533917 +step:5089 train loss:3.657062 +step:5090 train loss:3.728251 +step:5091 train loss:3.603090 +step:5092 train loss:3.547541 +step:5093 train loss:3.550626 +step:5094 train loss:3.548510 +step:5095 train loss:3.576068 +step:5096 train loss:3.509402 +step:5097 train loss:3.542396 +step:5098 train loss:3.566709 +step:5099 train loss:3.589812 +step:5100 train loss:3.597286 +step:5101 train loss:3.609489 +step:5102 train loss:3.596528 +step:5103 train loss:3.677926 +step:5104 train loss:3.556503 +step:5105 train loss:3.605259 +step:5106 train loss:3.595303 +step:5107 train loss:3.554501 +step:5108 train loss:3.578021 +step:5109 train loss:3.584056 +step:5110 train loss:3.584510 +step:5111 train loss:3.561604 +step:5112 train loss:3.524957 +step:5113 train loss:3.538324 +step:5114 train loss:3.563241 +step:5115 train loss:3.581280 +step:5116 train loss:3.528660 +step:5117 train loss:3.556256 +step:5118 train loss:3.535683 +step:5119 train loss:3.567801 +step:5120 train loss:3.558433 +step:5121 train loss:3.562746 +step:5122 train loss:3.598537 +step:5123 train loss:3.598022 +step:5124 train loss:3.566437 +step:5125 train loss:3.552175 +step:5126 train loss:3.571456 +step:5127 train loss:3.627731 +step:5128 train loss:3.530831 +step:5129 train loss:3.576493 +step:5130 train loss:3.515853 +step:5131 train loss:3.544194 +step:5132 train loss:3.548117 +step:5133 train loss:3.521386 +step:5134 train loss:3.501088 +step:5135 train loss:3.565166 +step:5136 train loss:3.588277 +step:5137 train loss:3.508135 +step:5138 train loss:3.537463 +step:5139 train loss:3.577455 +step:5140 train loss:3.554220 +step:5141 train loss:3.591820 +step:5142 train loss:3.542814 +step:5143 train loss:3.547971 +step:5144 train 
loss:3.572540 +step:5145 train loss:3.549666 +step:5146 train loss:3.555405 +step:5147 train loss:3.525350 +step:5148 train loss:3.549574 +step:5149 train loss:3.529505 +step:5150 train loss:3.567526 +step:5151 train loss:3.578074 +step:5152 train loss:3.602619 +step:5153 train loss:3.569359 +step:5154 train loss:3.522688 +step:5155 train loss:3.550626 +step:5156 train loss:3.501084 +step:5157 train loss:3.519909 +step:5158 train loss:3.538924 +step:5159 train loss:3.523450 +step:5160 train loss:3.524430 +step:5161 train loss:3.574277 +step:5162 train loss:3.552493 +step:5163 train loss:3.506989 +step:5164 train loss:3.529989 +step:5165 train loss:3.564823 +step:5166 train loss:3.493784 +step:5167 train loss:3.486300 +step:5168 train loss:3.492735 +step:5169 train loss:3.482069 +step:5170 train loss:3.648360 +step:5171 train loss:3.520091 +step:5172 train loss:3.522902 +step:5173 train loss:3.504058 +step:5174 train loss:3.465147 +step:5175 train loss:3.542450 +step:5176 train loss:3.552108 +step:5177 train loss:3.537847 +step:5178 train loss:3.504593 +step:5179 train loss:3.548863 +step:5180 train loss:3.533003 +step:5181 train loss:3.504755 +step:5182 train loss:3.556110 +step:5183 train loss:3.507359 +step:5184 train loss:3.469278 +step:5185 train loss:3.490054 +step:5186 train loss:3.518252 +step:5187 train loss:3.582035 +step:5188 train loss:3.543911 +step:5189 train loss:3.494700 +step:5190 train loss:3.531248 +step:5191 train loss:3.595120 +step:5192 train loss:3.516140 +step:5193 train loss:3.512443 +step:5194 train loss:3.512595 +step:5195 train loss:3.502674 +step:5196 train loss:3.566949 +step:5197 train loss:3.559540 +step:5198 train loss:3.486689 +step:5199 train loss:3.518804 +step:5200 train loss:3.505046 +step:5201 train loss:3.537620 +step:5202 train loss:3.482099 +step:5203 train loss:3.463888 +step:5204 train loss:3.484993 +step:5205 train loss:3.608110 +step:5206 train loss:3.506721 +step:5207 train loss:3.517906 +step:5208 train loss:3.522614 +step:5209 train loss:3.496933 +step:5210 train loss:3.473080 +step:5211 train loss:3.544514 +step:5212 train loss:3.532742 +step:5213 train loss:3.537497 +step:5214 train loss:3.551142 +step:5215 train loss:3.574931 +step:5216 train loss:3.518448 +step:5217 train loss:3.512691 +step:5218 train loss:3.462503 +step:5219 train loss:3.552389 +step:5220 train loss:3.552073 +step:5221 train loss:3.492132 +step:5222 train loss:3.564565 +step:5223 train loss:3.520130 +step:5224 train loss:3.532741 +step:5225 train loss:3.458621 +step:5226 train loss:3.534983 +step:5227 train loss:3.490610 +step:5228 train loss:3.464633 +step:5229 train loss:3.493675 +step:5230 train loss:3.555556 +step:5231 train loss:3.459642 +step:5232 train loss:3.503601 +step:5233 train loss:3.494956 +step:5234 train loss:3.439908 +step:5235 train loss:3.493113 +step:5236 train loss:3.432915 +step:5237 train loss:3.492945 +step:5238 train loss:3.476567 +step:5239 train loss:3.535045 +step:5240 train loss:3.468908 +step:5241 train loss:3.474656 +step:5242 train loss:3.519534 +step:5243 train loss:3.524053 +step:5244 train loss:3.521297 +step:5245 train loss:3.522716 +step:5246 train loss:3.486137 +step:5247 train loss:3.578070 +step:5248 train loss:3.537299 +step:5249 train loss:3.542265 +step:5250 validation loss:3.464923 +step:5250 train loss:3.505097 +step:5251 train loss:3.542636 +step:5252 train loss:3.518669 +step:5253 train loss:3.480964 +step:5254 train loss:3.521369 +step:5255 train loss:3.472463 +step:5256 train loss:3.582274 +step:5257 train loss:3.522406 
+step:5258 train loss:3.515742 +step:5259 train loss:3.533141 +step:5260 train loss:3.508170 +step:5261 train loss:3.559842 +step:5262 train loss:3.546609 +step:5263 train loss:3.546740 +step:5264 train loss:3.502296 +step:5265 train loss:3.556024 +step:5266 train loss:3.524167 +step:5267 train loss:3.549037 +step:5268 train loss:3.539104 +step:5269 train loss:3.540896 +step:5270 train loss:3.506887 +step:5271 train loss:3.539702 +step:5272 train loss:3.580212 +step:5273 train loss:3.599860 +step:5274 train loss:3.578951 +step:5275 train loss:3.591938 +step:5276 train loss:3.600466 +step:5277 train loss:3.524719 +step:5278 train loss:3.541423 +step:5279 train loss:3.570373 +step:5280 train loss:3.567364 +step:5281 train loss:3.524262 +step:5282 train loss:3.486693 +step:5283 train loss:3.595975 +step:5284 train loss:3.509946 +step:5285 train loss:3.536449 +step:5286 train loss:3.491575 +step:5287 train loss:3.513754 +step:5288 train loss:3.524941 +step:5289 train loss:3.549795 +step:5290 train loss:3.539445 +step:5291 train loss:3.539019 +step:5292 train loss:3.584139 +step:5293 train loss:3.512077 +step:5294 train loss:3.501750 +step:5295 train loss:3.533727 +step:5296 train loss:3.502065 +step:5297 train loss:3.535409 +step:5298 train loss:3.491892 +step:5299 train loss:3.489779 +step:5300 train loss:3.506670 +step:5301 train loss:3.530964 +step:5302 train loss:3.504260 +step:5303 train loss:3.511058 +step:5304 train loss:3.496041 +step:5305 train loss:3.493365 +step:5306 train loss:3.560040 +step:5307 train loss:3.502073 +step:5308 train loss:3.516308 +step:5309 train loss:3.465928 +step:5310 train loss:3.515728 +step:5311 train loss:3.498533 +step:5312 train loss:3.490278 +step:5313 train loss:3.501445 +step:5314 train loss:3.501301 +step:5315 train loss:3.520948 +step:5316 train loss:3.522517 +step:5317 train loss:3.480195 +step:5318 train loss:3.559955 +step:5319 train loss:3.493852 +step:5320 train loss:3.542486 +step:5321 train loss:3.530221 +step:5322 train loss:3.550520 +step:5323 train loss:3.483686 +step:5324 train loss:3.489687 +step:5325 train loss:3.487263 +step:5326 train loss:3.507776 +step:5327 train loss:3.543562 +step:5328 train loss:3.521558 +step:5329 train loss:3.476135 +step:5330 train loss:3.485355 +step:5331 train loss:3.537455 +step:5332 train loss:3.544139 +step:5333 train loss:3.462035 +step:5334 train loss:3.478441 +step:5335 train loss:3.530438 +step:5336 train loss:3.605050 +step:5337 train loss:3.514357 +step:5338 train loss:3.476024 +step:5339 train loss:3.540270 +step:5340 train loss:3.566834 +step:5341 train loss:3.537835 +step:5342 train loss:3.591084 +step:5343 train loss:3.523899 +step:5344 train loss:3.491134 +step:5345 train loss:3.539643 +step:5346 train loss:3.474958 +step:5347 train loss:3.490075 +step:5348 train loss:3.550784 +step:5349 train loss:3.513720 +step:5350 train loss:3.493663 +step:5351 train loss:3.549152 +step:5352 train loss:3.526521 +step:5353 train loss:3.510558 +step:5354 train loss:3.485857 +step:5355 train loss:3.426125 +step:5356 train loss:3.551927 +step:5357 train loss:3.526989 +step:5358 train loss:3.646585 +step:5359 train loss:3.554551 +step:5360 train loss:3.515294 +step:5361 train loss:3.498838 +step:5362 train loss:3.491307 +step:5363 train loss:3.561805 +step:5364 train loss:3.563358 +step:5365 train loss:3.490505 +step:5366 train loss:3.563787 +step:5367 train loss:3.595314 +step:5368 train loss:3.549260 +step:5369 train loss:3.594836 +step:5370 train loss:3.546579 +step:5371 train loss:3.509973 +step:5372 train 
loss:3.520076 +step:5373 train loss:3.493647 +step:5374 train loss:3.490659 +step:5375 train loss:3.517664 +step:5376 train loss:3.469745 +step:5377 train loss:3.501370 +step:5378 train loss:3.542506 +step:5379 train loss:3.543359 +step:5380 train loss:3.538285 +step:5381 train loss:3.556957 +step:5382 train loss:3.573006 +step:5383 train loss:3.524301 +step:5384 train loss:3.427831 +step:5385 train loss:3.517404 +step:5386 train loss:3.519794 +step:5387 train loss:3.478664 +step:5388 train loss:3.509171 +step:5389 train loss:3.551061 +step:5390 train loss:3.540138 +step:5391 train loss:3.502873 +step:5392 train loss:3.561990 +step:5393 train loss:3.567992 +step:5394 train loss:3.545659 +step:5395 train loss:3.519781 +step:5396 train loss:3.581712 +step:5397 train loss:3.534771 +step:5398 train loss:3.535220 +step:5399 train loss:3.495567 +step:5400 train loss:3.495935 +step:5401 train loss:3.502776 +step:5402 train loss:3.529487 +step:5403 train loss:3.538268 +step:5404 train loss:3.535799 +step:5405 train loss:3.491472 +step:5406 train loss:3.464894 +step:5407 train loss:3.505474 +step:5408 train loss:3.499129 +step:5409 train loss:3.569167 +step:5410 train loss:3.529890 +step:5411 train loss:3.493550 +step:5412 train loss:3.512349 +step:5413 train loss:3.515594 +step:5414 train loss:3.530419 +step:5415 train loss:3.543595 +step:5416 train loss:3.527528 +step:5417 train loss:3.490406 +step:5418 train loss:3.545945 +step:5419 train loss:3.524243 +step:5420 train loss:3.488750 +step:5421 train loss:3.454732 +step:5422 train loss:3.494203 +step:5423 train loss:3.506975 +step:5424 train loss:3.490830 +step:5425 train loss:3.509151 +step:5426 train loss:3.545333 +step:5427 train loss:3.507383 +step:5428 train loss:3.547426 +step:5429 train loss:3.478217 +step:5430 train loss:3.513672 +step:5431 train loss:3.525480 +step:5432 train loss:3.521959 +step:5433 train loss:3.510862 +step:5434 train loss:3.536189 +step:5435 train loss:3.478684 +step:5436 train loss:3.505496 +step:5437 train loss:3.494208 +step:5438 train loss:3.549808 +step:5439 train loss:3.482260 +step:5440 train loss:3.497017 +step:5441 train loss:3.551224 +step:5442 train loss:3.507727 +step:5443 train loss:3.465145 +step:5444 train loss:3.537131 +step:5445 train loss:3.567498 +step:5446 train loss:3.585252 +step:5447 train loss:3.505476 +step:5448 train loss:3.528406 +step:5449 train loss:3.528546 +step:5450 train loss:3.615900 +step:5451 train loss:3.548010 +step:5452 train loss:3.567433 +step:5453 train loss:3.566261 +step:5454 train loss:3.539741 +step:5455 train loss:3.523665 +step:5456 train loss:3.527092 +step:5457 train loss:3.491836 +step:5458 train loss:3.506837 +step:5459 train loss:3.537181 +step:5460 train loss:3.551755 +step:5461 train loss:3.531523 +step:5462 train loss:3.542369 +step:5463 train loss:3.536638 +step:5464 train loss:3.505879 +step:5465 train loss:3.512991 +step:5466 train loss:3.518051 +step:5467 train loss:3.549042 +step:5468 train loss:3.565281 +step:5469 train loss:3.518432 +step:5470 train loss:3.536250 +step:5471 train loss:3.520116 +step:5472 train loss:3.512547 +step:5473 train loss:3.538604 +step:5474 train loss:3.509752 +step:5475 train loss:3.532360 +step:5476 train loss:3.543578 +step:5477 train loss:3.545568 +step:5478 train loss:3.546939 +step:5479 train loss:3.600164 +step:5480 train loss:3.561717 +step:5481 train loss:3.555073 +step:5482 train loss:3.516879 +step:5483 train loss:3.486503 +step:5484 train loss:3.524860 +step:5485 train loss:3.507902 +step:5486 train loss:3.512274 
+step:5487 train loss:3.525848 +step:5488 train loss:3.527467 +step:5489 train loss:3.484148 +step:5490 train loss:3.547133 +step:5491 train loss:3.545068 +step:5492 train loss:3.527226 +step:5493 train loss:3.592112 +step:5494 train loss:3.526003 +step:5495 train loss:3.508989 +step:5496 train loss:3.501624 +step:5497 train loss:3.566679 +step:5498 train loss:3.574104 +step:5499 train loss:3.521577 +step:5500 validation loss:3.459189 total_sharp:1.0600e-02 L1_sharp:3.8752e-02 L2_sharp:2.0060e-02 L3_sharp:3.2116e-03 L4_sharp:1.6986e-03 L5_sharp:1.0487e-03 L6_sharp:1.6600e-03 L7_sharp:1.4889e-03 L8_sharp:1.3441e-03 L9_sharp:9.3357e-04 L10_sharp:5.0080e-04 L11_sharp:6.3961e-04 L12_sharp:1.9323e-03 total_fnorm:2.3188e+00 total_l1_linf:1.9675e+04 total_spectral:2.3188e+00 L1_fnorm:5.2032e-01 L2_fnorm:3.5200e-01 L3_fnorm:4.0040e-01 L4_fnorm:4.8673e-01 L5_fnorm:5.7321e-01 L6_fnorm:5.7054e-01 L7_fnorm:5.9599e-01 L8_fnorm:5.9964e-01 L9_fnorm:5.9619e-01 L10_fnorm:5.9584e-01 L11_fnorm:5.7832e-01 L12_fnorm:5.9908e-01 L1_l1linf:3.9067e-01 L2_l1linf:4.3860e-01 L3_l1linf:4.3053e-01 L4_l1linf:4.2790e-01 L5_l1linf:4.0137e-01 L6_l1linf:4.0521e-01 L7_l1linf:4.0548e-01 L8_l1linf:4.0636e-01 L9_l1linf:4.0740e-01 L10_l1linf:4.1334e-01 L11_l1linf:4.1642e-01 L12_l1linf:4.1946e-01 L1_spectral:1.2041e-02 L2_spectral:1.7756e-02 L3_spectral:1.5196e-02 L4_spectral:1.6281e-02 L5_spectral:1.2042e-02 L6_spectral:1.2046e-02 L7_spectral:1.2046e-02 L8_spectral:1.2051e-02 L9_spectral:1.2054e-02 L10_spectral:1.2053e-02 L11_spectral:1.2046e-02 L12_spectral:1.2047e-02 v_norm:2.3188e+00 cos_v_-g_hvp:1.9416e-02 g_hvp_norm:1.1389e+00 cos_v_-g_t:1.9774e-02 g_t_norm:1.2909e+00 hv_norm:2.8082e+00 cos_v_hv:8.7521e-03 hg_norm:2.1420e+03 cos_g_hg:1.8540e-01 v_par:7.9762e-03 v_perp:2.3187e+00 L1_cos_v_neg_g:2.0721e-02 L1_v_norm:5.2032e-01 L2_cos_v_neg_g:3.3417e-02 L2_v_norm:3.5200e-01 L3_cos_v_neg_g:3.0987e-02 L3_v_norm:4.0040e-01 L4_cos_v_neg_g:2.7176e-02 L4_v_norm:4.8673e-01 L5_cos_v_neg_g:2.8707e-02 L5_v_norm:5.7321e-01 L6_cos_v_neg_g:2.8744e-02 L6_v_norm:5.7054e-01 L7_cos_v_neg_g:2.8035e-02 L7_v_norm:5.9599e-01 L8_cos_v_neg_g:2.5750e-02 L8_v_norm:5.9964e-01 L9_cos_v_neg_g:2.7408e-02 L9_v_norm:5.9619e-01 L10_cos_v_neg_g:3.1335e-02 L10_v_norm:5.9584e-01 L11_cos_v_neg_g:4.0777e-02 L11_v_norm:5.7832e-01 L12_cos_v_neg_g:7.5995e-02 L12_v_norm:5.9908e-01 +step:5500 train loss:3.535069 +step:5501 train loss:3.557360 +step:5502 train loss:3.509779 +step:5503 train loss:3.617820 +step:5504 train loss:3.538599 +step:5505 train loss:3.506447 +step:5506 train loss:3.537777 +step:5507 train loss:3.521163 +step:5508 train loss:3.517519 +step:5509 train loss:3.543216 +step:5510 train loss:3.552778 +step:5511 train loss:3.473484 +step:5512 train loss:3.511612 +step:5513 train loss:3.619352 +step:5514 train loss:3.496881 +step:5515 train loss:3.537946 +step:5516 train loss:3.590393 +step:5517 train loss:3.520213 +step:5518 train loss:3.530267 +step:5519 train loss:3.543355 +step:5520 train loss:3.516693 +step:5521 train loss:3.540676 +step:5522 train loss:3.533978 +step:5523 train loss:3.528859 +step:5524 train loss:3.579689 +step:5525 train loss:3.510751 +step:5526 train loss:3.557667 +step:5527 train loss:3.536438 +step:5528 train loss:3.522039 +step:5529 train loss:3.514297 +step:5530 train loss:3.604421 +step:5531 train loss:3.701242 +step:5532 train loss:3.513181 +step:5533 train loss:3.523630 +step:5534 train loss:3.553019 +step:5535 train loss:3.527059 +step:5536 train loss:3.526821 +step:5537 train loss:3.534713 +step:5538 train loss:3.534050 
+step:5539 train loss:3.490002 +step:5540 train loss:3.546502 +step:5541 train loss:3.542027 +step:5542 train loss:3.543900 +step:5543 train loss:3.536422 +step:5544 train loss:3.526597 +step:5545 train loss:3.608676 +step:5546 train loss:3.512699 +step:5547 train loss:3.540451 +step:5548 train loss:3.521177 +step:5549 train loss:3.451302 +step:5550 train loss:3.500636 +step:5551 train loss:3.510533 +step:5552 train loss:3.523341 +step:5553 train loss:3.536640 +step:5554 train loss:3.550935 +step:5555 train loss:3.537436 +step:5556 train loss:3.515850 +step:5557 train loss:3.504060 +step:5558 train loss:3.523467 +step:5559 train loss:3.539511 +step:5560 train loss:3.490306 +step:5561 train loss:3.522235 +step:5562 train loss:3.528875 +step:5563 train loss:3.552623 +step:5564 train loss:3.578774 +step:5565 train loss:3.486411 +step:5566 train loss:3.513716 +step:5567 train loss:3.496474 +step:5568 train loss:3.523388 +step:5569 train loss:3.529169 +step:5570 train loss:3.541729 +step:5571 train loss:3.537407 +step:5572 train loss:3.525453 +step:5573 train loss:3.540625 +step:5574 train loss:3.540851 +step:5575 train loss:3.487459 +step:5576 train loss:3.504496 +step:5577 train loss:3.547534 +step:5578 train loss:3.513505 +step:5579 train loss:3.524160 +step:5580 train loss:3.544976 +step:5581 train loss:3.497247 +step:5582 train loss:3.494186 +step:5583 train loss:3.510943 +step:5584 train loss:3.542484 +step:5585 train loss:3.491461 +step:5586 train loss:3.545779 +step:5587 train loss:3.531291 +step:5588 train loss:3.512274 +step:5589 train loss:3.510552 +step:5590 train loss:3.497220 +step:5591 train loss:3.521338 +step:5592 train loss:3.574575 +step:5593 train loss:3.579366 +step:5594 train loss:3.535797 +step:5595 train loss:3.513677 +step:5596 train loss:3.573596 +step:5597 train loss:3.544591 +step:5598 train loss:3.556372 +step:5599 train loss:3.656447 +step:5600 train loss:3.552366 +step:5601 train loss:3.599176 +step:5602 train loss:3.518576 +step:5603 train loss:3.540864 +step:5604 train loss:3.468338 +step:5605 train loss:3.467025 +step:5606 train loss:3.479761 +step:5607 train loss:3.489586 +step:5608 train loss:3.558753 +step:5609 train loss:3.557355 +step:5610 train loss:3.582229 +step:5611 train loss:3.625388 +step:5612 train loss:3.553012 +step:5613 train loss:3.516504 +step:5614 train loss:3.535700 +step:5615 train loss:3.587852 +step:5616 train loss:3.512757 +step:5617 train loss:3.475013 +step:5618 train loss:3.492061 +step:5619 train loss:3.537723 +step:5620 train loss:3.529269 +step:5621 train loss:3.547830 +step:5622 train loss:3.507402 +step:5623 train loss:3.565038 +step:5624 train loss:3.497169 +step:5625 train loss:3.528901 +step:5626 train loss:3.524013 +step:5627 train loss:3.544064 +step:5628 train loss:3.560737 +step:5629 train loss:3.540664 +step:5630 train loss:3.533407 +step:5631 train loss:3.548584 +step:5632 train loss:3.581740 +step:5633 train loss:3.532000 +step:5634 train loss:3.510451 +step:5635 train loss:3.536630 +step:5636 train loss:3.550549 +step:5637 train loss:3.514645 +step:5638 train loss:3.502021 +step:5639 train loss:3.477915 +step:5640 train loss:3.512571 +step:5641 train loss:3.571991 +step:5642 train loss:3.513486 +step:5643 train loss:3.527352 +step:5644 train loss:3.565773 +step:5645 train loss:3.517872 +step:5646 train loss:3.523321 +step:5647 train loss:3.473708 +step:5648 train loss:3.538220 +step:5649 train loss:3.549903 +step:5650 train loss:3.504946 +step:5651 train loss:3.607092 +step:5652 train loss:3.482947 +step:5653 train 
loss:3.485354 +step:5654 train loss:3.544660 +step:5655 train loss:3.534211 +step:5656 train loss:3.559230 +step:5657 train loss:3.525946 +step:5658 train loss:3.548742 +step:5659 train loss:3.563670 +step:5660 train loss:3.507249 +step:5661 train loss:3.518946 +step:5662 train loss:3.627802 +step:5663 train loss:3.534502 +step:5664 train loss:3.521524 +step:5665 train loss:3.530819 +step:5666 train loss:3.510914 +step:5667 train loss:3.509502 +step:5668 train loss:3.509751 +step:5669 train loss:3.538349 +step:5670 train loss:3.528099 +step:5671 train loss:3.495631 +step:5672 train loss:3.532339 +step:5673 train loss:3.497608 +step:5674 train loss:3.517578 +step:5675 train loss:3.511625 +step:5676 train loss:3.504941 +step:5677 train loss:3.514857 +step:5678 train loss:3.536535 +step:5679 train loss:3.515780 +step:5680 train loss:3.494232 +step:5681 train loss:3.540817 +step:5682 train loss:3.547199 +step:5683 train loss:3.512765 +step:5684 train loss:3.572687 +step:5685 train loss:3.610731 +step:5686 train loss:3.483853 +step:5687 train loss:3.511234 +step:5688 train loss:3.506325 +step:5689 train loss:3.564099 +step:5690 train loss:3.464212 +step:5691 train loss:3.475755 +step:5692 train loss:3.506667 +step:5693 train loss:3.457380 +step:5694 train loss:3.531641 +step:5695 train loss:3.520747 +step:5696 train loss:3.458754 +step:5697 train loss:3.526341 +step:5698 train loss:3.526520 +step:5699 train loss:3.569468 +step:5700 train loss:3.502903 +step:5701 train loss:3.546093 +step:5702 train loss:3.513376 +step:5703 train loss:3.531762 +step:5704 train loss:3.506092 +step:5705 train loss:3.531777 +step:5706 train loss:3.532582 +step:5707 train loss:3.544036 +step:5708 train loss:3.512868 +step:5709 train loss:3.594589 +step:5710 train loss:3.587852 +step:5711 train loss:3.508763 +step:5712 train loss:3.540310 +step:5713 train loss:3.511312 +step:5714 train loss:3.512293 +step:5715 train loss:3.537981 +step:5716 train loss:3.539024 +step:5717 train loss:3.561911 +step:5718 train loss:3.507332 +step:5719 train loss:3.513649 +step:5720 train loss:3.501477 +step:5721 train loss:3.484869 +step:5722 train loss:3.570106 +step:5723 train loss:3.496862 +step:5724 train loss:3.477324 +step:5725 train loss:3.567248 +step:5726 train loss:3.544083 +step:5727 train loss:3.500661 +step:5728 train loss:3.548023 +step:5729 train loss:3.554105 +step:5730 train loss:3.526776 +step:5731 train loss:3.479927 +step:5732 train loss:3.519530 +step:5733 train loss:3.519049 +step:5734 train loss:3.504922 +step:5735 train loss:3.626144 +step:5736 train loss:3.494950 +step:5737 train loss:3.525996 +step:5738 train loss:3.542823 +step:5739 train loss:3.525254 +step:5740 train loss:3.612617 +step:5741 train loss:3.523554 +step:5742 train loss:3.489423 +step:5743 train loss:3.499569 +step:5744 train loss:3.511611 +step:5745 train loss:3.511609 +step:5746 train loss:3.570438 +step:5747 train loss:3.477149 +step:5748 train loss:3.511849 +step:5749 train loss:3.487361 +step:5750 validation loss:3.462231 +step:5750 train loss:3.496364 +step:5751 train loss:3.531435 +step:5752 train loss:3.563103 +step:5753 train loss:3.503702 +step:5754 train loss:3.559987 +step:5755 train loss:3.518382 +step:5756 train loss:3.588643 +step:5757 train loss:3.475024 +step:5758 train loss:3.518647 +step:5759 train loss:3.528083 +step:5760 train loss:3.524820 +step:5761 train loss:3.604791 +step:5762 train loss:3.574289 +step:5763 train loss:3.579697 +step:5764 train loss:3.534785 +step:5765 train loss:3.467885 +step:5766 train loss:3.537260 
+step:5767 train loss:3.488965 +step:5768 train loss:3.562043 +step:5769 train loss:3.560411 +step:5770 train loss:3.544948 +step:5771 train loss:3.551741 +step:5772 train loss:3.506704 +step:5773 train loss:3.420296 +step:5774 train loss:3.476917 +step:5775 train loss:3.474031 +step:5776 train loss:3.532785 +step:5777 train loss:3.571607 +step:5778 train loss:3.643604 +step:5779 train loss:3.592780 +step:5780 train loss:3.581884 +step:5781 train loss:3.591511 +step:5782 train loss:3.546633 +step:5783 train loss:3.516870 +step:5784 train loss:3.481098 +step:5785 train loss:3.527631 +step:5786 train loss:3.529916 +step:5787 train loss:3.495648 +step:5788 train loss:3.535756 +step:5789 train loss:3.530090 +step:5790 train loss:3.489084 +step:5791 train loss:3.540684 +step:5792 train loss:3.563507 +step:5793 train loss:3.520764 +step:5794 train loss:3.539379 +step:5795 train loss:3.524706 +step:5796 train loss:3.544693 +step:5797 train loss:3.542313 +step:5798 train loss:3.579423 +step:5799 train loss:3.517146 +step:5800 train loss:3.550465 +step:5801 train loss:3.537939 +step:5802 train loss:3.550101 +step:5803 train loss:3.524144 +step:5804 train loss:3.546549 +step:5805 train loss:3.509625 +step:5806 train loss:3.564281 +step:5807 train loss:3.515700 +step:5808 train loss:3.537679 +step:5809 train loss:3.479969 +step:5810 train loss:3.598698 +step:5811 train loss:3.555094 +step:5812 train loss:3.603204 +step:5813 train loss:3.563140 +step:5814 train loss:3.704341 +step:5815 train loss:3.634737 +step:5816 train loss:3.561161 +step:5817 train loss:3.597471 +step:5818 train loss:3.488044 +step:5819 train loss:3.552251 +step:5820 train loss:3.578466 +step:5821 train loss:3.515714 +step:5822 train loss:3.581518 +step:5823 train loss:3.559068 +step:5824 train loss:3.544583 +step:5825 train loss:3.526229 +step:5826 train loss:3.580616 +step:5827 train loss:3.518144 +step:5828 train loss:3.548190 +step:5829 train loss:3.541054 +step:5830 train loss:3.546709 +step:5831 train loss:3.564911 +step:5832 train loss:3.449460 +step:5833 train loss:3.596607 +step:5834 train loss:3.567945 +step:5835 train loss:3.513696 +step:5836 train loss:3.565204 +step:5837 train loss:3.536984 +step:5838 train loss:3.523169 +step:5839 train loss:3.533238 +step:5840 train loss:3.585857 +step:5841 train loss:3.586363 +step:5842 train loss:3.567731 +step:5843 train loss:3.522058 +step:5844 train loss:3.516446 +step:5845 train loss:3.571170 +step:5846 train loss:3.522776 +step:5847 train loss:3.495733 +step:5848 train loss:3.541648 +step:5849 train loss:3.496221 +step:5850 train loss:3.571955 +step:5851 train loss:3.599299 +step:5852 train loss:3.590455 +step:5853 train loss:3.573201 +step:5854 train loss:3.554040 +step:5855 train loss:3.542034 +step:5856 train loss:3.559824 +step:5857 train loss:3.599371 +step:5858 train loss:3.555746 +step:5859 train loss:3.572150 +step:5860 train loss:3.554676 +step:5861 train loss:3.554699 +step:5862 train loss:3.544317 +step:5863 train loss:3.528432 +step:5864 train loss:3.584819 +step:5865 train loss:3.524459 +step:5866 train loss:3.539609 +step:5867 train loss:3.544130 +step:5868 train loss:3.522856 +step:5869 train loss:3.557444 +step:5870 train loss:3.547918 +step:5871 train loss:3.570737 +step:5872 train loss:3.541749 +step:5873 train loss:3.512122 +step:5874 train loss:3.554871 +step:5875 train loss:3.541620 +step:5876 train loss:3.561644 +step:5877 train loss:3.528779 +step:5878 train loss:3.571321 +step:5879 train loss:3.564485 +step:5880 train loss:3.573623 +step:5881 train 
loss:3.593568 +step:5882 train loss:3.521417 +step:5883 train loss:3.553402 +step:5884 train loss:3.550719 +step:5885 train loss:3.535775 +step:5886 train loss:3.562034 +step:5887 train loss:3.536299 +step:5888 train loss:3.561604 +step:5889 train loss:3.547626 +step:5890 train loss:3.506389 +step:5891 train loss:3.502321 +step:5892 train loss:3.557144 +step:5893 train loss:3.533646 +step:5894 train loss:3.516776 +step:5895 train loss:3.564991 +step:5896 train loss:3.556686 +step:5897 train loss:3.499523 +step:5898 train loss:3.561885 +step:5899 train loss:3.525073 +step:5900 train loss:3.553750 +step:5901 train loss:3.503931 +step:5902 train loss:3.519177 +step:5903 train loss:3.616709 +step:5904 train loss:3.547903 +step:5905 train loss:3.579441 +step:5906 train loss:3.516857 +step:5907 train loss:3.551205 +step:5908 train loss:3.501530 +step:5909 train loss:3.544130 +step:5910 train loss:3.510583 +step:5911 train loss:3.620826 +step:5912 train loss:3.643722 +step:5913 train loss:3.544583 +step:5914 train loss:3.527697 +step:5915 train loss:3.521884 +step:5916 train loss:3.543462 +step:5917 train loss:3.516733 +step:5918 train loss:3.487261 +step:5919 train loss:3.557395 +step:5920 train loss:3.517079 +step:5921 train loss:3.503771 +step:5922 train loss:3.531556 +step:5923 train loss:3.582603 +step:5924 train loss:3.585948 +step:5925 train loss:3.612272 +step:5926 train loss:3.536565 +step:5927 train loss:3.549889 +step:5928 train loss:3.570530 +step:5929 train loss:3.554199 +step:5930 train loss:3.559095 +step:5931 train loss:3.568943 +step:5932 train loss:3.570235 +step:5933 train loss:3.615635 +step:5934 train loss:3.530605 +step:5935 train loss:3.515833 +step:5936 train loss:3.519775 +step:5937 train loss:3.493807 +step:5938 train loss:3.540183 +step:5939 train loss:3.569721 +step:5940 train loss:3.592578 +step:5941 train loss:3.542750 +step:5942 train loss:3.609779 +step:5943 train loss:3.547688 +step:5944 train loss:3.525877 +step:5945 train loss:3.528568 +step:5946 train loss:3.530893 +step:5947 train loss:3.516221 +step:5948 train loss:3.599134 +step:5949 train loss:3.539210 +step:5950 train loss:3.541228 +step:5951 train loss:3.592926 +step:5952 train loss:3.450002 +step:5953 train loss:3.599919 +step:5954 train loss:3.506126 +step:5955 train loss:3.491494 +step:5956 train loss:3.560381 +step:5957 train loss:3.496434 +step:5958 train loss:3.564473 +step:5959 train loss:3.484283 +step:5960 train loss:3.513071 +step:5961 train loss:3.508281 +step:5962 train loss:3.507396 +step:5963 train loss:3.582384 +step:5964 train loss:3.526336 +step:5965 train loss:3.552144 +step:5966 train loss:3.521292 +step:5967 train loss:3.518361 +step:5968 train loss:3.532043 +step:5969 train loss:3.537180 +step:5970 train loss:3.553689 +step:5971 train loss:3.527151 +step:5972 train loss:3.535800 +step:5973 train loss:3.557989 +step:5974 train loss:3.529327 +step:5975 train loss:3.536365 +step:5976 train loss:3.549812 +step:5977 train loss:3.506369 +step:5978 train loss:3.595465 +step:5979 train loss:3.642661 +step:5980 train loss:3.582608 +step:5981 train loss:3.542906 +step:5982 train loss:3.562985 +step:5983 train loss:3.529078 +step:5984 train loss:3.513663 +step:5985 train loss:3.553662 +step:5986 train loss:3.533645 +step:5987 train loss:3.549446 +step:5988 train loss:3.488370 +step:5989 train loss:3.530273 +step:5990 train loss:3.523285 +step:5991 train loss:3.574958 +step:5992 train loss:3.544211 +step:5993 train loss:3.561740 +step:5994 train loss:3.575860 +step:5995 train loss:3.533157 
+step:5996 train loss:3.514874 +step:5997 train loss:3.479072 +step:5998 train loss:3.561460 +step:5999 train loss:3.528693 +step:6000 validation loss:3.478563 total_sharp:6.0338e-03 L1_sharp:1.0714e-02 L2_sharp:1.5421e-02 L3_sharp:3.9187e-03 L4_sharp:1.3963e-03 L5_sharp:1.1080e-03 L6_sharp:1.4753e-03 L7_sharp:1.2977e-03 L8_sharp:1.3697e-03 L9_sharp:9.5851e-04 L10_sharp:5.0144e-04 L11_sharp:5.1402e-04 L12_sharp:8.1385e-04 total_fnorm:2.2919e+00 total_l1_linf:1.9414e+04 total_spectral:2.2919e+00 L1_fnorm:5.2672e-01 L2_fnorm:3.3822e-01 L3_fnorm:3.7592e-01 L4_fnorm:4.8268e-01 L5_fnorm:5.2080e-01 L6_fnorm:5.5352e-01 L7_fnorm:5.9455e-01 L8_fnorm:5.9792e-01 L9_fnorm:5.9540e-01 L10_fnorm:5.9469e-01 L11_fnorm:5.7516e-01 L12_fnorm:5.9866e-01 L1_l1linf:3.7885e-01 L2_l1linf:3.6194e-01 L3_l1linf:3.8900e-01 L4_l1linf:4.3375e-01 L5_l1linf:3.9784e-01 L6_l1linf:4.0234e-01 L7_l1linf:4.0060e-01 L8_l1linf:4.0836e-01 L9_l1linf:4.0106e-01 L10_l1linf:4.0822e-01 L11_l1linf:4.1170e-01 L12_l1linf:4.1438e-01 L1_spectral:1.2048e-02 L2_spectral:1.2942e-02 L3_spectral:1.3546e-02 L4_spectral:1.7300e-02 L5_spectral:1.2047e-02 L6_spectral:1.2041e-02 L7_spectral:1.2050e-02 L8_spectral:1.2047e-02 L9_spectral:1.2055e-02 L10_spectral:1.2056e-02 L11_spectral:1.2042e-02 L12_spectral:1.2047e-02 v_norm:2.2919e+00 cos_v_-g_hvp:2.2535e-02 g_hvp_norm:8.2439e-01 cos_v_-g_t:2.5119e-02 g_t_norm:8.1732e-01 hv_norm:1.6636e+00 cos_v_hv:8.3130e-03 hg_norm:8.9830e+02 cos_g_hg:5.5782e-02 v_par:4.6622e-03 v_perp:2.2919e+00 L1_cos_v_neg_g:1.0519e-02 L1_v_norm:5.2672e-01 L2_cos_v_neg_g:3.1866e-02 L2_v_norm:3.3822e-01 L3_cos_v_neg_g:2.4941e-02 L3_v_norm:3.7592e-01 L4_cos_v_neg_g:2.3697e-02 L4_v_norm:4.8268e-01 L5_cos_v_neg_g:2.7709e-02 L5_v_norm:5.2080e-01 L6_cos_v_neg_g:2.6819e-02 L6_v_norm:5.5352e-01 L7_cos_v_neg_g:2.6772e-02 L7_v_norm:5.9455e-01 L8_cos_v_neg_g:2.7730e-02 L8_v_norm:5.9792e-01 L9_cos_v_neg_g:2.7956e-02 L9_v_norm:5.9540e-01 L10_cos_v_neg_g:2.9958e-02 L10_v_norm:5.9469e-01 L11_cos_v_neg_g:3.9670e-02 L11_v_norm:5.7516e-01 L12_cos_v_neg_g:7.2006e-02 L12_v_norm:5.9866e-01 +step:6000 train loss:3.511751 +step:6001 train loss:3.531401 +step:6002 train loss:3.534382 +step:6003 train loss:3.467439 +step:6004 train loss:3.420795 +step:6005 train loss:3.454055 +step:6006 train loss:3.550747 +step:6007 train loss:3.500506 +step:6008 train loss:3.548949 +step:6009 train loss:3.579400 +step:6010 train loss:3.537348 +step:6011 train loss:3.527556 +step:6012 train loss:3.518620 +step:6013 train loss:3.534611 +step:6014 train loss:3.500412 +step:6015 train loss:3.461600 +step:6016 train loss:3.511909 +step:6017 train loss:3.506442 +step:6018 train loss:3.552567 +step:6019 train loss:3.512689 +step:6020 train loss:3.567597 +step:6021 train loss:3.542981 +step:6022 train loss:3.544966 +step:6023 train loss:3.515844 +step:6024 train loss:3.595303 +step:6025 train loss:3.504117 +step:6026 train loss:3.573282 +step:6027 train loss:3.515886 +step:6028 train loss:3.564871 +step:6029 train loss:3.639785 +step:6030 train loss:3.510419 +step:6031 train loss:3.492711 +step:6032 train loss:3.571188 +step:6033 train loss:3.531159 +step:6034 train loss:3.531153 +step:6035 train loss:3.586564 +step:6036 train loss:3.535546 +step:6037 train loss:3.528419 +step:6038 train loss:3.511667 +step:6039 train loss:3.573232 +step:6040 train loss:3.498747 +step:6041 train loss:3.565338 +step:6042 train loss:3.495492 +step:6043 train loss:3.599610 +step:6044 train loss:3.600278 +step:6045 train loss:3.542766 +step:6046 train loss:3.611871 +step:6047 train loss:3.765163 
+step:6048 train loss:3.562885 +step:6049 train loss:3.566210 +step:6050 train loss:3.543622 +step:6051 train loss:3.523036 +step:6052 train loss:3.592808 +step:6053 train loss:3.538696 +step:6054 train loss:3.579378 +step:6055 train loss:3.688494 +step:6056 train loss:3.684921 +step:6057 train loss:3.482226 +step:6058 train loss:3.531732 +step:6059 train loss:3.574988 +step:6060 train loss:3.565838 +step:6061 train loss:3.572553 +step:6062 train loss:3.542599 +step:6063 train loss:3.543666 +step:6064 train loss:3.496578 +step:6065 train loss:3.567674 +step:6066 train loss:3.541885 +step:6067 train loss:3.555324 +step:6068 train loss:3.470902 +step:6069 train loss:3.585785 +step:6070 train loss:3.517869 +step:6071 train loss:3.563462 +step:6072 train loss:3.599955 +step:6073 train loss:3.561499 +step:6074 train loss:3.536851 +step:6075 train loss:3.628649 +step:6076 train loss:3.558491 +step:6077 train loss:3.472086 +step:6078 train loss:3.558951 +step:6079 train loss:3.569227 +step:6080 train loss:3.578669 +step:6081 train loss:3.573933 +step:6082 train loss:3.526756 +step:6083 train loss:3.560427 +step:6084 train loss:3.496933 +step:6085 train loss:3.525921 +step:6086 train loss:3.548688 +step:6087 train loss:3.514606 +step:6088 train loss:3.497468 +step:6089 train loss:3.461861 +step:6090 train loss:3.530896 +step:6091 train loss:3.517809 +step:6092 train loss:3.494859 +step:6093 train loss:3.511896 +step:6094 train loss:3.554064 +step:6095 train loss:3.552239 +step:6096 train loss:3.538144 +step:6097 train loss:3.507273 +step:6098 train loss:3.529417 +step:6099 train loss:3.506429 +step:6100 train loss:3.511684 +step:6101 train loss:3.584070 +step:6102 train loss:3.542624 +step:6103 train loss:3.546281 +step:6104 train loss:3.507792 +step:6105 train loss:3.569033 +step:6106 train loss:3.537971 +step:6107 train loss:3.535545 +step:6108 train loss:3.578979 +step:6109 train loss:3.564437 +step:6110 train loss:3.569642 +step:6111 train loss:3.628726 +step:6112 train loss:3.762735 +step:6113 train loss:3.506228 +step:6114 train loss:3.572728 +step:6115 train loss:3.527109 +step:6116 train loss:3.532693 +step:6117 train loss:3.503389 +step:6118 train loss:3.525985 +step:6119 train loss:3.539726 +step:6120 train loss:3.512816 +step:6121 train loss:3.551234 +step:6122 train loss:3.507268 +step:6123 train loss:3.532356 +step:6124 train loss:3.538807 +step:6125 train loss:3.514267 +step:6126 train loss:3.529674 +step:6127 train loss:3.548396 +step:6128 train loss:3.513898 +step:6129 train loss:3.689421 +step:6130 train loss:3.745359 +step:6131 train loss:3.515530 +step:6132 train loss:3.551755 +step:6133 train loss:3.587587 +step:6134 train loss:3.583827 +step:6135 train loss:3.605204 +step:6136 train loss:3.604979 +step:6137 train loss:3.628448 +step:6138 train loss:3.531151 +step:6139 train loss:3.517509 +step:6140 train loss:3.529061 +step:6141 train loss:3.574281 +step:6142 train loss:3.539018 +step:6143 train loss:3.647181 +step:6144 train loss:3.633998 +step:6145 train loss:3.500505 +step:6146 train loss:3.478045 +step:6147 train loss:3.547099 +step:6148 train loss:3.485802 +step:6149 train loss:3.554017 +step:6150 train loss:3.546581 +step:6151 train loss:3.533999 +step:6152 train loss:3.508765 +step:6153 train loss:3.526229 +step:6154 train loss:3.575015 +step:6155 train loss:3.592138 +step:6156 train loss:3.543842 +step:6157 train loss:3.570350 +step:6158 train loss:3.580096 +step:6159 train loss:3.530530 +step:6160 train loss:3.539130 +step:6161 train loss:3.554280 +step:6162 train 
loss:3.514943 +step:6163 train loss:3.522734 +step:6164 train loss:3.612710 +step:6165 train loss:3.592420 +step:6166 train loss:3.562742 +step:6167 train loss:3.498082 +step:6168 train loss:3.529756 +step:6169 train loss:3.541628 +step:6170 train loss:3.551134 +step:6171 train loss:3.507435 +step:6172 train loss:3.544206 +step:6173 train loss:3.518762 +step:6174 train loss:3.520584 +step:6175 train loss:3.538243 +step:6176 train loss:3.554863 +step:6177 train loss:3.515855 +step:6178 train loss:3.520515 +step:6179 train loss:3.525496 +step:6180 train loss:3.544497 +step:6181 train loss:3.510838 +step:6182 train loss:3.526865 +step:6183 train loss:3.499499 +step:6184 train loss:3.489243 +step:6185 train loss:3.538171 +step:6186 train loss:3.511675 +step:6187 train loss:3.525257 +step:6188 train loss:3.532874 +step:6189 train loss:3.506119 +step:6190 train loss:3.539049 +step:6191 train loss:3.519876 +step:6192 train loss:3.557197 +step:6193 train loss:3.563910 +step:6194 train loss:3.499560 +step:6195 train loss:3.454904 +step:6196 train loss:3.544159 +step:6197 train loss:3.502376 +step:6198 train loss:3.565023 +step:6199 train loss:3.543433 +step:6200 train loss:3.527803 +step:6201 train loss:3.500797 +step:6202 train loss:3.541565 +step:6203 train loss:3.528261 +step:6204 train loss:3.485289 +step:6205 train loss:3.480186 +step:6206 train loss:3.537996 +step:6207 train loss:3.527493 +step:6208 train loss:3.526712 +step:6209 train loss:3.480751 +step:6210 train loss:3.519064 +step:6211 train loss:3.474644 +step:6212 train loss:3.518892 +step:6213 train loss:3.502220 +step:6214 train loss:3.676994 +step:6215 train loss:3.509462 +step:6216 train loss:3.519114 +step:6217 train loss:3.509518 +step:6218 train loss:3.484946 +step:6219 train loss:3.464824 +step:6220 train loss:3.507543 +step:6221 train loss:3.551624 +step:6222 train loss:3.525136 +step:6223 train loss:3.529023 +step:6224 train loss:3.504805 +step:6225 train loss:3.530566 +step:6226 train loss:3.530577 +step:6227 train loss:3.497333 +step:6228 train loss:3.505479 +step:6229 train loss:3.518476 +step:6230 train loss:3.557326 +step:6231 train loss:3.514728 +step:6232 train loss:3.526402 +step:6233 train loss:3.565094 +step:6234 train loss:3.541790 +step:6235 train loss:3.491689 +step:6236 train loss:3.490414 +step:6237 train loss:3.493120 +step:6238 train loss:3.526082 +step:6239 train loss:3.544353 +step:6240 train loss:3.515731 +step:6241 train loss:3.477875 +step:6242 train loss:3.551436 +step:6243 train loss:3.502167 +step:6244 train loss:3.546872 +step:6245 train loss:3.450306 +step:6246 train loss:3.463096 +step:6247 train loss:3.541607 +step:6248 train loss:3.508405 +step:6249 train loss:3.502487 +step:6250 validation loss:3.462546 +step:6250 train loss:3.537354 +step:6251 train loss:3.589411 +step:6252 train loss:3.699868 +step:6253 train loss:3.491391 +step:6254 train loss:3.480471 +step:6255 train loss:3.545959 +step:6256 train loss:3.552961 +step:6257 train loss:3.538372 +step:6258 train loss:3.531452 +step:6259 train loss:3.523509 +step:6260 train loss:3.571299 +step:6261 train loss:3.513687 +step:6262 train loss:3.587258 +step:6263 train loss:3.483493 +step:6264 train loss:3.650816 +step:6265 train loss:3.642117 +step:6266 train loss:3.537177 +step:6267 train loss:3.487983 +step:6268 train loss:3.551624 +step:6269 train loss:3.552720 +step:6270 train loss:3.565446 +step:6271 train loss:3.542143 +step:6272 train loss:3.503617 +step:6273 train loss:3.447654 +step:6274 train loss:3.508529 +step:6275 train loss:3.465837 
+step:6276 train loss:3.504277 +step:6277 train loss:3.462837 +step:6278 train loss:3.521124 +step:6279 train loss:3.503584 +step:6280 train loss:3.499374 +step:6281 train loss:3.506022 +step:6282 train loss:3.635946 +step:6283 train loss:3.748982 +step:6284 train loss:3.469661 +step:6285 train loss:3.479128 +step:6286 train loss:3.515562 +step:6287 train loss:3.498750 +step:6288 train loss:3.508885 +step:6289 train loss:3.503744 +step:6290 train loss:3.535214 +step:6291 train loss:3.512946 +step:6292 train loss:3.555656 +step:6293 train loss:3.527715 +step:6294 train loss:3.520315 +step:6295 train loss:3.523518 +step:6296 train loss:3.506966 +step:6297 train loss:3.506584 +step:6298 train loss:3.456092 +step:6299 train loss:3.537073 +step:6300 train loss:3.490916 +step:6301 train loss:3.524108 +step:6302 train loss:3.537483 +step:6303 train loss:3.535918 +step:6304 train loss:3.511567 +step:6305 train loss:3.477113 +step:6306 train loss:3.547708 +step:6307 train loss:3.601118 +step:6308 train loss:3.545508 +step:6309 train loss:3.605914 +step:6310 train loss:3.580219 +step:6311 train loss:3.494407 +step:6312 train loss:3.594547 +step:6313 train loss:3.543335 +step:6314 train loss:3.485509 +step:6315 train loss:3.482074 +step:6316 train loss:3.542142 +step:6317 train loss:3.482130 +step:6318 train loss:3.492021 +step:6319 train loss:3.536881 +step:6320 train loss:3.486152 +step:6321 train loss:3.537248 +step:6322 train loss:3.571965 +step:6323 train loss:3.568357 +step:6324 train loss:3.512265 +step:6325 train loss:3.543101 +step:6326 train loss:3.537728 +step:6327 train loss:3.538008 +step:6328 train loss:3.516405 +step:6329 train loss:3.540553 +step:6330 train loss:3.589362 +step:6331 train loss:3.576196 +step:6332 train loss:3.538426 +step:6333 train loss:3.547245 +step:6334 train loss:3.509533 +step:6335 train loss:3.512780 +step:6336 train loss:3.551246 +step:6337 train loss:3.538251 +step:6338 train loss:3.534109 +step:6339 train loss:3.549712 +step:6340 train loss:3.563646 +step:6341 train loss:3.521589 +step:6342 train loss:3.609443 +step:6343 train loss:3.563580 +step:6344 train loss:3.477037 +step:6345 train loss:3.514218 +step:6346 train loss:3.496155 +step:6347 train loss:3.544044 +step:6348 train loss:3.542122 +step:6349 train loss:3.520526 +step:6350 train loss:3.587452 +step:6351 train loss:3.506973 +step:6352 train loss:3.505133 +step:6353 train loss:3.498371 +step:6354 train loss:3.546083 +step:6355 train loss:3.450089 +step:6356 train loss:3.445332 +step:6357 train loss:3.479562 +step:6358 train loss:3.530861 +step:6359 train loss:3.538784 +step:6360 train loss:3.631263 +step:6361 train loss:3.501506 +step:6362 train loss:3.575475 +step:6363 train loss:3.531961 +step:6364 train loss:3.510654 +step:6365 train loss:3.575685 +step:6366 train loss:3.535749 +step:6367 train loss:3.513217 +step:6368 train loss:3.527192 +step:6369 train loss:3.512027 +step:6370 train loss:3.509017 +step:6371 train loss:3.504945 +step:6372 train loss:3.491940 +step:6373 train loss:3.622357 +step:6374 train loss:3.550963 +step:6375 train loss:3.520488 +step:6376 train loss:3.577816 +step:6377 train loss:3.544932 +step:6378 train loss:3.547656 +step:6379 train loss:3.535188 +step:6380 train loss:3.532372 +step:6381 train loss:3.538255 +step:6382 train loss:3.534097 +step:6383 train loss:3.477177 +step:6384 train loss:3.464343 +step:6385 train loss:3.480633 +step:6386 train loss:3.575233 +step:6387 train loss:3.535851 +step:6388 train loss:3.563087 +step:6389 train loss:3.532327 +step:6390 train 
loss:3.499211 +step:6391 train loss:3.499466 +step:6392 train loss:3.547523 +step:6393 train loss:3.587521 +step:6394 train loss:3.778227 +step:6395 train loss:3.543629 +step:6396 train loss:3.545997 +step:6397 train loss:3.584676 +step:6398 train loss:3.520422 +step:6399 train loss:3.562214 +step:6400 train loss:3.504772 +step:6401 train loss:3.511186 +step:6402 train loss:3.597518 +step:6403 train loss:3.537161 +step:6404 train loss:3.536845 +step:6405 train loss:3.557836 +step:6406 train loss:3.625983 +step:6407 train loss:3.598928 +step:6408 train loss:3.518671 +step:6409 train loss:3.550418 +step:6410 train loss:3.561173 +step:6411 train loss:3.573391 +step:6412 train loss:3.543073 +step:6413 train loss:3.559707 +step:6414 train loss:3.537351 +step:6415 train loss:3.527430 +step:6416 train loss:3.581547 +step:6417 train loss:3.591217 +step:6418 train loss:3.588743 +step:6419 train loss:3.528216 +step:6420 train loss:3.528540 +step:6421 train loss:3.538152 +step:6422 train loss:3.551539 +step:6423 train loss:3.618612 +step:6424 train loss:3.571022 +step:6425 train loss:3.576768 +step:6426 train loss:3.560144 +step:6427 train loss:3.564062 +step:6428 train loss:3.577214 +step:6429 train loss:3.595612 +step:6430 train loss:3.553594 +step:6431 train loss:3.551689 +step:6432 train loss:3.520969 +step:6433 train loss:3.531958 +step:6434 train loss:3.527687 +step:6435 train loss:3.535656 +step:6436 train loss:3.536677 +step:6437 train loss:3.509351 +step:6438 train loss:3.528371 +step:6439 train loss:3.527740 +step:6440 train loss:3.543797 +step:6441 train loss:3.505866 +step:6442 train loss:3.563709 +step:6443 train loss:3.551068 +step:6444 train loss:3.523883 +step:6445 train loss:3.571777 +step:6446 train loss:3.753961 +step:6447 train loss:3.545900 +step:6448 train loss:3.527393 +step:6449 train loss:3.603800 +step:6450 train loss:3.556479 +step:6451 train loss:3.520192 +step:6452 train loss:3.518399 +step:6453 train loss:3.581496 +step:6454 train loss:3.577933 +step:6455 train loss:3.520827 +step:6456 train loss:3.559335 +step:6457 train loss:3.538740 +step:6458 train loss:3.540514 +step:6459 train loss:3.555604 +step:6460 train loss:3.601346 +step:6461 train loss:3.624538 +step:6462 train loss:3.583016 +step:6463 train loss:3.574866 +step:6464 train loss:3.616693 +step:6465 train loss:3.606013 +step:6466 train loss:3.574524 +step:6467 train loss:3.646201 +step:6468 train loss:3.592735 +step:6469 train loss:3.561176 +step:6470 train loss:3.578840 +step:6471 train loss:3.629172 +step:6472 train loss:3.589101 +step:6473 train loss:3.602469 +step:6474 train loss:3.549033 +step:6475 train loss:3.574718 +step:6476 train loss:3.612272 +step:6477 train loss:3.596438 +step:6478 train loss:3.547508 +step:6479 train loss:3.577425 +step:6480 train loss:3.623632 +step:6481 train loss:3.574572 +step:6482 train loss:3.589814 +step:6483 train loss:3.519461 +step:6484 train loss:3.545046 +step:6485 train loss:3.664248 +step:6486 train loss:3.540396 +step:6487 train loss:3.533653 +step:6488 train loss:3.645203 +step:6489 train loss:3.511141 +step:6490 train loss:3.557939 +step:6491 train loss:3.552112 +step:6492 train loss:3.614021 +step:6493 train loss:3.639817 +step:6494 train loss:3.568805 +step:6495 train loss:3.533966 +step:6496 train loss:3.569126 +step:6497 train loss:3.559506 +step:6498 train loss:3.584724 +step:6499 train loss:3.597104 +step:6500 validation loss:3.495939 total_sharp:3.5738e-02 L1_sharp:5.5534e-01 L2_sharp:1.5058e-02 L3_sharp:4.4081e-03 L4_sharp:1.9351e-03 L5_sharp:1.4461e-03 
L6_sharp:1.5567e-03 L7_sharp:1.4061e-03 L8_sharp:1.2911e-03 L9_sharp:8.7863e-04 L10_sharp:4.4720e-04 L11_sharp:5.4794e-04 L12_sharp:9.3557e-04 total_fnorm:2.2113e+00 total_l1_linf:1.8529e+04 total_spectral:2.2113e+00 L1_fnorm:4.8105e-01 L2_fnorm:2.6685e-01 L3_fnorm:3.2049e-01 L4_fnorm:4.2517e-01 L5_fnorm:5.2228e-01 L6_fnorm:5.4544e-01 L7_fnorm:5.8882e-01 L8_fnorm:5.9283e-01 L9_fnorm:5.9400e-01 L10_fnorm:5.9492e-01 L11_fnorm:5.7017e-01 L12_fnorm:5.9902e-01 L1_l1linf:3.2565e-01 L2_l1linf:5.1189e-01 L3_l1linf:5.1949e-01 L4_l1linf:5.5058e-01 L5_l1linf:5.0597e-01 L6_l1linf:4.4786e-01 L7_l1linf:4.1281e-01 L8_l1linf:4.0692e-01 L9_l1linf:4.0574e-01 L10_l1linf:4.3669e-01 L11_l1linf:4.6488e-01 L12_l1linf:4.6717e-01 L1_spectral:1.2042e-02 L2_spectral:1.2034e-02 L3_spectral:1.2031e-02 L4_spectral:1.8372e-02 L5_spectral:1.2041e-02 L6_spectral:1.2042e-02 L7_spectral:1.2045e-02 L8_spectral:1.2051e-02 L9_spectral:1.2049e-02 L10_spectral:1.2053e-02 L11_spectral:1.2045e-02 L12_spectral:1.2048e-02 v_norm:2.2113e+00 cos_v_-g_hvp:8.8645e-03 g_hvp_norm:1.5712e+00 cos_v_-g_t:1.6507e-02 g_t_norm:1.1457e+00 hv_norm:4.8348e+01 cos_v_hv:1.6346e-03 hg_norm:2.1329e+05 cos_g_hg:4.0632e-01 v_par:4.6729e-03 v_perp:2.2113e+00 L1_cos_v_neg_g:-4.0684e-03 L1_v_norm:4.8105e-01 L2_cos_v_neg_g:1.1345e-02 L2_v_norm:2.6685e-01 L3_cos_v_neg_g:1.2881e-02 L3_v_norm:3.2049e-01 L4_cos_v_neg_g:1.8889e-02 L4_v_norm:4.2517e-01 L5_cos_v_neg_g:1.8821e-02 L5_v_norm:5.2228e-01 L6_cos_v_neg_g:2.5234e-02 L6_v_norm:5.4544e-01 L7_cos_v_neg_g:2.3926e-02 L7_v_norm:5.8882e-01 L8_cos_v_neg_g:2.5921e-02 L8_v_norm:5.9283e-01 L9_cos_v_neg_g:2.6518e-02 L9_v_norm:5.9400e-01 L10_cos_v_neg_g:2.9012e-02 L10_v_norm:5.9492e-01 L11_cos_v_neg_g:3.7271e-02 L11_v_norm:5.7017e-01 L12_cos_v_neg_g:6.0373e-02 L12_v_norm:5.9902e-01 +step:6500 train loss:3.617728 +step:6501 train loss:3.543445 +step:6502 train loss:3.562092 +step:6503 train loss:3.551951 +step:6504 train loss:3.657815 +step:6505 train loss:3.554395 +step:6506 train loss:3.544519 +step:6507 train loss:3.586566 +step:6508 train loss:3.547660 +step:6509 train loss:3.597955 +step:6510 train loss:3.570198 +step:6511 train loss:3.556151 +step:6512 train loss:3.568252 +step:6513 train loss:3.549693 +step:6514 train loss:3.585298 +step:6515 train loss:3.600462 +step:6516 train loss:3.545631 +step:6517 train loss:3.519001 +step:6518 train loss:3.568312 +step:6519 train loss:3.510841 +step:6520 train loss:3.509882 +step:6521 train loss:3.560920 +step:6522 train loss:3.582257 +step:6523 train loss:3.539093 +step:6524 train loss:3.532069 +step:6525 train loss:3.507024 +step:6526 train loss:3.665857 +step:6527 train loss:3.591527 +step:6528 train loss:3.529271 +step:6529 train loss:3.495702 +step:6530 train loss:3.554490 +step:6531 train loss:3.583125 +step:6532 train loss:3.504123 +step:6533 train loss:3.554948 +step:6534 train loss:3.501844 +step:6535 train loss:3.522517 +step:6536 train loss:3.532770 +step:6537 train loss:3.555668 +step:6538 train loss:3.536161 +step:6539 train loss:3.552576 +step:6540 train loss:3.532065 +step:6541 train loss:3.597004 +step:6542 train loss:3.571398 +step:6543 train loss:3.554517 +step:6544 train loss:3.496914 +step:6545 train loss:3.466968 +step:6546 train loss:3.503024 +step:6547 train loss:3.545719 +step:6548 train loss:3.578695 +step:6549 train loss:3.517993 +step:6550 train loss:3.537648 +step:6551 train loss:3.491117 +step:6552 train loss:3.491797 +step:6553 train loss:3.545919 +step:6554 train loss:3.611690 +step:6555 train loss:3.595885 +step:6556 train loss:3.569866 
+step:6557 train loss:3.592718 +step:6558 train loss:3.639886 +step:6559 train loss:3.574458 +step:6560 train loss:3.557035 +step:6561 train loss:3.519251 +step:6562 train loss:3.538913 +step:6563 train loss:3.617223 +step:6564 train loss:3.563534 +step:6565 train loss:3.595722 +step:6566 train loss:3.620464 +step:6567 train loss:3.617624 +step:6568 train loss:3.579092 +step:6569 train loss:3.578842 +step:6570 train loss:3.545468 +step:6571 train loss:3.556005 +step:6572 train loss:3.561521 +step:6573 train loss:3.558920 +step:6574 train loss:3.538187 +step:6575 train loss:3.541749 +step:6576 train loss:3.540484 +step:6577 train loss:3.589931 +step:6578 train loss:3.540306 +step:6579 train loss:3.595548 +step:6580 train loss:3.548695 +step:6581 train loss:3.505810 +step:6582 train loss:3.517477 +step:6583 train loss:3.539852 +step:6584 train loss:3.578769 +step:6585 train loss:3.620913 +step:6586 train loss:3.593092 +step:6587 train loss:3.568576 +step:6588 train loss:3.529088 +step:6589 train loss:3.506467 +step:6590 train loss:3.530365 +step:6591 train loss:3.502167 +step:6592 train loss:3.516373 +step:6593 train loss:3.492710 +step:6594 train loss:3.510225 +step:6595 train loss:3.536219 +step:6596 train loss:3.488111 +step:6597 train loss:3.524724 +step:6598 train loss:3.534562 +step:6599 train loss:3.479319 +step:6600 train loss:3.426109 +step:6601 train loss:3.475519 +step:6602 train loss:3.493176 +step:6603 train loss:3.471725 +step:6604 train loss:3.466577 +step:6605 train loss:3.479013 +step:6606 train loss:3.519748 +step:6607 train loss:3.465899 +step:6608 train loss:3.494815 +step:6609 train loss:3.495091 +step:6610 train loss:3.456035 +step:6611 train loss:3.515589 +step:6612 train loss:3.478464 +step:6613 train loss:3.500267 +step:6614 train loss:3.551903 +step:6615 train loss:3.538164 +step:6616 train loss:3.564117 +step:6617 train loss:3.446892 +step:6618 train loss:3.486990 +step:6619 train loss:3.549867 +step:6620 train loss:3.518789 +step:6621 train loss:3.509535 +step:6622 train loss:3.524019 +step:6623 train loss:3.493984 +step:6624 train loss:3.525612 +step:6625 train loss:3.526446 +step:6626 train loss:3.534271 +step:6627 train loss:3.531551 +step:6628 train loss:3.567863 +step:6629 train loss:3.565291 +step:6630 train loss:3.490779 +step:6631 train loss:3.423538 +step:6632 train loss:3.585009 +step:6633 train loss:3.481714 +step:6634 train loss:3.526975 +step:6635 train loss:3.485557 +step:6636 train loss:3.485621 +step:6637 train loss:3.531902 +step:6638 train loss:3.549920 +step:6639 train loss:3.519043 +step:6640 train loss:3.517823 +step:6641 train loss:3.467010 +step:6642 train loss:3.502110 +step:6643 train loss:3.487783 +step:6644 train loss:3.521846 +step:6645 train loss:3.576168 +step:6646 train loss:3.440422 +step:6647 train loss:3.520753 +step:6648 train loss:3.500090 +step:6649 train loss:3.503601 +step:6650 train loss:3.536691 +step:6651 train loss:3.572975 +step:6652 train loss:3.534793 +step:6653 train loss:3.520752 +step:6654 train loss:3.466068 +step:6655 train loss:3.533060 +step:6656 train loss:3.489588 +step:6657 train loss:3.545109 +step:6658 train loss:3.500112 +step:6659 train loss:3.484008 +step:6660 train loss:3.514851 +step:6661 train loss:3.477513 +step:6662 train loss:3.533502 +step:6663 train loss:3.518116 +step:6664 train loss:3.505914 +step:6665 train loss:3.514588 +step:6666 train loss:3.491123 +step:6667 train loss:3.519219 +step:6668 train loss:3.507006 +step:6669 train loss:3.530301 +step:6670 train loss:3.562479 +step:6671 train 
loss:3.509253 +step:6672 train loss:3.524125 +step:6673 train loss:3.481507 +step:6674 train loss:3.495423 +step:6675 train loss:3.541144 +step:6676 train loss:3.489035 +step:6677 train loss:3.467532 +step:6678 train loss:3.458226 +step:6679 train loss:3.541234 +step:6680 train loss:3.520239 +step:6681 train loss:3.523302 +step:6682 train loss:3.521801 +step:6683 train loss:3.460423 +step:6684 train loss:3.510447 +step:6685 train loss:3.580191 +step:6686 train loss:3.470200 +step:6687 train loss:3.536554 +step:6688 train loss:3.530424 +step:6689 train loss:3.480060 +step:6690 train loss:3.562756 +step:6691 train loss:3.508621 +step:6692 train loss:3.522220 +step:6693 train loss:3.533968 +step:6694 train loss:3.586396 +step:6695 train loss:3.504948 +step:6696 train loss:3.512456 +step:6697 train loss:3.549116 +step:6698 train loss:3.532135 +step:6699 train loss:3.522114 +step:6700 train loss:3.463738 +step:6701 train loss:3.466419 +step:6702 train loss:3.509833 +step:6703 train loss:3.531051 +step:6704 train loss:3.548731 +step:6705 train loss:3.503730 +step:6706 train loss:3.552724 +step:6707 train loss:3.572712 +step:6708 train loss:3.517133 +step:6709 train loss:3.577015 +step:6710 train loss:3.488310 +step:6711 train loss:3.484975 +step:6712 train loss:3.503440 +step:6713 train loss:3.509351 +step:6714 train loss:3.510100 +step:6715 train loss:3.534057 +step:6716 train loss:3.514483 +step:6717 train loss:3.601744 +step:6718 train loss:3.529544 +step:6719 train loss:3.542280 +step:6720 train loss:3.602087 +step:6721 train loss:3.546481 +step:6722 train loss:3.484630 +step:6723 train loss:3.465698 +step:6724 train loss:3.496296 +step:6725 train loss:3.506794 +step:6726 train loss:3.494951 +step:6727 train loss:3.545751 +step:6728 train loss:3.472771 +step:6729 train loss:3.551739 +step:6730 train loss:3.534362 +step:6731 train loss:3.508481 +step:6732 train loss:3.644891 +step:6733 train loss:3.575398 +step:6734 train loss:3.535356 +step:6735 train loss:3.611576 +step:6736 train loss:3.503711 +step:6737 train loss:3.583082 +step:6738 train loss:3.524111 +step:6739 train loss:3.549175 +step:6740 train loss:3.494205 +step:6741 train loss:3.539494 +step:6742 train loss:3.534559 +step:6743 train loss:3.464076 +step:6744 train loss:3.566598 +step:6745 train loss:3.570879 +step:6746 train loss:3.530865 +step:6747 train loss:3.596804 +step:6748 train loss:3.706680 +step:6749 train loss:3.655221 +step:6750 validation loss:3.489268 +step:6750 train loss:3.548014 +step:6751 train loss:3.489630 +step:6752 train loss:3.528117 +step:6753 train loss:3.502178 +step:6754 train loss:3.558712 +step:6755 train loss:3.576592 +step:6756 train loss:3.513539 +step:6757 train loss:3.594332 +step:6758 train loss:3.480211 +step:6759 train loss:3.509910 +step:6760 train loss:3.482211 +step:6761 train loss:3.501674 +step:6762 train loss:3.566936 +step:6763 train loss:3.500707 +step:6764 train loss:3.497390 +step:6765 train loss:3.562270 +step:6766 train loss:3.549166 +step:6767 train loss:3.505101 +step:6768 train loss:3.516844 +step:6769 train loss:3.468852 +step:6770 train loss:3.538357 +step:6771 train loss:3.447611 +step:6772 train loss:3.553535 +step:6773 train loss:3.492754 +step:6774 train loss:3.474885 +step:6775 train loss:3.506117 +step:6776 train loss:3.462743 +step:6777 train loss:3.560084 +step:6778 train loss:3.458587 +step:6779 train loss:3.496728 +step:6780 train loss:3.496947 +step:6781 train loss:3.486664 +step:6782 train loss:3.482090 +step:6783 train loss:3.495308 +step:6784 train loss:3.532262 
+step:6785 train loss:3.496769 +step:6786 train loss:3.508587 +step:6787 train loss:3.518110 +step:6788 train loss:3.514358 +step:6789 train loss:3.522506 +step:6790 train loss:3.523064 +step:6791 train loss:3.584569 +step:6792 train loss:3.524837 +step:6793 train loss:3.572892 +step:6794 train loss:3.535283 +step:6795 train loss:3.463918 +step:6796 train loss:3.531635 +step:6797 train loss:3.513960 +step:6798 train loss:3.529166 +step:6799 train loss:3.537784 +step:6800 train loss:3.531879 +step:6801 train loss:3.496319 +step:6802 train loss:3.535266 +step:6803 train loss:3.571503 +step:6804 train loss:3.518632 +step:6805 train loss:3.509422 +step:6806 train loss:3.543397 +step:6807 train loss:3.559035 +step:6808 train loss:3.506918 +step:6809 train loss:3.480302 +step:6810 train loss:3.517763 +step:6811 train loss:3.503235 +step:6812 train loss:3.455052 +step:6813 train loss:3.487915 +step:6814 train loss:3.450603 +step:6815 train loss:3.473543 +step:6816 train loss:3.533895 +step:6817 train loss:3.509969 +step:6818 train loss:3.515722 +step:6819 train loss:3.571907 +step:6820 train loss:3.550537 +step:6821 train loss:3.653183 +step:6822 train loss:3.498451 +step:6823 train loss:3.543205 +step:6824 train loss:3.518242 +step:6825 train loss:3.550955 +step:6826 train loss:3.467047 +step:6827 train loss:3.575427 +step:6828 train loss:3.503350 +step:6829 train loss:3.483199 +step:6830 train loss:3.490552 +step:6831 train loss:3.457359 +step:6832 train loss:3.513974 +step:6833 train loss:3.468775 +step:6834 train loss:3.542424 +step:6835 train loss:3.445347 +step:6836 train loss:3.530437 +step:6837 train loss:3.508669 +step:6838 train loss:3.535230 +step:6839 train loss:3.527636 +step:6840 train loss:3.540699 +step:6841 train loss:3.528646 +step:6842 train loss:3.465801 +step:6843 train loss:3.496625 +step:6844 train loss:3.507423 +step:6845 train loss:3.556883 +step:6846 train loss:3.495359 +step:6847 train loss:3.481239 +step:6848 train loss:3.600899 +step:6849 train loss:3.491933 +step:6850 train loss:3.550389 +step:6851 train loss:3.515713 +step:6852 train loss:3.500671 +step:6853 train loss:3.503403 +step:6854 train loss:3.509993 +step:6855 train loss:3.489215 +step:6856 train loss:3.519737 +step:6857 train loss:3.530423 +step:6858 train loss:3.449799 +step:6859 train loss:3.432138 +step:6860 train loss:3.484724 +step:6861 train loss:3.541196 +step:6862 train loss:3.484012 +step:6863 train loss:3.490765 +step:6864 train loss:3.546933 +step:6865 train loss:3.540578 +step:6866 train loss:3.522750 +step:6867 train loss:3.483012 +step:6868 train loss:3.570230 +step:6869 train loss:3.536409 +step:6870 train loss:3.557578 +step:6871 train loss:3.481062 +step:6872 train loss:3.485985 +step:6873 train loss:3.526905 +step:6874 train loss:3.468413 +step:6875 train loss:3.514639 +step:6876 train loss:3.487618 +step:6877 train loss:3.520786 +step:6878 train loss:3.483789 +step:6879 train loss:3.487970 +step:6880 train loss:3.458744 +step:6881 train loss:3.459149 +step:6882 train loss:3.459147 +step:6883 train loss:3.494301 +step:6884 train loss:3.491891 +step:6885 train loss:3.505156 +step:6886 train loss:3.419160 +step:6887 train loss:3.479795 +step:6888 train loss:3.528611 +step:6889 train loss:3.499824 +step:6890 train loss:3.532533 +step:6891 train loss:3.530511 +step:6892 train loss:3.536409 +step:6893 train loss:3.479209 +step:6894 train loss:3.509938 +step:6895 train loss:3.513247 +step:6896 train loss:3.487089 +step:6897 train loss:3.500291 +step:6898 train loss:3.536490 +step:6899 train 
loss:3.489955 +step:6900 train loss:3.496479 +step:6901 train loss:3.465908 +step:6902 train loss:3.520505 +step:6903 train loss:3.537540 +step:6904 train loss:3.542062 +step:6905 train loss:3.552569 +step:6906 train loss:3.606489 +step:6907 train loss:3.525464 +step:6908 train loss:3.532593 +step:6909 train loss:3.490347 +step:6910 train loss:3.460638 +step:6911 train loss:3.529261 +step:6912 train loss:3.469849 +step:6913 train loss:3.530033 +step:6914 train loss:3.460180 +step:6915 train loss:3.506100 +step:6916 train loss:3.509227 +step:6917 train loss:3.505948 +step:6918 train loss:3.507831 +step:6919 train loss:3.467334 +step:6920 train loss:3.540480 +step:6921 train loss:3.479722 +step:6922 train loss:3.494282 +step:6923 train loss:3.498787 +step:6924 train loss:3.506710 +step:6925 train loss:3.463216 +step:6926 train loss:3.537446 +step:6927 train loss:3.435690 +step:6928 train loss:3.529119 +step:6929 train loss:3.492100 +step:6930 train loss:3.508064 +step:6931 train loss:3.553318 +step:6932 train loss:3.471931 +step:6933 train loss:3.487156 +step:6934 train loss:3.571115 +step:6935 train loss:3.594912 +step:6936 train loss:3.484832 +step:6937 train loss:3.516147 +step:6938 train loss:3.501660 +step:6939 train loss:3.525872 +step:6940 train loss:3.570479 +step:6941 train loss:3.486463 +step:6942 train loss:3.484424 +step:6943 train loss:3.458822 +step:6944 train loss:3.493228 +step:6945 train loss:3.541543 +step:6946 train loss:3.547834 +step:6947 train loss:3.498884 +step:6948 train loss:3.527063 +step:6949 train loss:3.430064 +step:6950 train loss:3.564837 +step:6951 train loss:3.538347 +step:6952 train loss:3.546708 +step:6953 train loss:3.458883 +step:6954 train loss:3.507870 +step:6955 train loss:3.512178 +step:6956 train loss:3.515304 +step:6957 train loss:3.536650 +step:6958 train loss:3.520356 +step:6959 train loss:3.511322 +step:6960 train loss:3.530431 +step:6961 train loss:3.553963 +step:6962 train loss:3.473411 +step:6963 train loss:3.577722 +step:6964 train loss:3.486780 +step:6965 train loss:3.504988 +step:6966 train loss:3.484703 +step:6967 train loss:3.511854 +step:6968 train loss:3.527175 +step:6969 train loss:3.541276 +step:6970 train loss:3.517277 +step:6971 train loss:3.496863 +step:6972 train loss:3.495072 +step:6973 train loss:3.585097 +step:6974 train loss:3.481261 +step:6975 train loss:3.495736 +step:6976 train loss:3.515879 +step:6977 train loss:3.500507 +step:6978 train loss:3.521563 +step:6979 train loss:3.490914 +step:6980 train loss:3.528075 +step:6981 train loss:3.472060 +step:6982 train loss:3.462677 +step:6983 train loss:3.457220 +step:6984 train loss:3.542200 +step:6985 train loss:3.487655 +step:6986 train loss:3.487784 +step:6987 train loss:3.486319 +step:6988 train loss:3.559775 +step:6989 train loss:3.469181 +step:6990 train loss:3.442193 +step:6991 train loss:3.470718 +step:6992 train loss:3.474544 +step:6993 train loss:3.487418 +step:6994 train loss:3.525194 +step:6995 train loss:3.543241 +step:6996 train loss:3.459598 +step:6997 train loss:3.558249 +step:6998 train loss:3.507207 +step:6999 train loss:3.550581 +step:7000 validation loss:3.480392 total_sharp:1.4613e-02 L1_sharp:2.1303e-02 L2_sharp:7.6594e-02 L3_sharp:7.1907e-03 L4_sharp:4.0909e-03 L5_sharp:2.4367e-03 L6_sharp:1.5713e-03 L7_sharp:1.1377e-03 L8_sharp:1.0645e-03 L9_sharp:8.9405e-04 L10_sharp:4.7335e-04 L11_sharp:5.5161e-04 L12_sharp:6.3003e-04 total_fnorm:2.2508e+00 total_l1_linf:1.8936e+04 total_spectral:2.2508e+00 L1_fnorm:5.1062e-01 L2_fnorm:3.1093e-01 L3_fnorm:3.3396e-01 
L4_fnorm:4.1070e-01 L5_fnorm:5.0274e-01 L6_fnorm:5.4183e-01 L7_fnorm:5.9460e-01 L8_fnorm:5.9784e-01 L9_fnorm:5.9578e-01 L10_fnorm:5.9573e-01 L11_fnorm:5.7283e-01 L12_fnorm:5.9857e-01 L1_l1linf:3.5937e-01 L2_l1linf:4.4538e-01 L3_l1linf:4.5454e-01 L4_l1linf:5.6804e-01 L5_l1linf:4.4572e-01 L6_l1linf:4.1083e-01 L7_l1linf:4.0005e-01 L8_l1linf:4.0210e-01 L9_l1linf:3.9958e-01 L10_l1linf:4.3591e-01 L11_l1linf:4.6955e-01 L12_l1linf:4.5909e-01 L1_spectral:1.2041e-02 L2_spectral:1.2030e-02 L3_spectral:1.2636e-02 L4_spectral:2.1638e-02 L5_spectral:1.3914e-02 L6_spectral:1.2047e-02 L7_spectral:1.2045e-02 L8_spectral:1.2048e-02 L9_spectral:1.2048e-02 L10_spectral:1.2057e-02 L11_spectral:1.2042e-02 L12_spectral:1.2045e-02 v_norm:2.2508e+00 cos_v_-g_hvp:1.5823e-02 g_hvp_norm:1.0599e+00 cos_v_-g_t:2.7034e-02 g_t_norm:8.8655e-01 hv_norm:3.5079e+01 cos_v_hv:9.3765e-04 hg_norm:5.3418e+05 cos_g_hg:4.0204e-01 v_par:3.7889e-03 v_perp:2.2508e+00 L1_cos_v_neg_g:3.0920e-03 L1_v_norm:5.1062e-01 L2_cos_v_neg_g:1.3824e-02 L2_v_norm:3.1093e-01 L3_cos_v_neg_g:2.7058e-02 L3_v_norm:3.3396e-01 L4_cos_v_neg_g:2.5928e-02 L4_v_norm:4.1070e-01 L5_cos_v_neg_g:2.7323e-02 L5_v_norm:5.0274e-01 L6_cos_v_neg_g:2.7355e-02 L6_v_norm:5.4183e-01 L7_cos_v_neg_g:2.6670e-02 L7_v_norm:5.9460e-01 L8_cos_v_neg_g:2.7489e-02 L8_v_norm:5.9784e-01 L9_cos_v_neg_g:2.8189e-02 L9_v_norm:5.9578e-01 L10_cos_v_neg_g:3.0874e-02 L10_v_norm:5.9573e-01 L11_cos_v_neg_g:4.0210e-02 L11_v_norm:5.7283e-01 L12_cos_v_neg_g:7.4909e-02 L12_v_norm:5.9857e-01 +step:7000 train loss:3.476339 +step:7001 train loss:3.527956 +step:7002 train loss:3.459476 +step:7003 train loss:3.511480 +step:7004 train loss:3.455380 +step:7005 train loss:3.529711 +step:7006 train loss:3.487973 +step:7007 train loss:3.512504 +step:7008 train loss:3.464608 +step:7009 train loss:3.546562 +step:7010 train loss:3.501772 +step:7011 train loss:3.511117 +step:7012 train loss:3.492221 +step:7013 train loss:3.499020 +step:7014 train loss:3.499463 +step:7015 train loss:3.487399 +step:7016 train loss:3.535371 +step:7017 train loss:3.441035 +step:7018 train loss:3.559801 +step:7019 train loss:3.509047 +step:7020 train loss:3.558559 +step:7021 train loss:3.492114 +step:7022 train loss:3.504560 +step:7023 train loss:3.509242 +step:7024 train loss:3.524987 +step:7025 train loss:3.514268 +step:7026 train loss:3.545320 +step:7027 train loss:3.520845 +step:7028 train loss:3.553997 +step:7029 train loss:3.564575 +step:7030 train loss:3.609918 +step:7031 train loss:3.482692 +step:7032 train loss:3.503547 +step:7033 train loss:3.529212 +step:7034 train loss:3.507961 +step:7035 train loss:3.503796 +step:7036 train loss:3.484318 +step:7037 train loss:3.542989 +step:7038 train loss:3.501473 +step:7039 train loss:3.577750 +step:7040 train loss:3.480522 +step:7041 train loss:3.500135 +step:7042 train loss:3.479408 +step:7043 train loss:3.457780 +step:7044 train loss:3.498428 +step:7045 train loss:3.519860 +step:7046 train loss:3.500334 +step:7047 train loss:3.454435 +step:7048 train loss:3.542745 +step:7049 train loss:3.520329 +step:7050 train loss:3.536590 +step:7051 train loss:3.541621 +step:7052 train loss:3.483002 +step:7053 train loss:3.475329 +step:7054 train loss:3.625567 +step:7055 train loss:3.508857 +step:7056 train loss:3.519911 +step:7057 train loss:3.421095 +step:7058 train loss:3.545528 +step:7059 train loss:3.494803 +step:7060 train loss:3.494283 +step:7061 train loss:3.483466 +step:7062 train loss:3.586620 +step:7063 train loss:3.487906 +step:7064 train loss:3.561879 +step:7065 train loss:3.495277 
+step:7066 train loss:3.482530 +step:7067 train loss:3.489304 +step:7068 train loss:3.519843 +step:7069 train loss:3.529233 +step:7070 train loss:3.524736 +step:7071 train loss:3.577935 +step:7072 train loss:3.513776 +step:7073 train loss:3.517084 +step:7074 train loss:3.501384 +step:7075 train loss:3.615869 +step:7076 train loss:3.467914 +step:7077 train loss:3.561812 +step:7078 train loss:3.479675 +step:7079 train loss:3.531334 +step:7080 train loss:3.532898 +step:7081 train loss:3.506803 +step:7082 train loss:3.495734 +step:7083 train loss:3.522609 +step:7084 train loss:3.518520 +step:7085 train loss:3.502396 +step:7086 train loss:3.488925 +step:7087 train loss:3.519947 +step:7088 train loss:3.502205 +step:7089 train loss:3.529739 +step:7090 train loss:3.510991 +step:7091 train loss:3.549296 +step:7092 train loss:3.495153 +step:7093 train loss:3.515820 +step:7094 train loss:3.529404 +step:7095 train loss:3.570606 +step:7096 train loss:3.498764 +step:7097 train loss:3.552128 +step:7098 train loss:3.528153 +step:7099 train loss:3.558184 +step:7100 train loss:3.523370 +step:7101 train loss:3.547792 +step:7102 train loss:3.533640 +step:7103 train loss:3.541650 +step:7104 train loss:3.548181 +step:7105 train loss:3.548365 +step:7106 train loss:3.490720 +step:7107 train loss:3.489576 +step:7108 train loss:3.553760 +step:7109 train loss:3.636612 +step:7110 train loss:3.542943 +step:7111 train loss:3.547770 +step:7112 train loss:3.602094 +step:7113 train loss:3.600964 +step:7114 train loss:3.669002 +step:7115 train loss:3.589397 +step:7116 train loss:3.556156 +step:7117 train loss:3.487206 +step:7118 train loss:3.551758 +step:7119 train loss:3.505730 +step:7120 train loss:3.557488 +step:7121 train loss:3.556525 +step:7122 train loss:3.652366 +step:7123 train loss:3.547870 +step:7124 train loss:3.595412 +step:7125 train loss:3.534699 +step:7126 train loss:3.472823 +step:7127 train loss:3.459437 +step:7128 train loss:3.525868 +step:7129 train loss:3.498984 +step:7130 train loss:3.556175 +step:7131 train loss:3.544992 +step:7132 train loss:3.562336 +step:7133 train loss:3.509882 +step:7134 train loss:3.534143 +step:7135 train loss:3.555757 +step:7136 train loss:3.525968 +step:7137 train loss:3.501211 +step:7138 train loss:3.516236 +step:7139 train loss:3.510827 +step:7140 train loss:3.592324 +step:7141 train loss:3.517160 +step:7142 train loss:3.581242 +step:7143 train loss:3.479905 +step:7144 train loss:3.526946 +step:7145 train loss:3.589322 +step:7146 train loss:3.541594 +step:7147 train loss:3.562522 +step:7148 train loss:3.514225 +step:7149 train loss:3.563597 +step:7150 train loss:3.475224 +step:7151 train loss:3.512821 +step:7152 train loss:3.465512 +step:7153 train loss:3.538038 +step:7154 train loss:3.515309 +step:7155 train loss:3.533155 +step:7156 train loss:3.490257 +step:7157 train loss:3.573789 +step:7158 train loss:3.492358 +step:7159 train loss:3.523026 +step:7160 train loss:3.531039 +step:7161 train loss:3.516803 +step:7162 train loss:3.549999 +step:7163 train loss:3.520396 +step:7164 train loss:3.501215 +step:7165 train loss:3.528588 +step:7166 train loss:3.483086 +step:7167 train loss:3.575077 +step:7168 train loss:3.518049 +step:7169 train loss:3.593903 +step:7170 train loss:3.516460 +step:7171 train loss:3.553088 +step:7172 train loss:3.554452 +step:7173 train loss:3.587248 +step:7174 train loss:3.532053 +step:7175 train loss:3.583560 +step:7176 train loss:3.539094 +step:7177 train loss:3.481060 +step:7178 train loss:3.530786 +step:7179 train loss:3.583254 +step:7180 train 
loss:3.553166 +step:7181 train loss:3.560733 +step:7182 train loss:3.524129 +step:7183 train loss:3.568208 +step:7184 train loss:3.509147 +step:7185 train loss:3.512956 +step:7186 train loss:3.519245 +step:7187 train loss:3.610829 +step:7188 train loss:3.533824 +step:7189 train loss:3.515749 +step:7190 train loss:3.530102 +step:7191 train loss:3.495642 +step:7192 train loss:3.579636 +step:7193 train loss:3.631504 +step:7194 train loss:3.561004 +step:7195 train loss:3.595613 +step:7196 train loss:3.506033 +step:7197 train loss:3.517050 +step:7198 train loss:3.478757 +step:7199 train loss:3.496630 +step:7200 train loss:3.474058 +step:7201 train loss:3.488139 +step:7202 train loss:3.482867 +step:7203 train loss:3.538733 +step:7204 train loss:3.535018 +step:7205 train loss:3.554571 +step:7206 train loss:3.670954 +step:7207 train loss:3.481081 +step:7208 train loss:3.617012 +step:7209 train loss:3.623573 +step:7210 train loss:3.629637 +step:7211 train loss:3.660240 +step:7212 train loss:3.523977 +step:7213 train loss:3.512737 +step:7214 train loss:3.552373 +step:7215 train loss:3.516245 +step:7216 train loss:3.583762 +step:7217 train loss:3.509465 +step:7218 train loss:3.531669 +step:7219 train loss:3.545539 +step:7220 train loss:3.582363 +step:7221 train loss:3.532517 +step:7222 train loss:3.515843 +step:7223 train loss:3.567459 +step:7224 train loss:3.523827 +step:7225 train loss:3.561953 +step:7226 train loss:3.499247 +step:7227 train loss:3.490365 +step:7228 train loss:3.535488 +step:7229 train loss:3.547967 +step:7230 train loss:3.529119 +step:7231 train loss:3.536547 +step:7232 train loss:3.568230 +step:7233 train loss:3.518990 +step:7234 train loss:3.542815 +step:7235 train loss:3.579387 +step:7236 train loss:3.530387 +step:7237 train loss:3.573294 +step:7238 train loss:3.533740 +step:7239 train loss:3.583134 +step:7240 train loss:3.554088 +step:7241 train loss:3.601443 +step:7242 train loss:3.540705 +step:7243 train loss:3.535163 +step:7244 train loss:3.606325 +step:7245 train loss:3.542101 +step:7246 train loss:3.688964 +step:7247 train loss:3.553119 +step:7248 train loss:3.625559 +step:7249 train loss:3.573272 +step:7250 validation loss:3.560539 +step:7250 train loss:3.632988 +step:7251 train loss:3.623001 +step:7252 train loss:3.663197 +step:7253 train loss:3.547115 +step:7254 train loss:3.568072 +step:7255 train loss:3.609206 +step:7256 train loss:3.527395 +step:7257 train loss:3.617257 +step:7258 train loss:3.624396 +step:7259 train loss:3.567720 +step:7260 train loss:3.710452 +step:7261 train loss:3.563392 +step:7262 train loss:3.517686 +step:7263 train loss:3.567338 +step:7264 train loss:3.590816 +step:7265 train loss:3.606806 +step:7266 train loss:3.554038 +step:7267 train loss:3.565342 +step:7268 train loss:3.506743 +step:7269 train loss:3.544447 +step:7270 train loss:3.570068 +step:7271 train loss:3.518804 +step:7272 train loss:3.518803 +step:7273 train loss:3.545450 +step:7274 train loss:3.543552 +step:7275 train loss:3.548028 +step:7276 train loss:3.575673 +step:7277 train loss:3.569431 +step:7278 train loss:3.541495 +step:7279 train loss:3.581157 +step:7280 train loss:3.613320 +step:7281 train loss:3.561099 +step:7282 train loss:3.542680 +step:7283 train loss:3.493056 +step:7284 train loss:3.522930 +step:7285 train loss:3.556144 +step:7286 train loss:3.512318 +step:7287 train loss:3.538161 +step:7288 train loss:3.535546 +step:7289 train loss:3.502389 +step:7290 train loss:3.514058 +step:7291 train loss:3.581739 +step:7292 train loss:3.614797 +step:7293 train loss:3.626152 
+step:7294 train loss:3.621089 +step:7295 train loss:3.504908 +step:7296 train loss:3.514479 +step:7297 train loss:3.530697 +step:7298 train loss:3.551006 +step:7299 train loss:3.560103 +step:7300 train loss:3.602264 +step:7301 train loss:3.532237 +step:7302 train loss:3.495381 +step:7303 train loss:3.500606 +step:7304 train loss:3.497677 +step:7305 train loss:3.515296 +step:7306 train loss:3.506935 +step:7307 train loss:3.528282 +step:7308 train loss:3.524839 +step:7309 train loss:3.540562 +step:7310 train loss:3.519144 +step:7311 train loss:3.507422 +step:7312 train loss:3.597872 +step:7313 train loss:3.587826 +step:7314 train loss:3.537584 +step:7315 train loss:3.578907 +step:7316 train loss:3.571632 +step:7317 train loss:3.619561 +step:7318 train loss:3.587105 +step:7319 train loss:3.531133 +step:7320 train loss:3.511026 +step:7321 train loss:3.602539 +step:7322 train loss:3.553557 +step:7323 train loss:3.530607 +step:7324 train loss:3.571635 +step:7325 train loss:3.498901 +step:7326 train loss:3.577434 +step:7327 train loss:3.558897 +step:7328 train loss:3.565599 +step:7329 train loss:3.555206 +step:7330 train loss:3.577360 +step:7331 train loss:3.578848 +step:7332 train loss:3.541062 +step:7333 train loss:3.536291 +step:7334 train loss:3.520808 +step:7335 train loss:3.552372 +step:7336 train loss:3.522437 +step:7337 train loss:3.517474 +step:7338 train loss:3.519747 +step:7339 train loss:3.537076 +step:7340 train loss:3.521834 +step:7341 train loss:3.534188 +step:7342 train loss:3.507953 +step:7343 train loss:3.566291 +step:7344 train loss:3.528238 +step:7345 train loss:3.522617 +step:7346 train loss:3.583368 +step:7347 train loss:3.542164 +step:7348 train loss:3.527249 +step:7349 train loss:3.505185 +step:7350 train loss:3.572845 +step:7351 train loss:3.568320 +step:7352 train loss:3.621860 +step:7353 train loss:3.578500 +step:7354 train loss:3.555672 +step:7355 train loss:3.579180 +step:7356 train loss:3.571450 +step:7357 train loss:3.539430 +step:7358 train loss:3.533597 +step:7359 train loss:3.509986 +step:7360 train loss:3.545650 +step:7361 train loss:3.534678 +step:7362 train loss:3.537401 +step:7363 train loss:3.688293 +step:7364 train loss:3.510078 +step:7365 train loss:3.572636 +step:7366 train loss:3.546191 +step:7367 train loss:3.537273 +step:7368 train loss:3.521835 +step:7369 train loss:3.570428 +step:7370 train loss:3.497289 +step:7371 train loss:3.564003 +step:7372 train loss:3.552866 +step:7373 train loss:3.541976 +step:7374 train loss:3.587285 +step:7375 train loss:3.519763 +step:7376 train loss:3.546989 +step:7377 train loss:3.555642 +step:7378 train loss:3.524185 +step:7379 train loss:3.566571 +step:7380 train loss:3.615442 +step:7381 train loss:3.520147 +step:7382 train loss:3.561019 +step:7383 train loss:3.532442 +step:7384 train loss:3.561393 +step:7385 train loss:3.522000 +step:7386 train loss:3.535903 +step:7387 train loss:3.563453 +step:7388 train loss:3.573386 +step:7389 train loss:3.587865 +step:7390 train loss:3.673312 +step:7391 train loss:3.652410 +step:7392 train loss:3.527839 +step:7393 train loss:3.476386 +step:7394 train loss:3.550264 +step:7395 train loss:3.499310 +step:7396 train loss:3.466098 +step:7397 train loss:3.493837 +step:7398 train loss:3.562385 +step:7399 train loss:3.541506 +step:7400 train loss:3.569783 +step:7401 train loss:3.532529 +step:7402 train loss:3.532387 +step:7403 train loss:3.566720 +step:7404 train loss:3.577264 +step:7405 train loss:3.565137 +step:7406 train loss:3.586314 +step:7407 train loss:3.634360 +step:7408 train 
loss:3.589351 +step:7409 train loss:3.539209 +step:7410 train loss:3.561324 +step:7411 train loss:3.576807 +step:7412 train loss:3.569985 +step:7413 train loss:3.613196 +step:7414 train loss:3.538360 +step:7415 train loss:3.654620 +step:7416 train loss:3.561871 +step:7417 train loss:3.550578 +step:7418 train loss:3.515199 +step:7419 train loss:3.495530 +step:7420 train loss:3.540812 +step:7421 train loss:3.522553 +step:7422 train loss:3.542695 +step:7423 train loss:3.588761 +step:7424 train loss:3.601359 +step:7425 train loss:3.514267 +step:7426 train loss:3.546712 +step:7427 train loss:3.550031 +step:7428 train loss:3.560537 +step:7429 train loss:3.520514 +step:7430 train loss:3.554681 +step:7431 train loss:3.510572 +step:7432 train loss:3.567899 +step:7433 train loss:3.528606 +step:7434 train loss:3.564803 +step:7435 train loss:3.593411 +step:7436 train loss:3.540387 +step:7437 train loss:3.529309 +step:7438 train loss:3.573761 +step:7439 train loss:3.526010 +step:7440 train loss:3.639596 +step:7441 train loss:3.582462 +step:7442 train loss:3.569612 +step:7443 train loss:3.539989 +step:7444 train loss:3.581708 +step:7445 train loss:3.521874 +step:7446 train loss:3.536629 +step:7447 train loss:3.527035 +step:7448 train loss:3.575775 +step:7449 train loss:3.555572 +step:7450 train loss:3.537202 +step:7451 train loss:3.583277 +step:7452 train loss:3.504713 +step:7453 train loss:3.499713 +step:7454 train loss:3.504471 +step:7455 train loss:3.577573 +step:7456 train loss:3.546922 +step:7457 train loss:3.517948 +step:7458 train loss:3.538130 +step:7459 train loss:3.538117 +step:7460 train loss:3.634491 +step:7461 train loss:3.587925 +step:7462 train loss:3.530798 +step:7463 train loss:3.551365 +step:7464 train loss:3.562565 +step:7465 train loss:3.573187 +step:7466 train loss:3.573548 +step:7467 train loss:3.621311 +step:7468 train loss:3.562027 +step:7469 train loss:3.581098 +step:7470 train loss:3.561093 +step:7471 train loss:3.511435 +step:7472 train loss:3.473058 +step:7473 train loss:3.499747 +step:7474 train loss:3.551315 +step:7475 train loss:3.584339 +step:7476 train loss:3.544573 +step:7477 train loss:3.612028 +step:7478 train loss:3.530481 +step:7479 train loss:3.549897 +step:7480 train loss:3.582638 +step:7481 train loss:3.502512 +step:7482 train loss:3.544394 +step:7483 train loss:3.611683 +step:7484 train loss:3.515148 +step:7485 train loss:3.546982 +step:7486 train loss:3.466491 +step:7487 train loss:3.444800 +step:7488 train loss:3.564432 +step:7489 train loss:3.592571 +step:7490 train loss:3.564268 +step:7491 train loss:3.541945 +step:7492 train loss:3.570583 +step:7493 train loss:3.508497 +step:7494 train loss:3.571781 +step:7495 train loss:3.555167 +step:7496 train loss:3.562719 +step:7497 train loss:3.649871 +step:7498 train loss:3.621567 +step:7499 train loss:3.579115 +step:7500 validation loss:3.497735 total_sharp:5.0914e-01 L1_sharp:3.9088e+00 L2_sharp:2.3543e+00 L3_sharp:9.7378e-02 L4_sharp:1.4034e-02 L5_sharp:3.0398e-03 L6_sharp:1.8421e-03 L7_sharp:1.7086e-03 L8_sharp:1.2907e-03 L9_sharp:9.5569e-04 L10_sharp:5.8618e-04 L11_sharp:6.2871e-04 L12_sharp:6.3023e-04 total_fnorm:2.2245e+00 total_l1_linf:1.8608e+04 total_spectral:2.2245e+00 L1_fnorm:4.7333e-01 L2_fnorm:2.7669e-01 L3_fnorm:2.8227e-01 L4_fnorm:3.9661e-01 L5_fnorm:4.6902e-01 L6_fnorm:5.2959e-01 L7_fnorm:5.8438e-01 L8_fnorm:5.9023e-01 L9_fnorm:5.8944e-01 L10_fnorm:5.9238e-01 L11_fnorm:5.6505e-01 L12_fnorm:5.9809e-01 L1_l1linf:4.3177e-01 L2_l1linf:6.0079e-01 L3_l1linf:5.3103e-01 L4_l1linf:5.7676e-01 
L5_l1linf:4.8439e-01 L6_l1linf:4.5886e-01 L7_l1linf:4.3570e-01 L8_l1linf:4.1356e-01 L9_l1linf:4.4924e-01 L10_l1linf:4.7503e-01 L11_l1linf:4.8675e-01 L12_l1linf:5.0216e-01 L1_spectral:1.2042e-02 L2_spectral:1.3262e-02 L3_spectral:1.4009e-02 L4_spectral:2.1566e-02 L5_spectral:1.4654e-02 L6_spectral:1.2038e-02 L7_spectral:1.2044e-02 L8_spectral:1.2048e-02 L9_spectral:1.2043e-02 L10_spectral:1.2050e-02 L11_spectral:1.2043e-02 L12_spectral:1.2046e-02 v_norm:2.2245e+00 cos_v_-g_hvp:1.9323e-02 g_hvp_norm:1.0547e+00 cos_v_-g_t:2.2833e-02 g_t_norm:1.1361e+00 hv_norm:8.9997e+02 cos_v_hv:1.2585e-03 hg_norm:1.0591e+06 cos_g_hg:3.6115e-02 v_par:3.8923e-03 v_perp:2.2245e+00 L1_cos_v_neg_g:6.7412e-03 L1_v_norm:4.7333e-01 L2_cos_v_neg_g:1.8178e-02 L2_v_norm:2.7669e-01 L3_cos_v_neg_g:3.5342e-02 L3_v_norm:2.8227e-01 L4_cos_v_neg_g:2.6536e-02 L4_v_norm:3.9661e-01 L5_cos_v_neg_g:2.6016e-02 L5_v_norm:4.6902e-01 L6_cos_v_neg_g:2.7615e-02 L6_v_norm:5.2959e-01 L7_cos_v_neg_g:2.5951e-02 L7_v_norm:5.8438e-01 L8_cos_v_neg_g:2.6849e-02 L8_v_norm:5.9023e-01 L9_cos_v_neg_g:2.7659e-02 L9_v_norm:5.8944e-01 L10_cos_v_neg_g:3.0595e-02 L10_v_norm:5.9238e-01 L11_cos_v_neg_g:4.0101e-02 L11_v_norm:5.6505e-01 L12_cos_v_neg_g:7.2932e-02 L12_v_norm:5.9809e-01 +step:7500 train loss:3.576602 +step:7501 train loss:3.574454 +step:7502 train loss:3.575978 +step:7503 train loss:3.575194 +step:7504 train loss:3.533789 +step:7505 train loss:3.535096 +step:7506 train loss:3.525907 +step:7507 train loss:3.550008 +step:7508 train loss:3.568197 +step:7509 train loss:3.572943 +step:7510 train loss:3.546645 +step:7511 train loss:3.628384 +step:7512 train loss:3.559550 +step:7513 train loss:3.608582 +step:7514 train loss:3.532227 +step:7515 train loss:3.486054 +step:7516 train loss:3.495827 +step:7517 train loss:3.562709 +step:7518 train loss:3.549290 +step:7519 train loss:3.569649 +step:7520 train loss:3.529594 +step:7521 train loss:3.537684 +step:7522 train loss:3.535738 +step:7523 train loss:3.534443 +step:7524 train loss:3.585733 +step:7525 train loss:3.568491 +step:7526 train loss:3.545415 +step:7527 train loss:3.557991 +step:7528 train loss:3.599460 +step:7529 train loss:3.560801 +step:7530 train loss:3.508460 +step:7531 train loss:3.626671 +step:7532 train loss:3.561896 +step:7533 train loss:3.610655 +step:7534 train loss:3.626372 +step:7535 train loss:3.543169 +step:7536 train loss:3.541102 +step:7537 train loss:3.578221 +step:7538 train loss:3.554309 +step:7539 train loss:3.579479 +step:7540 train loss:3.554300 +step:7541 train loss:3.536659 +step:7542 train loss:3.585349 +step:7543 train loss:3.537507 +step:7544 train loss:3.518921 +step:7545 train loss:3.524275 +step:7546 train loss:3.489627 +step:7547 train loss:3.541305 +step:7548 train loss:3.478018 +step:7549 train loss:3.518677 +step:7550 train loss:3.450629 +step:7551 train loss:3.499775 +step:7552 train loss:3.504529 +step:7553 train loss:3.488935 +step:7554 train loss:3.502110 +step:7555 train loss:3.494270 +step:7556 train loss:3.546498 +step:7557 train loss:3.514019 +step:7558 train loss:3.508538 +step:7559 train loss:3.456697 +step:7560 train loss:3.532043 +step:7561 train loss:3.535852 +step:7562 train loss:3.524027 +step:7563 train loss:3.549906 +step:7564 train loss:3.541235 +step:7565 train loss:3.516073 +step:7566 train loss:3.495862 +step:7567 train loss:3.541066 +step:7568 train loss:3.564226 +step:7569 train loss:3.677114 +step:7570 train loss:3.596810 +step:7571 train loss:3.492667 +step:7572 train loss:3.530322 +step:7573 train loss:3.500652 +step:7574 train 
loss:3.517890 +step:7575 train loss:3.516339 +step:7576 train loss:3.532106 +step:7577 train loss:3.543202 +step:7578 train loss:3.512694 +step:7579 train loss:3.497416 +step:7580 train loss:3.505366 +step:7581 train loss:3.522095 +step:7582 train loss:3.492750 +step:7583 train loss:3.504907 +step:7584 train loss:3.529651 +step:7585 train loss:3.490600 +step:7586 train loss:3.565518 +step:7587 train loss:3.474080 +step:7588 train loss:3.502643 +step:7589 train loss:3.496591 +step:7590 train loss:3.527603 +step:7591 train loss:3.545944 +step:7592 train loss:3.601038 +step:7593 train loss:3.559451 +step:7594 train loss:3.480414 +step:7595 train loss:3.473300 +step:7596 train loss:3.490298 +step:7597 train loss:3.509878 +step:7598 train loss:3.518571 +step:7599 train loss:3.481530 +step:7600 train loss:3.493708 +step:7601 train loss:3.488172 +step:7602 train loss:3.543058 +step:7603 train loss:3.483130 +step:7604 train loss:3.538622 +step:7605 train loss:3.533875 +step:7606 train loss:3.518019 +step:7607 train loss:3.567681 +step:7608 train loss:3.504929 +step:7609 train loss:3.488487 +step:7610 train loss:3.498147 +step:7611 train loss:3.530904 +step:7612 train loss:3.476084 +step:7613 train loss:3.552003 +step:7614 train loss:3.548112 +step:7615 train loss:3.525469 +step:7616 train loss:3.507815 +step:7617 train loss:3.444494 +step:7618 train loss:3.483811 +step:7619 train loss:3.476707 +step:7620 train loss:3.490470 +step:7621 train loss:3.459970 +step:7622 train loss:3.567535 +step:7623 train loss:3.521616 +step:7624 train loss:3.556905 +step:7625 train loss:3.515913 +step:7626 train loss:3.513641 +step:7627 train loss:3.556981 +step:7628 train loss:3.507207 +step:7629 train loss:3.542722 +step:7630 train loss:3.499004 +step:7631 train loss:3.587792 +step:7632 train loss:3.580013 +step:7633 train loss:3.593322 +step:7634 train loss:3.592777 +step:7635 train loss:3.710472 +step:7636 train loss:3.674738 +step:7637 train loss:3.696639 +step:7638 train loss:3.756135 +step:7639 train loss:3.740661 +step:7640 train loss:3.840332 +step:7641 train loss:3.744908 +step:7642 train loss:3.739105 +step:7643 train loss:3.694024 +step:7644 train loss:3.699340 +step:7645 train loss:3.690797 +step:7646 train loss:3.878993 +step:7647 train loss:3.731833 +step:7648 train loss:3.771299 +step:7649 train loss:3.785690 +step:7650 train loss:3.700084 +step:7651 train loss:3.688883 +step:7652 train loss:3.722968 +step:7653 train loss:3.728908 +step:7654 train loss:3.661293 +step:7655 train loss:3.712862 +step:7656 train loss:3.711937 +step:7657 train loss:3.642752 +step:7658 train loss:3.654376 +step:7659 train loss:3.731384 +step:7660 train loss:3.720379 +step:7661 train loss:3.630801 +step:7662 train loss:3.663620 +step:7663 train loss:3.596317 +step:7664 train loss:3.616311 +step:7665 train loss:3.618900 +step:7666 train loss:3.621673 +step:7667 train loss:3.575621 +step:7668 train loss:3.612406 +step:7669 train loss:3.602850 +step:7670 train loss:3.609728 +step:7671 train loss:3.620804 +step:7672 train loss:3.633722 +step:7673 train loss:3.612521 +step:7674 train loss:3.646402 +step:7675 train loss:3.612008 +step:7676 train loss:3.597486 +step:7677 train loss:3.618043 +step:7678 train loss:3.599722 +step:7679 train loss:3.598233 +step:7680 train loss:3.623140 +step:7681 train loss:3.635232 +step:7682 train loss:3.612247 +step:7683 train loss:3.577025 +step:7684 train loss:3.586673 +step:7685 train loss:3.596283 +step:7686 train loss:3.547634 +step:7687 train loss:3.594297 +step:7688 train loss:3.591539 
+step:7689 train loss:3.557401 +step:7690 train loss:3.522467 +step:7691 train loss:3.591833 +step:7692 train loss:3.590802 +step:7693 train loss:3.561488 +step:7694 train loss:3.608979 +step:7695 train loss:3.521019 +step:7696 train loss:3.550226 +step:7697 train loss:3.553069 +step:7698 train loss:3.561262 +step:7699 train loss:3.579337 +step:7700 train loss:3.585394 +step:7701 train loss:3.545623 +step:7702 train loss:3.583863 +step:7703 train loss:3.563420 +step:7704 train loss:3.564449 +step:7705 train loss:3.546257 +step:7706 train loss:3.570760 +step:7707 train loss:3.530114 +step:7708 train loss:3.556446 +step:7709 train loss:3.575333 +step:7710 train loss:3.551726 +step:7711 train loss:3.566691 +step:7712 train loss:3.562827 +step:7713 train loss:3.624299 +step:7714 train loss:3.557159 +step:7715 train loss:3.565063 +step:7716 train loss:3.553408 +step:7717 train loss:3.534262 +step:7718 train loss:3.546577 +step:7719 train loss:3.508312 +step:7720 train loss:3.536009 +step:7721 train loss:3.527460 +step:7722 train loss:3.536743 +step:7723 train loss:3.579385 +step:7724 train loss:3.564662 +step:7725 train loss:3.527104 +step:7726 train loss:3.492003 +step:7727 train loss:3.532839 +step:7728 train loss:3.555785 +step:7729 train loss:3.538734 +step:7730 train loss:3.539467 +step:7731 train loss:3.531059 +step:7732 train loss:3.529453 +step:7733 train loss:3.585510 +step:7734 train loss:3.556466 +step:7735 train loss:3.524577 +step:7736 train loss:3.601107 +step:7737 train loss:3.577884 +step:7738 train loss:3.691455 +step:7739 train loss:3.570877 +step:7740 train loss:3.586017 +step:7741 train loss:3.588691 +step:7742 train loss:3.585795 +step:7743 train loss:3.523666 +step:7744 train loss:3.549578 +step:7745 train loss:3.601957 +step:7746 train loss:3.592304 +step:7747 train loss:3.548979 +step:7748 train loss:3.573427 +step:7749 train loss:3.578909 +step:7750 validation loss:3.510322 +step:7750 train loss:3.613818 +step:7751 train loss:3.586308 +step:7752 train loss:3.553301 +step:7753 train loss:3.554625 +step:7754 train loss:3.526268 +step:7755 train loss:3.604520 +step:7756 train loss:3.584413 +step:7757 train loss:3.568784 +step:7758 train loss:3.568062 +step:7759 train loss:3.611271 +step:7760 train loss:3.605970 +step:7761 train loss:3.590829 +step:7762 train loss:3.552694 +step:7763 train loss:3.515479 +step:7764 train loss:3.545529 +step:7765 train loss:3.525551 +step:7766 train loss:3.573101 +step:7767 train loss:3.606863 +step:7768 train loss:3.561402 +step:7769 train loss:3.577301 +step:7770 train loss:3.614474 +step:7771 train loss:3.626245 +step:7772 train loss:3.523546 +step:7773 train loss:3.564436 +step:7774 train loss:3.595381 +step:7775 train loss:3.546823 +step:7776 train loss:3.495608 +step:7777 train loss:3.568174 +step:7778 train loss:3.610044 +step:7779 train loss:3.556807 +step:7780 train loss:3.531527 +step:7781 train loss:3.550268 +step:7782 train loss:3.546955 +step:7783 train loss:3.592153 +step:7784 train loss:3.533716 +step:7785 train loss:3.537948 +step:7786 train loss:3.565511 +step:7787 train loss:3.605885 +step:7788 train loss:3.535800 +step:7789 train loss:3.558991 +step:7790 train loss:3.589646 +step:7791 train loss:3.608913 +step:7792 train loss:3.595029 +step:7793 train loss:3.592815 +step:7794 train loss:3.561990 +step:7795 train loss:3.525419 +step:7796 train loss:3.607353 +step:7797 train loss:3.578557 +step:7798 train loss:3.554986 +step:7799 train loss:3.592793 +step:7800 train loss:3.621870 +step:7801 train loss:3.604719 +step:7802 
train loss:3.584130 +step:7803 train loss:3.556565 +step:7804 train loss:3.596107 +step:7805 train loss:3.566923 +step:7806 train loss:3.574545 +step:7807 train loss:3.578153 +step:7808 train loss:3.530870 +step:7809 train loss:3.516835 +step:7810 train loss:3.534161 +step:7811 train loss:3.558039 +step:7812 train loss:3.585750 +step:7813 train loss:3.573075 +step:7814 train loss:3.657813 +step:7815 train loss:3.564739 +step:7816 train loss:3.583145 +step:7817 train loss:3.512224 +step:7818 train loss:3.510146 +step:7819 train loss:3.581214 +step:7820 train loss:3.519600 +step:7821 train loss:3.581335 +step:7822 train loss:3.654276 +step:7823 train loss:3.612609 +step:7824 train loss:3.560266 +step:7825 train loss:3.620241 +step:7826 train loss:3.581280 +step:7827 train loss:3.581392 +step:7828 train loss:3.637574 +step:7829 train loss:3.584916 +step:7830 train loss:3.537601 +step:7831 train loss:3.567491 +step:7832 train loss:3.626871 +step:7833 train loss:3.565611 +step:7834 train loss:3.573296 +step:7835 train loss:3.638754 +step:7836 train loss:3.554650 +step:7837 train loss:3.481530 +step:7838 train loss:3.586412 +step:7839 train loss:3.591916 +step:7840 train loss:3.507830 +step:7841 train loss:3.579430 +step:7842 train loss:3.549802 +step:7843 train loss:3.586103 +step:7844 train loss:3.588028 +step:7845 train loss:3.573154 +step:7846 train loss:3.626578 +step:7847 train loss:3.547089 +step:7848 train loss:3.503673 +step:7849 train loss:3.642858 +step:7850 train loss:3.577096 +step:7851 train loss:3.617710 +step:7852 train loss:3.607004 +step:7853 train loss:3.557143 +step:7854 train loss:3.566059 +step:7855 train loss:3.587927 +step:7856 train loss:3.595421 +step:7857 train loss:3.518988 +step:7858 train loss:3.588936 +step:7859 train loss:3.559698 +step:7860 train loss:3.584640 +step:7861 train loss:3.557384 +step:7862 train loss:3.571007 +step:7863 train loss:3.610218 +step:7864 train loss:3.575127 +step:7865 train loss:3.609776 +step:7866 train loss:3.512269 +step:7867 train loss:3.516844 +step:7868 train loss:3.519333 +step:7869 train loss:3.584636 +step:7870 train loss:3.501988 +step:7871 train loss:3.568452 +step:7872 train loss:3.557463 +step:7873 train loss:3.553239 +step:7874 train loss:3.506707 +step:7875 train loss:3.557783 +step:7876 train loss:3.505155 +step:7877 train loss:3.587983 +step:7878 train loss:3.608932 +step:7879 train loss:3.561507 +step:7880 train loss:3.577925 +step:7881 train loss:3.560389 +step:7882 train loss:3.555238 +step:7883 train loss:3.557541 +step:7884 train loss:3.550562 +step:7885 train loss:3.624069 +step:7886 train loss:3.539721 +step:7887 train loss:3.569221 +step:7888 train loss:3.565112 +step:7889 train loss:3.546660 +step:7890 train loss:3.538191 +step:7891 train loss:3.586567 +step:7892 train loss:3.670786 +step:7893 train loss:3.558002 +step:7894 train loss:3.600389 +step:7895 train loss:3.625795 +step:7896 train loss:3.544960 +step:7897 train loss:3.567351 +step:7898 train loss:3.577367 +step:7899 train loss:3.548692 +step:7900 train loss:3.502865 +step:7901 train loss:3.559203 +step:7902 train loss:3.594644 +step:7903 train loss:3.577108 +step:7904 train loss:3.553948 +step:7905 train loss:3.561924 +step:7906 train loss:3.567229 +step:7907 train loss:3.490046 +step:7908 train loss:3.578243 +step:7909 train loss:3.549840 +step:7910 train loss:3.505115 +step:7911 train loss:3.514538 +step:7912 train loss:3.572215 +step:7913 train loss:3.512174 +step:7914 train loss:3.490420 +step:7915 train loss:3.555203 +step:7916 train loss:3.523231 
+step:7917 train loss:3.518220 +step:7918 train loss:3.562927 +step:7919 train loss:3.569873 +step:7920 train loss:3.552284 +step:7921 train loss:3.546247 +step:7922 train loss:3.610069 +step:7923 train loss:3.575804 +step:7924 train loss:3.539206 +step:7925 train loss:3.569217 +step:7926 train loss:3.587544 +step:7927 train loss:3.526098 +step:7928 train loss:3.526108 +step:7929 train loss:3.491240 +step:7930 train loss:3.531946 +step:7931 train loss:3.566469 +step:7932 train loss:3.528659 +step:7933 train loss:3.533113 +step:7934 train loss:3.539210 +step:7935 train loss:3.525807 +step:7936 train loss:3.545429 +step:7937 train loss:3.495584 +step:7938 train loss:3.510262 +step:7939 train loss:3.489106 +step:7940 train loss:3.486742 +step:7941 train loss:3.584241 +step:7942 train loss:3.465099 +step:7943 train loss:3.535323 +step:7944 train loss:3.509953 +step:7945 train loss:3.490786 +step:7946 train loss:3.582901 +step:7947 train loss:3.587760 +step:7948 train loss:3.553545 +step:7949 train loss:3.600561 +step:7950 train loss:3.570182 +step:7951 train loss:3.524478 +step:7952 train loss:3.569376 +step:7953 train loss:3.545542 +step:7954 train loss:3.526875 +step:7955 train loss:3.620446 +step:7956 train loss:3.556683 +step:7957 train loss:3.520682 +step:7958 train loss:3.537692 +step:7959 train loss:3.522426 +step:7960 train loss:3.538730 +step:7961 train loss:3.514578 +step:7962 train loss:3.464692 +step:7963 train loss:3.542267 +step:7964 train loss:3.544511 +step:7965 train loss:3.512583 +step:7966 train loss:3.527757 +step:7967 train loss:3.561440 +step:7968 train loss:3.519148 +step:7969 train loss:3.566797 +step:7970 train loss:3.542301 +step:7971 train loss:3.581294 +step:7972 train loss:3.535601 +step:7973 train loss:3.592181 +step:7974 train loss:3.545656 +step:7975 train loss:3.614407 +step:7976 train loss:3.590067 +step:7977 train loss:3.483146 +step:7978 train loss:3.545743 +step:7979 train loss:3.538651 +step:7980 train loss:3.517149 +step:7981 train loss:3.567727 +step:7982 train loss:3.521972 +step:7983 train loss:3.581936 +step:7984 train loss:3.577721 +step:7985 train loss:3.505267 +step:7986 train loss:3.537621 +step:7987 train loss:3.531702 +step:7988 train loss:3.551641 +step:7989 train loss:3.579773 +step:7990 train loss:3.564786 +step:7991 train loss:3.478483 +step:7992 train loss:3.478064 +step:7993 train loss:3.593261 +step:7994 train loss:3.524237 +step:7995 train loss:3.561309 +step:7996 train loss:3.566019 +step:7997 train loss:3.512950 +step:7998 train loss:3.627381 +step:7999 train loss:3.635730 +step:8000 validation loss:3.477881 total_sharp:6.6089e-02 L1_sharp:1.2206e-01 L2_sharp:2.3156e-01 L3_sharp:6.9316e-01 L4_sharp:5.0936e-03 L5_sharp:2.0383e-03 L6_sharp:1.6917e-03 L7_sharp:1.3116e-03 L8_sharp:1.3600e-03 L9_sharp:9.1240e-04 L10_sharp:4.2607e-04 L11_sharp:5.1678e-04 L12_sharp:4.0638e-04 total_fnorm:2.2033e+00 total_l1_linf:1.8175e+04 total_spectral:2.2033e+00 L1_fnorm:3.8007e-01 L2_fnorm:1.7994e-01 L3_fnorm:2.9283e-01 L4_fnorm:4.0183e-01 L5_fnorm:4.9395e-01 L6_fnorm:5.3579e-01 L7_fnorm:5.8760e-01 L8_fnorm:5.9371e-01 L9_fnorm:5.9255e-01 L10_fnorm:5.9456e-01 L11_fnorm:5.7351e-01 L12_fnorm:5.9785e-01 L1_l1linf:6.8277e-01 L2_l1linf:6.1796e-01 L3_l1linf:5.8487e-01 L4_l1linf:5.6092e-01 L5_l1linf:4.7645e-01 L6_l1linf:4.2848e-01 L7_l1linf:3.9404e-01 L8_l1linf:3.9905e-01 L9_l1linf:3.9996e-01 L10_l1linf:4.0920e-01 L11_l1linf:4.6126e-01 L12_l1linf:4.5859e-01 L1_spectral:1.4383e-02 L2_spectral:1.3555e-02 L3_spectral:1.3213e-02 L4_spectral:2.1544e-02 
L5_spectral:1.5683e-02 L6_spectral:1.2540e-02 L7_spectral:1.2052e-02 L8_spectral:1.2047e-02 L9_spectral:1.2045e-02 L10_spectral:1.2058e-02 L11_spectral:1.2042e-02 L12_spectral:1.2046e-02 v_norm:2.2033e+00 cos_v_-g_hvp:1.3794e-02 g_hvp_norm:1.1211e+00 cos_v_-g_t:1.8899e-02 g_t_norm:1.2169e+00 hv_norm:1.3335e+02 cos_v_hv:1.0920e-03 hg_norm:1.0796e+06 cos_g_hg:5.4694e-01 v_par:3.4940e-03 v_perp:2.2033e+00 L1_cos_v_neg_g:-5.8713e-03 L1_v_norm:3.8007e-01 L2_cos_v_neg_g:9.9474e-03 L2_v_norm:1.7994e-01 L3_cos_v_neg_g:3.8468e-02 L3_v_norm:2.9283e-01 L4_cos_v_neg_g:2.6663e-02 L4_v_norm:4.0183e-01 L5_cos_v_neg_g:2.5078e-02 L5_v_norm:4.9395e-01 L6_cos_v_neg_g:2.6152e-02 L6_v_norm:5.3579e-01 L7_cos_v_neg_g:2.6229e-02 L7_v_norm:5.8760e-01 L8_cos_v_neg_g:2.7525e-02 L8_v_norm:5.9371e-01 L9_cos_v_neg_g:2.7405e-02 L9_v_norm:5.9255e-01 L10_cos_v_neg_g:3.0888e-02 L10_v_norm:5.9456e-01 L11_cos_v_neg_g:3.9084e-02 L11_v_norm:5.7351e-01 L12_cos_v_neg_g:6.6647e-02 L12_v_norm:5.9785e-01 +step:8000 train loss:3.508007 +step:8001 train loss:3.583249 +step:8002 train loss:3.498366 +step:8003 train loss:3.520681 +step:8004 train loss:3.556886 +step:8005 train loss:3.673632 +step:8006 train loss:3.578760 +step:8007 train loss:3.548353 +step:8008 train loss:3.532462 +step:8009 train loss:3.534374 +step:8010 train loss:3.591902 +step:8011 train loss:3.564321 +step:8012 train loss:3.496343 +step:8013 train loss:3.568424 +step:8014 train loss:3.522780 +step:8015 train loss:3.536980 +step:8016 train loss:3.523385 +step:8017 train loss:3.501669 +step:8018 train loss:3.570842 +step:8019 train loss:3.530098 +step:8020 train loss:3.528172 +step:8021 train loss:3.534519 +step:8022 train loss:3.574495 +step:8023 train loss:3.665650 +step:8024 train loss:3.551093 +step:8025 train loss:3.585982 +step:8026 train loss:3.526405 +step:8027 train loss:3.550516 +step:8028 train loss:3.472456 +step:8029 train loss:3.583799 +step:8030 train loss:3.557867 +step:8031 train loss:3.578162 +step:8032 train loss:3.551711 +step:8033 train loss:3.553171 +step:8034 train loss:3.497521 +step:8035 train loss:3.487057 +step:8036 train loss:3.542100 +step:8037 train loss:3.468318 +step:8038 train loss:3.564209 +step:8039 train loss:3.594221 +step:8040 train loss:3.520927 +step:8041 train loss:3.492723 +step:8042 train loss:3.582597 +step:8043 train loss:3.594710 +step:8044 train loss:3.572180 +step:8045 train loss:3.568949 +step:8046 train loss:3.530781 +step:8047 train loss:3.617635 +step:8048 train loss:3.544490 +step:8049 train loss:3.572736 +step:8050 train loss:3.576987 +step:8051 train loss:3.509269 +step:8052 train loss:3.542407 +step:8053 train loss:3.598371 +step:8054 train loss:3.523896 +step:8055 train loss:3.537990 +step:8056 train loss:3.518174 +step:8057 train loss:3.541396 +step:8058 train loss:3.524768 +step:8059 train loss:3.524131 +step:8060 train loss:3.510111 +step:8061 train loss:3.520000 +step:8062 train loss:3.529444 +step:8063 train loss:3.529783 +step:8064 train loss:3.509468 +step:8065 train loss:3.519893 +step:8066 train loss:3.507540 +step:8067 train loss:3.538545 +step:8068 train loss:3.540787 +step:8069 train loss:3.560086 +step:8070 train loss:3.586694 +step:8071 train loss:3.554449 +step:8072 train loss:3.583150 +step:8073 train loss:3.529278 +step:8074 train loss:3.594234 +step:8075 train loss:3.569112 +step:8076 train loss:3.587271 +step:8077 train loss:3.515532 +step:8078 train loss:3.487064 +step:8079 train loss:3.531941 +step:8080 train loss:3.552874 +step:8081 train loss:3.485441 +step:8082 train loss:3.529023 
+step:8083 train loss:3.476729 +step:8084 train loss:3.521173 +step:8085 train loss:3.498915 +step:8086 train loss:3.589225 +step:8087 train loss:3.487702 +step:8088 train loss:3.550190 +step:8089 train loss:3.622015 +step:8090 train loss:3.549650 +step:8091 train loss:3.643707 +step:8092 train loss:3.576551 +step:8093 train loss:3.525347 +step:8094 train loss:3.560322 +step:8095 train loss:3.560185 +step:8096 train loss:3.598924 +step:8097 train loss:3.523226 +step:8098 train loss:3.505736 +step:8099 train loss:3.541067 +step:8100 train loss:3.499380 +step:8101 train loss:3.469666 +step:8102 train loss:3.529417 +step:8103 train loss:3.475945 +step:8104 train loss:3.471616 +step:8105 train loss:3.583462 +step:8106 train loss:3.577804 +step:8107 train loss:3.576738 +step:8108 train loss:3.571851 +step:8109 train loss:3.535984 +step:8110 train loss:3.502128 +step:8111 train loss:3.539295 +step:8112 train loss:3.547588 +step:8113 train loss:3.533709 +step:8114 train loss:3.493308 +step:8115 train loss:3.579716 +step:8116 train loss:3.460968 +step:8117 train loss:3.524680 +step:8118 train loss:3.504775 +step:8119 train loss:3.594223 +step:8120 train loss:3.482362 +step:8121 train loss:3.454368 +step:8122 train loss:3.487464 +step:8123 train loss:3.477081 +step:8124 train loss:3.550831 +step:8125 train loss:3.513611 +step:8126 train loss:3.557474 +step:8127 train loss:3.495916 +step:8128 train loss:3.540972 +step:8129 train loss:3.460761 +step:8130 train loss:3.527056 +step:8131 train loss:3.512324 +step:8132 train loss:3.559560 +step:8133 train loss:3.512373 +step:8134 train loss:3.529791 +step:8135 train loss:3.526856 +step:8136 train loss:3.434146 +step:8137 train loss:3.400034 +step:8138 train loss:3.470249 +step:8139 train loss:3.498233 +step:8140 train loss:3.489532 +step:8141 train loss:3.539749 +step:8142 train loss:3.488191 +step:8143 train loss:3.502602 +step:8144 train loss:3.536321 +step:8145 train loss:3.479695 +step:8146 train loss:3.572477 +step:8147 train loss:3.528783 +step:8148 train loss:3.537084 +step:8149 train loss:3.486043 +step:8150 train loss:3.530417 +step:8151 train loss:3.495886 +step:8152 train loss:3.480219 +step:8153 train loss:3.468171 +step:8154 train loss:3.553299 +step:8155 train loss:3.504387 +step:8156 train loss:3.548232 +step:8157 train loss:3.447911 +step:8158 train loss:3.463472 +step:8159 train loss:3.492855 +step:8160 train loss:3.471947 +step:8161 train loss:3.521067 +step:8162 train loss:3.547238 +step:8163 train loss:3.436571 +step:8164 train loss:3.455967 +step:8165 train loss:3.535459 +step:8166 train loss:3.482754 +step:8167 train loss:3.475930 +step:8168 train loss:3.465590 +step:8169 train loss:3.422054 +step:8170 train loss:3.520833 +step:8171 train loss:3.451743 +step:8172 train loss:3.530126 +step:8173 train loss:3.455510 +step:8174 train loss:3.541314 +step:8175 train loss:3.480101 +step:8176 train loss:3.542182 +step:8177 train loss:3.427195 +step:8178 train loss:3.469626 +step:8179 train loss:3.470628 +step:8180 train loss:3.492445 +step:8181 train loss:3.489252 +step:8182 train loss:3.455038 +step:8183 train loss:3.488932 +step:8184 train loss:3.397866 +step:8185 train loss:3.523182 +step:8186 train loss:3.525606 +step:8187 train loss:3.547399 +step:8188 train loss:3.518415 +step:8189 train loss:3.483176 +step:8190 train loss:3.481165 +step:8191 train loss:3.451519 +step:8192 train loss:3.510874 +step:8193 train loss:3.474157 +step:8194 train loss:3.502682 +step:8195 train loss:3.466777 +step:8196 train loss:3.514510 +step:8197 train 
loss:3.454760 +step:8198 train loss:3.642861 +step:8199 train loss:3.655228 +step:8200 train loss:3.520058 +step:8201 train loss:3.487900 +step:8202 train loss:3.567171 +step:8203 train loss:3.464149 +step:8204 train loss:3.515872 +step:8205 train loss:3.483122 +step:8206 train loss:3.505834 +step:8207 train loss:3.504184 +step:8208 train loss:3.460387 +step:8209 train loss:3.500635 +step:8210 train loss:3.400437 +step:8211 train loss:3.486725 +step:8212 train loss:3.448290 +step:8213 train loss:3.511506 +step:8214 train loss:3.483141 +step:8215 train loss:3.479150 +step:8216 train loss:3.500688 +step:8217 train loss:3.409590 +step:8218 train loss:3.482393 +step:8219 train loss:3.452181 +step:8220 train loss:3.503123 +step:8221 train loss:3.443081 +step:8222 train loss:3.506341 +step:8223 train loss:3.512882 +step:8224 train loss:3.583582 +step:8225 train loss:3.469752 +step:8226 train loss:3.558942 +step:8227 train loss:3.502437 +step:8228 train loss:3.456266 +step:8229 train loss:3.444774 +step:8230 train loss:3.498976 +step:8231 train loss:3.586337 +step:8232 train loss:3.504002 +step:8233 train loss:3.497685 +step:8234 train loss:3.475652 +step:8235 train loss:3.507831 +step:8236 train loss:3.527261 +step:8237 train loss:3.511335 +step:8238 train loss:3.454693 +step:8239 train loss:3.534061 +step:8240 train loss:3.419713 +step:8241 train loss:3.574203 +step:8242 train loss:3.505373 +step:8243 train loss:3.605664 +step:8244 train loss:3.480489 +step:8245 train loss:3.515881 +step:8246 train loss:3.470485 +step:8247 train loss:3.538279 +step:8248 train loss:3.487557 +step:8249 train loss:3.475658 +step:8250 validation loss:3.458759 +step:8250 train loss:3.469315 +step:8251 train loss:3.452808 +step:8252 train loss:3.519943 +step:8253 train loss:3.419080 +step:8254 train loss:3.443216 +step:8255 train loss:3.465628 +step:8256 train loss:3.501696 +step:8257 train loss:3.467370 +step:8258 train loss:3.473804 +step:8259 train loss:3.511088 +step:8260 train loss:3.487210 +step:8261 train loss:3.542847 +step:8262 train loss:3.518636 +step:8263 train loss:3.544357 +step:8264 train loss:3.506859 +step:8265 train loss:3.559471 +step:8266 train loss:3.501933 +step:8267 train loss:3.519014 +step:8268 train loss:3.493112 +step:8269 train loss:3.638930 +step:8270 train loss:3.440968 +step:8271 train loss:3.523139 +step:8272 train loss:3.468554 +step:8273 train loss:3.483156 +step:8274 train loss:3.541401 +step:8275 train loss:3.514427 +step:8276 train loss:3.612872 +step:8277 train loss:3.499521 +step:8278 train loss:3.516522 +step:8279 train loss:3.462361 +step:8280 train loss:3.560196 +step:8281 train loss:3.517651 +step:8282 train loss:3.632703 +step:8283 train loss:3.622596 +step:8284 train loss:3.627529 +step:8285 train loss:3.729656 +step:8286 train loss:3.673291 +step:8287 train loss:3.634197 +step:8288 train loss:3.564540 +step:8289 train loss:3.609907 +step:8290 train loss:3.549880 +step:8291 train loss:3.623494 +step:8292 train loss:3.608584 +step:8293 train loss:3.662775 +step:8294 train loss:3.714816 +step:8295 train loss:3.638788 +step:8296 train loss:3.596718 +step:8297 train loss:3.568570 +step:8298 train loss:3.642434 +step:8299 train loss:3.547542 +step:8300 train loss:3.586892 +step:8301 train loss:3.640739 +step:8302 train loss:3.658454 +step:8303 train loss:3.539881 +step:8304 train loss:3.616342 +step:8305 train loss:3.634555 +step:8306 train loss:3.585603 +step:8307 train loss:3.569005 +step:8308 train loss:3.617115 +step:8309 train loss:3.598349 +step:8310 train loss:3.637500 
+step:8311 train loss:3.577080 +step:8312 train loss:3.607305 +step:8313 train loss:3.604877 +step:8314 train loss:3.624454 +step:8315 train loss:3.598139 +step:8316 train loss:3.649063 +step:8317 train loss:3.575144 +step:8318 train loss:3.628302 +step:8319 train loss:3.575062 +step:8320 train loss:3.504625 +step:8321 train loss:3.647467 +step:8322 train loss:3.518928 +step:8323 train loss:3.633995 +step:8324 train loss:3.599760 +step:8325 train loss:3.528234 +step:8326 train loss:3.591703 +step:8327 train loss:3.535352 +step:8328 train loss:3.714531 +step:8329 train loss:3.601926 +step:8330 train loss:3.594435 +step:8331 train loss:3.657728 +step:8332 train loss:3.599561 +step:8333 train loss:3.624868 +step:8334 train loss:3.565419 +step:8335 train loss:3.618795 +step:8336 train loss:3.550328 +step:8337 train loss:3.557213 +step:8338 train loss:3.587036 +step:8339 train loss:3.569888 +step:8340 train loss:3.582758 +step:8341 train loss:3.550204 +step:8342 train loss:3.589524 +step:8343 train loss:3.612243 +step:8344 train loss:3.565130 +step:8345 train loss:3.585825 +step:8346 train loss:3.555492 +step:8347 train loss:3.617472 +step:8348 train loss:3.567438 +step:8349 train loss:3.588718 +step:8350 train loss:3.543384 +step:8351 train loss:3.522812 +step:8352 train loss:3.533258 +step:8353 train loss:3.545581 +step:8354 train loss:3.581769 +step:8355 train loss:3.558299 +step:8356 train loss:3.550893 +step:8357 train loss:3.559062 +step:8358 train loss:3.547436 +step:8359 train loss:3.583146 +step:8360 train loss:3.560279 +step:8361 train loss:3.477055 +step:8362 train loss:3.545390 +step:8363 train loss:3.546901 +step:8364 train loss:3.597248 +step:8365 train loss:3.590856 +step:8366 train loss:3.527564 +step:8367 train loss:3.522960 +step:8368 train loss:3.547839 +step:8369 train loss:3.580740 +step:8370 train loss:3.486665 +step:8371 train loss:3.512856 +step:8372 train loss:3.520917 +step:8373 train loss:3.517044 +step:8374 train loss:3.553726 +step:8375 train loss:3.539263 +step:8376 train loss:3.523582 +step:8377 train loss:3.512996 +step:8378 train loss:3.483825 +step:8379 train loss:3.537301 +step:8380 train loss:3.512715 +step:8381 train loss:3.542338 +step:8382 train loss:3.525270 +step:8383 train loss:3.568370 +step:8384 train loss:3.576023 +step:8385 train loss:3.550001 +step:8386 train loss:3.586057 +step:8387 train loss:3.482880 +step:8388 train loss:3.514035 +step:8389 train loss:3.471121 +step:8390 train loss:3.548312 +step:8391 train loss:3.530627 +step:8392 train loss:3.489312 +step:8393 train loss:3.579587 +step:8394 train loss:3.557291 +step:8395 train loss:3.503394 +step:8396 train loss:3.695045 +step:8397 train loss:3.503362 +step:8398 train loss:3.573832 +step:8399 train loss:3.517716 +step:8400 train loss:3.524987 +step:8401 train loss:3.535208 +step:8402 train loss:3.491770 +step:8403 train loss:3.563422 +step:8404 train loss:3.489681 +step:8405 train loss:3.517057 +step:8406 train loss:3.505051 +step:8407 train loss:3.564748 +step:8408 train loss:3.484696 +step:8409 train loss:3.432849 +step:8410 train loss:3.513086 +step:8411 train loss:3.565628 +step:8412 train loss:3.554123 +step:8413 train loss:3.510110 +step:8414 train loss:3.486354 +step:8415 train loss:3.514174 +step:8416 train loss:3.487280 +step:8417 train loss:3.504163 +step:8418 train loss:3.555555 +step:8419 train loss:3.473117 +step:8420 train loss:3.523463 +step:8421 train loss:3.496347 +step:8422 train loss:3.542615 +step:8423 train loss:3.500746 +step:8424 train loss:3.494451 +step:8425 train 
loss:3.544483 +step:8426 train loss:3.482077 +step:8427 train loss:3.563728 +step:8428 train loss:3.451237 +step:8429 train loss:3.484252 +step:8430 train loss:3.517353 +step:8431 train loss:3.481035 +step:8432 train loss:3.519486 +step:8433 train loss:3.484182 +step:8434 train loss:3.502892 +step:8435 train loss:3.499751 +step:8436 train loss:3.511899 +step:8437 train loss:3.531298 +step:8438 train loss:3.461659 +step:8439 train loss:3.532657 +step:8440 train loss:3.550742 +step:8441 train loss:3.587964 +step:8442 train loss:3.513884 +step:8443 train loss:3.566236 +step:8444 train loss:3.505664 +step:8445 train loss:3.454374 +step:8446 train loss:3.494110 +step:8447 train loss:3.556230 +step:8448 train loss:3.441023 +step:8449 train loss:3.498523 +step:8450 train loss:3.446808 +step:8451 train loss:3.508076 +step:8452 train loss:3.500675 +step:8453 train loss:3.483345 +step:8454 train loss:3.548402 +step:8455 train loss:3.459882 +step:8456 train loss:3.521490 +step:8457 train loss:3.498601 +step:8458 train loss:3.477819 +step:8459 train loss:3.560078 +step:8460 train loss:3.493749 +step:8461 train loss:3.526329 +step:8462 train loss:3.509511 +step:8463 train loss:3.473932 +step:8464 train loss:3.507631 +step:8465 train loss:3.525372 +step:8466 train loss:3.604901 +step:8467 train loss:3.474124 +step:8468 train loss:3.472857 +step:8469 train loss:3.502102 +step:8470 train loss:3.521909 +step:8471 train loss:3.559932 +step:8472 train loss:3.463995 +step:8473 train loss:3.539921 +step:8474 train loss:3.522995 +step:8475 train loss:3.501024 +step:8476 train loss:3.535120 +step:8477 train loss:3.508343 +step:8478 train loss:3.522370 +step:8479 train loss:3.512525 +step:8480 train loss:3.464508 +step:8481 train loss:3.533005 +step:8482 train loss:3.492722 +step:8483 train loss:3.584782 +step:8484 train loss:3.496356 +step:8485 train loss:3.443092 +step:8486 train loss:3.512738 +step:8487 train loss:3.454979 +step:8488 train loss:3.482642 +step:8489 train loss:3.593650 +step:8490 train loss:3.510948 +step:8491 train loss:3.483588 +step:8492 train loss:3.535057 +step:8493 train loss:3.452055 +step:8494 train loss:3.471014 +step:8495 train loss:3.430639 +step:8496 train loss:3.605281 +step:8497 train loss:3.682796 +step:8498 train loss:3.722136 +step:8499 train loss:3.659986 +step:8500 validation loss:3.483615 total_sharp:-2.8216e-02 L1_sharp:-4.1448e-01 L2_sharp:-3.3464e-01 L3_sharp:6.7157e-03 L4_sharp:-6.4038e-04 L5_sharp:2.3653e-03 L6_sharp:2.2733e-03 L7_sharp:1.6918e-03 L8_sharp:1.7345e-03 L9_sharp:1.3763e-03 L10_sharp:6.1752e-04 L11_sharp:5.5386e-04 L12_sharp:4.8118e-04 total_fnorm:1.6395e+00 total_l1_linf:1.3629e+04 total_spectral:1.6395e+00 L1_fnorm:3.3459e-01 L2_fnorm:1.8553e-01 L3_fnorm:2.2899e-01 L4_fnorm:2.9268e-01 L5_fnorm:3.4483e-01 L6_fnorm:3.9622e-01 L7_fnorm:4.3638e-01 L8_fnorm:4.3887e-01 L9_fnorm:4.4064e-01 L10_fnorm:4.4348e-01 L11_fnorm:4.2549e-01 L12_fnorm:4.4946e-01 L1_l1linf:3.6075e-01 L2_l1linf:4.6061e-01 L3_l1linf:4.4733e-01 L4_l1linf:4.4395e-01 L5_l1linf:4.2220e-01 L6_l1linf:3.8022e-01 L7_l1linf:3.3280e-01 L8_l1linf:3.3812e-01 L9_l1linf:3.5900e-01 L10_l1linf:3.7053e-01 L11_l1linf:3.9629e-01 L12_l1linf:3.9512e-01 L1_spectral:9.0356e-03 L2_spectral:1.0209e-02 L3_spectral:9.9090e-03 L4_spectral:1.5587e-02 L5_spectral:1.3345e-02 L6_spectral:9.0347e-03 L7_spectral:9.0387e-03 L8_spectral:9.0416e-03 L9_spectral:9.0408e-03 L10_spectral:9.0439e-03 L11_spectral:9.0393e-03 L12_spectral:9.0393e-03 v_norm:1.6395e+00 cos_v_-g_hvp:1.2721e-02 g_hvp_norm:1.1607e+00 cos_v_-g_t:2.6221e-02 
g_t_norm:9.1922e-01 hv_norm:4.1222e+01 cos_v_hv:-1.1222e-03 hg_norm:1.0074e+05 cos_g_hg:4.6779e-01 v_par:2.4426e-03 v_perp:1.6395e+00 L1_cos_v_neg_g:-5.0477e-03 L1_v_norm:3.3459e-01 L2_cos_v_neg_g:-3.2778e-03 L2_v_norm:1.8553e-01 L3_cos_v_neg_g:1.8478e-02 L3_v_norm:2.2899e-01 L4_cos_v_neg_g:2.6667e-02 L4_v_norm:2.9268e-01 L5_cos_v_neg_g:2.6958e-02 L5_v_norm:3.4483e-01 L6_cos_v_neg_g:2.6065e-02 L6_v_norm:3.9622e-01 L7_cos_v_neg_g:2.5642e-02 L7_v_norm:4.3638e-01 L8_cos_v_neg_g:2.5356e-02 L8_v_norm:4.3887e-01 L9_cos_v_neg_g:2.7647e-02 L9_v_norm:4.4064e-01 L10_cos_v_neg_g:2.8076e-02 L10_v_norm:4.4348e-01 L11_cos_v_neg_g:3.6588e-02 L11_v_norm:4.2549e-01 L12_cos_v_neg_g:6.9584e-02 L12_v_norm:4.4946e-01 +step:8500 train loss:3.516927 +step:8501 train loss:3.551776 +step:8502 train loss:3.536414 +step:8503 train loss:3.543101 +step:8504 train loss:3.476036 +step:8505 train loss:3.521721 +step:8506 train loss:3.461945 +step:8507 train loss:3.507378 +step:8508 train loss:3.490988 +step:8509 train loss:3.496035 +step:8510 train loss:3.515309 +step:8511 train loss:3.565111 +step:8512 train loss:3.535289 +step:8513 train loss:3.518192 +step:8514 train loss:3.482712 +step:8515 train loss:3.512641 +step:8516 train loss:3.480812 +step:8517 train loss:3.529616 +step:8518 train loss:3.525668 +step:8519 train loss:3.520690 +step:8520 train loss:3.534903 +step:8521 train loss:3.505258 +step:8522 train loss:3.532018 +step:8523 train loss:3.525439 +step:8524 train loss:3.426965 +step:8525 train loss:3.475780 +step:8526 train loss:3.531744 +step:8527 train loss:3.533222 +step:8528 train loss:3.516227 +step:8529 train loss:3.550786 +step:8530 train loss:3.483367 +step:8531 train loss:3.568856 +step:8532 train loss:3.518072 +step:8533 train loss:3.521492 +step:8534 train loss:3.479202 +step:8535 train loss:3.533557 +step:8536 train loss:3.533535 +step:8537 train loss:3.543949 +step:8538 train loss:3.550571 +step:8539 train loss:3.542895 +step:8540 train loss:3.497853 +step:8541 train loss:3.589116 +step:8542 train loss:3.648193 +step:8543 train loss:3.596617 +step:8544 train loss:3.538133 +step:8545 train loss:3.491915 +step:8546 train loss:3.545735 +step:8547 train loss:3.456429 +step:8548 train loss:3.550853 +step:8549 train loss:3.426266 +step:8550 train loss:3.501804 +step:8551 train loss:3.497029 +step:8552 train loss:3.513470 +step:8553 train loss:3.523103 +step:8554 train loss:3.469377 +step:8555 train loss:3.503894 +step:8556 train loss:3.514860 +step:8557 train loss:3.542211 +step:8558 train loss:3.543166 +step:8559 train loss:3.501716 +step:8560 train loss:3.532731 +step:8561 train loss:3.543071 +step:8562 train loss:3.475450 +step:8563 train loss:3.530473 +step:8564 train loss:3.505702 +step:8565 train loss:3.507290 +step:8566 train loss:3.542177 +step:8567 train loss:3.488130 +step:8568 train loss:3.490965 +step:8569 train loss:3.515612 +step:8570 train loss:3.448003 +step:8571 train loss:3.491655 +step:8572 train loss:3.501293 +step:8573 train loss:3.566481 +step:8574 train loss:3.487841 +step:8575 train loss:3.524274 +step:8576 train loss:3.496995 +step:8577 train loss:3.482427 +step:8578 train loss:3.539514 +step:8579 train loss:3.545013 +step:8580 train loss:3.516961 +step:8581 train loss:3.544357 +step:8582 train loss:3.499106 +step:8583 train loss:3.464814 +step:8584 train loss:3.550348 +step:8585 train loss:3.455616 +step:8586 train loss:3.475550 +step:8587 train loss:3.514430 +step:8588 train loss:3.454153 +step:8589 train loss:3.510226 +step:8590 train loss:3.494269 +step:8591 train 
loss:3.480966 +step:8592 train loss:3.489843 +step:8593 train loss:3.474397 +step:8594 train loss:3.506805 +step:8595 train loss:3.472512 +step:8596 train loss:3.488996 +step:8597 train loss:3.521738 +step:8598 train loss:3.487537 +step:8599 train loss:3.550198 +step:8600 train loss:3.501916 +step:8601 train loss:3.444420 +step:8602 train loss:3.533782 +step:8603 train loss:3.462327 +step:8604 train loss:3.554715 +step:8605 train loss:3.482856 +step:8606 train loss:3.450370 +step:8607 train loss:3.468102 +step:8608 train loss:3.430425 +step:8609 train loss:3.406338 +step:8610 train loss:3.532481 +step:8611 train loss:3.455304 +step:8612 train loss:3.488873 +step:8613 train loss:3.490270 +step:8614 train loss:3.440626 +step:8615 train loss:3.473788 +step:8616 train loss:3.520295 +step:8617 train loss:3.566536 +step:8618 train loss:3.528424 +step:8619 train loss:3.489644 +step:8620 train loss:3.528762 +step:8621 train loss:3.474778 +step:8622 train loss:3.499476 +step:8623 train loss:3.499925 +step:8624 train loss:3.491347 +step:8625 train loss:3.523608 +step:8626 train loss:3.544253 +step:8627 train loss:3.488964 +step:8628 train loss:3.515749 +step:8629 train loss:3.461282 +step:8630 train loss:3.499318 +step:8631 train loss:3.558380 +step:8632 train loss:3.521123 +step:8633 train loss:3.468849 +step:8634 train loss:3.457998 +step:8635 train loss:3.474426 +step:8636 train loss:3.502430 +step:8637 train loss:3.469835 +step:8638 train loss:3.527903 +step:8639 train loss:3.504851 +step:8640 train loss:3.459300 +step:8641 train loss:3.455642 +step:8642 train loss:3.471057 +step:8643 train loss:3.504962 +step:8644 train loss:3.539072 +step:8645 train loss:3.464468 +step:8646 train loss:3.510781 +step:8647 train loss:3.548543 +step:8648 train loss:3.480399 +step:8649 train loss:3.493078 +step:8650 train loss:3.456429 +step:8651 train loss:3.463570 +step:8652 train loss:3.449741 +step:8653 train loss:3.454156 +step:8654 train loss:3.565267 +step:8655 train loss:3.500669 +step:8656 train loss:3.503470 +step:8657 train loss:3.508366 +step:8658 train loss:3.496799 +step:8659 train loss:3.470340 +step:8660 train loss:3.565762 +step:8661 train loss:3.456284 +step:8662 train loss:3.539148 +step:8663 train loss:3.460375 +step:8664 train loss:3.502700 +step:8665 train loss:3.538146 +step:8666 train loss:3.501895 +step:8667 train loss:3.501655 +step:8668 train loss:3.426337 +step:8669 train loss:3.460622 +step:8670 train loss:3.473428 +step:8671 train loss:3.494367 +step:8672 train loss:3.493407 +step:8673 train loss:3.514634 +step:8674 train loss:3.470897 +step:8675 train loss:3.471060 +step:8676 train loss:3.495638 +step:8677 train loss:3.484000 +step:8678 train loss:3.495986 +step:8679 train loss:3.504271 +step:8680 train loss:3.474250 +step:8681 train loss:3.506973 +step:8682 train loss:3.499686 +step:8683 train loss:3.491662 +step:8684 train loss:3.482377 +step:8685 train loss:3.439720 +step:8686 train loss:3.460143 +step:8687 train loss:3.508106 +step:8688 train loss:3.510397 +step:8689 train loss:3.499448 +step:8690 train loss:3.522216 +step:8691 train loss:3.455215 +step:8692 train loss:3.446093 +step:8693 train loss:3.497439 +step:8694 train loss:3.501554 +step:8695 train loss:3.470158 +step:8696 train loss:3.463170 +step:8697 train loss:3.425729 +step:8698 train loss:3.440449 +step:8699 train loss:3.415328 +step:8700 train loss:3.403940 +step:8701 train loss:3.462534 +step:8702 train loss:3.500524 +step:8703 train loss:3.510192 +step:8704 train loss:3.603531 +step:8705 train loss:3.453424 
+step:8706 train loss:3.539767 +step:8707 train loss:3.477599 +step:8708 train loss:3.500329 +step:8709 train loss:3.462633 +step:8710 train loss:3.455995 +step:8711 train loss:3.500767 +step:8712 train loss:3.401678 +step:8713 train loss:3.434613 +step:8714 train loss:3.436849 +step:8715 train loss:3.424700 +step:8716 train loss:3.453142 +step:8717 train loss:3.393900 +step:8718 train loss:3.507959 +step:8719 train loss:3.422427 +step:8720 train loss:3.451282 +step:8721 train loss:3.520853 +step:8722 train loss:3.461504 +step:8723 train loss:3.464718 +step:8724 train loss:3.460848 +step:8725 train loss:3.415818 +step:8726 train loss:3.505849 +step:8727 train loss:3.450195 +step:8728 train loss:3.443947 +step:8729 train loss:3.461716 +step:8730 train loss:3.383084 +step:8731 train loss:3.466206 +step:8732 train loss:3.526079 +step:8733 train loss:3.525726 +step:8734 train loss:3.463216 +step:8735 train loss:3.474602 +step:8736 train loss:3.477287 +step:8737 train loss:3.476383 +step:8738 train loss:3.434363 +step:8739 train loss:3.394723 +step:8740 train loss:3.479474 +step:8741 train loss:3.381596 +step:8742 train loss:3.500715 +step:8743 train loss:3.474314 +step:8744 train loss:3.517980 +step:8745 train loss:3.448669 +step:8746 train loss:3.470104 +step:8747 train loss:3.480386 +step:8748 train loss:3.433359 +step:8749 train loss:3.401318 +step:8750 validation loss:3.432914 +step:8750 train loss:3.537666 +step:8751 train loss:3.427994 +step:8752 train loss:3.477838 +step:8753 train loss:3.523865 +step:8754 train loss:3.473681 +step:8755 train loss:3.525652 +step:8756 train loss:3.586349 +step:8757 train loss:3.448383 +step:8758 train loss:3.454185 +step:8759 train loss:3.479997 +step:8760 train loss:3.479348 +step:8761 train loss:3.462593 +step:8762 train loss:3.459118 +step:8763 train loss:3.474152 +step:8764 train loss:3.431582 +step:8765 train loss:3.411645 +step:8766 train loss:3.480348 +step:8767 train loss:3.451351 +step:8768 train loss:3.510079 +step:8769 train loss:3.441742 +step:8770 train loss:3.410836 +step:8771 train loss:3.463708 +step:8772 train loss:3.552850 +step:8773 train loss:3.432432 +step:8774 train loss:3.472342 +step:8775 train loss:3.450610 +step:8776 train loss:3.484742 +step:8777 train loss:3.547224 +step:8778 train loss:3.429493 +step:8779 train loss:3.495747 +step:8780 train loss:3.401979 +step:8781 train loss:3.469282 +step:8782 train loss:3.474214 +step:8783 train loss:3.394022 +step:8784 train loss:3.538149 +step:8785 train loss:3.450158 +step:8786 train loss:3.483804 +step:8787 train loss:3.461812 +step:8788 train loss:3.432588 +step:8789 train loss:3.516808 +step:8790 train loss:3.402919 +step:8791 train loss:3.483692 +step:8792 train loss:3.460806 +step:8793 train loss:3.455500 +step:8794 train loss:3.453603 +step:8795 train loss:3.459763 +step:8796 train loss:3.442941 +step:8797 train loss:3.432635 +step:8798 train loss:3.474094 +step:8799 train loss:3.486679 +step:8800 train loss:3.473781 +step:8801 train loss:3.452570 +step:8802 train loss:3.377089 +step:8803 train loss:3.543690 +step:8804 train loss:3.461595 +step:8805 train loss:3.425895 +step:8806 train loss:3.481732 +step:8807 train loss:3.478339 +step:8808 train loss:3.466567 +step:8809 train loss:3.440394 +step:8810 train loss:3.415344 +step:8811 train loss:3.524500 +step:8812 train loss:3.418488 +step:8813 train loss:3.447902 +step:8814 train loss:3.507910 +step:8815 train loss:3.433191 +step:8816 train loss:3.452005 +step:8817 train loss:3.457865 +step:8818 train loss:3.426148 +step:8819 
train loss:3.475066 +step:8820 train loss:3.447683 +step:8821 train loss:3.458378 +step:8822 train loss:3.494496 +step:8823 train loss:3.397701 +step:8824 train loss:3.542275 +step:8825 train loss:3.437685 +step:8826 train loss:3.428116 +step:8827 train loss:3.454497 +step:8828 train loss:3.392377 +step:8829 train loss:3.427361 +step:8830 train loss:3.440661 +step:8831 train loss:3.387328 +step:8832 train loss:3.469119 +step:8833 train loss:3.471222 +step:8834 train loss:3.479054 +step:8835 train loss:3.468137 +step:8836 train loss:3.479426 +step:8837 train loss:3.436106 +step:8838 train loss:3.473792 +step:8839 train loss:3.380433 +step:8840 train loss:3.433685 +step:8841 train loss:3.459357 +step:8842 train loss:3.444139 +step:8843 train loss:3.513705 +step:8844 train loss:3.426425 +step:8845 train loss:3.443813 +step:8846 train loss:3.465680 +step:8847 train loss:3.424275 +step:8848 train loss:3.412654 +step:8849 train loss:3.446197 +step:8850 train loss:3.522666 +step:8851 train loss:3.434552 +step:8852 train loss:3.579855 +step:8853 train loss:3.445744 +step:8854 train loss:3.479663 +step:8855 train loss:3.466471 +step:8856 train loss:3.427924 +step:8857 train loss:3.420358 +step:8858 train loss:3.420487 +step:8859 train loss:3.416201 +step:8860 train loss:3.463841 +step:8861 train loss:3.501744 +step:8862 train loss:3.408063 +step:8863 train loss:3.485915 +step:8864 train loss:3.487104 +step:8865 train loss:3.409488 +step:8866 train loss:3.493310 +step:8867 train loss:3.411406 +step:8868 train loss:3.452408 +step:8869 train loss:3.496126 +step:8870 train loss:3.426065 +step:8871 train loss:3.533330 +step:8872 train loss:3.410387 +step:8873 train loss:3.414627 +step:8874 train loss:3.482450 +step:8875 train loss:3.467526 +step:8876 train loss:3.372933 +step:8877 train loss:3.461284 +step:8878 train loss:3.390494 +step:8879 train loss:3.422934 +step:8880 train loss:3.468024 +step:8881 train loss:3.408471 +step:8882 train loss:3.462926 +step:8883 train loss:3.436616 +step:8884 train loss:3.475276 +step:8885 train loss:3.445974 +step:8886 train loss:3.481680 +step:8887 train loss:3.443455 +step:8888 train loss:3.440493 +step:8889 train loss:3.444136 +step:8890 train loss:3.458595 +step:8891 train loss:3.465770 +step:8892 train loss:3.392875 +step:8893 train loss:3.455085 +step:8894 train loss:3.411453 +step:8895 train loss:3.442053 +step:8896 train loss:3.444333 +step:8897 train loss:3.445300 +step:8898 train loss:3.434781 +step:8899 train loss:3.526057 +step:8900 train loss:3.410284 +step:8901 train loss:3.494592 +step:8902 train loss:3.442481 +step:8903 train loss:3.498618 +step:8904 train loss:3.409570 +step:8905 train loss:3.459922 +step:8906 train loss:3.391094 +step:8907 train loss:3.495502 +step:8908 train loss:3.408743 +step:8909 train loss:3.561829 +step:8910 train loss:3.405025 +step:8911 train loss:3.463127 +step:8912 train loss:3.423078 +step:8913 train loss:3.450552 +step:8914 train loss:3.377556 +step:8915 train loss:3.454182 +step:8916 train loss:3.410100 +step:8917 train loss:3.458881 +step:8918 train loss:3.495889 +step:8919 train loss:3.424882 +step:8920 train loss:3.407053 +step:8921 train loss:3.485536 +step:8922 train loss:3.415375 +step:8923 train loss:3.492575 +step:8924 train loss:3.421102 +step:8925 train loss:3.437237 +step:8926 train loss:3.404062 +step:8927 train loss:3.415567 +step:8928 train loss:3.409179 +step:8929 train loss:3.470033 +step:8930 train loss:3.392022 +step:8931 train loss:3.457401 +step:8932 train loss:3.452117 +step:8933 train loss:3.473380 
+step:8934 train loss:3.489503 +step:8935 train loss:3.387663 +step:8936 train loss:3.467789 +step:8937 train loss:3.395087 +step:8938 train loss:3.467720 +step:8939 train loss:3.417808 +step:8940 train loss:3.511034 +step:8941 train loss:3.366195 +step:8942 train loss:3.411708 +step:8943 train loss:3.383511 +step:8944 train loss:3.401858 +step:8945 train loss:3.392997 +step:8946 train loss:3.474207 +step:8947 train loss:3.450951 +step:8948 train loss:3.530857 +step:8949 train loss:3.439741 +step:8950 train loss:3.471717 +step:8951 train loss:3.413028 +step:8952 train loss:3.495662 +step:8953 train loss:3.400457 +step:8954 train loss:3.471217 +step:8955 train loss:3.486549 +step:8956 train loss:3.451821 +step:8957 train loss:3.512136 +step:8958 train loss:3.442422 +step:8959 train loss:3.427839 +step:8960 train loss:3.405155 +step:8961 train loss:3.459391 +step:8962 train loss:3.465199 +step:8963 train loss:3.379356 +step:8964 train loss:3.526933 +step:8965 train loss:3.416032 +step:8966 train loss:3.436114 +step:8967 train loss:3.373366 +step:8968 train loss:3.448995 +step:8969 train loss:3.344024 +step:8970 train loss:3.445385 +step:8971 train loss:3.444600 +step:8972 train loss:3.438805 +step:8973 train loss:3.441819 +step:8974 train loss:3.425500 +step:8975 train loss:3.461148 +step:8976 train loss:3.445859 +step:8977 train loss:3.403389 +step:8978 train loss:3.450609 +step:8979 train loss:3.403072 +step:8980 train loss:3.501832 +step:8981 train loss:3.405761 +step:8982 train loss:3.481103 +step:8983 train loss:3.411218 +step:8984 train loss:3.429264 +step:8985 train loss:3.460879 +step:8986 train loss:3.475798 +step:8987 train loss:3.433223 +step:8988 train loss:3.455399 +step:8989 train loss:3.365962 +step:8990 train loss:3.450454 +step:8991 train loss:3.404169 +step:8992 train loss:3.513721 +step:8993 train loss:3.451435 +step:8994 train loss:3.619548 +step:8995 train loss:3.430436 +step:8996 train loss:3.424063 +step:8997 train loss:3.424878 +step:8998 train loss:3.496686 +step:8999 train loss:3.386769 +step:9000 validation loss:3.411837 total_sharp:6.1039e-03 L1_sharp:-8.0866e-04 L2_sharp:1.1444e-03 L3_sharp:8.1542e-03 L4_sharp:3.6033e-03 L5_sharp:2.7602e-03 L6_sharp:2.2047e-03 L7_sharp:2.2232e-03 L8_sharp:1.8463e-03 L9_sharp:1.0939e-03 L10_sharp:5.9403e-04 L11_sharp:6.9792e-04 L12_sharp:1.2522e-03 total_fnorm:1.0960e+00 total_l1_linf:9.0979e+03 total_spectral:1.0960e+00 L1_fnorm:2.0903e-01 L2_fnorm:1.1473e-01 L3_fnorm:1.6193e-01 L4_fnorm:1.9799e-01 L5_fnorm:2.2153e-01 L6_fnorm:2.6894e-01 L7_fnorm:2.9179e-01 L8_fnorm:2.9458e-01 L9_fnorm:2.9518e-01 L10_fnorm:2.9719e-01 L11_fnorm:2.8450e-01 L12_fnorm:2.9958e-01 L1_l1linf:1.8455e-01 L2_l1linf:2.2404e-01 L3_l1linf:2.4648e-01 L4_l1linf:2.4718e-01 L5_l1linf:2.5623e-01 L6_l1linf:2.2960e-01 L7_l1linf:2.0979e-01 L8_l1linf:2.0047e-01 L9_l1linf:2.0766e-01 L10_l1linf:2.3397e-01 L11_l1linf:2.5032e-01 L12_l1linf:2.5175e-01 L1_spectral:6.0265e-03 L2_spectral:6.0189e-03 L3_spectral:6.0208e-03 L4_spectral:8.8363e-03 L5_spectral:9.8787e-03 L6_spectral:6.0258e-03 L7_spectral:6.0291e-03 L8_spectral:6.0320e-03 L9_spectral:6.0294e-03 L10_spectral:6.0306e-03 L11_spectral:6.0283e-03 L12_spectral:6.0275e-03 v_norm:1.0960e+00 cos_v_-g_hvp:2.2921e-02 g_hvp_norm:5.9189e-01 cos_v_-g_t:2.7600e-02 g_t_norm:7.2995e-01 hv_norm:4.8109e+00 cos_v_hv:1.3906e-03 hg_norm:4.5381e+04 cos_g_hg:-1.9106e-01 v_par:2.6103e-03 v_perp:1.0960e+00 L1_cos_v_neg_g:7.2665e-03 L1_v_norm:2.0903e-01 L2_cos_v_neg_g:1.3754e-02 L2_v_norm:1.1473e-01 L3_cos_v_neg_g:1.5107e-02 
L3_v_norm:1.6193e-01 L4_cos_v_neg_g:1.5362e-02 L4_v_norm:1.9799e-01 L5_cos_v_neg_g:2.0844e-02 L5_v_norm:2.2153e-01 L6_cos_v_neg_g:2.4079e-02 L6_v_norm:2.6894e-01 L7_cos_v_neg_g:2.4792e-02 L7_v_norm:2.9179e-01 L8_cos_v_neg_g:2.2088e-02 L8_v_norm:2.9458e-01 L9_cos_v_neg_g:2.3687e-02 L9_v_norm:2.9518e-01 L10_cos_v_neg_g:2.7528e-02 L10_v_norm:2.9719e-01 L11_cos_v_neg_g:3.7381e-02 L11_v_norm:2.8450e-01 L12_cos_v_neg_g:7.1539e-02 L12_v_norm:2.9958e-01 +step:9000 train loss:3.425002 +step:9001 train loss:3.402756 +step:9002 train loss:3.462489 +step:9003 train loss:3.409372 +step:9004 train loss:3.434510 +step:9005 train loss:3.388591 +step:9006 train loss:3.484281 +step:9007 train loss:3.418539 +step:9008 train loss:3.478934 +step:9009 train loss:3.416437 +step:9010 train loss:3.436396 +step:9011 train loss:3.398625 +step:9012 train loss:3.454167 +step:9013 train loss:3.413960 +step:9014 train loss:3.502073 +step:9015 train loss:3.435293 +step:9016 train loss:3.473729 +step:9017 train loss:3.460955 +step:9018 train loss:3.549226 +step:9019 train loss:3.413943 +step:9020 train loss:3.452451 +step:9021 train loss:3.411006 +step:9022 train loss:3.440450 +step:9023 train loss:3.366805 +step:9024 train loss:3.447452 +step:9025 train loss:3.400906 +step:9026 train loss:3.421218 +step:9027 train loss:3.455065 +step:9028 train loss:3.487573 +step:9029 train loss:3.416144 +step:9030 train loss:3.483895 +step:9031 train loss:3.452431 +step:9032 train loss:3.496290 +step:9033 train loss:3.425493 +step:9034 train loss:3.416893 +step:9035 train loss:3.369997 +step:9036 train loss:3.467943 +step:9037 train loss:3.493533 +step:9038 train loss:3.473776 +step:9039 train loss:3.417702 +step:9040 train loss:3.440368 +step:9041 train loss:3.454182 +step:9042 train loss:3.487292 +step:9043 train loss:3.480603 +step:9044 train loss:3.453893 +step:9045 train loss:3.432937 +step:9046 train loss:3.422857 +step:9047 train loss:3.397367 +step:9048 train loss:3.435955 +step:9049 train loss:3.403008 +step:9050 train loss:3.458023 +step:9051 train loss:3.428355 +step:9052 train loss:3.420159 +step:9053 train loss:3.421422 +step:9054 train loss:3.456505 +step:9055 train loss:3.440201 +step:9056 train loss:3.401054 +step:9057 train loss:3.503445 +step:9058 train loss:3.354664 +step:9059 train loss:3.439604 +step:9060 train loss:3.380311 +step:9061 train loss:3.405404 +step:9062 train loss:3.370933 +step:9063 train loss:3.475057 +step:9064 train loss:3.435293 +step:9065 train loss:3.432579 +step:9066 train loss:3.423247 +step:9067 train loss:3.460405 +step:9068 train loss:3.463401 +step:9069 train loss:3.490137 +step:9070 train loss:3.436186 +step:9071 train loss:3.472654 +step:9072 train loss:3.425128 +step:9073 train loss:3.495177 +step:9074 train loss:3.416219 +step:9075 train loss:3.506440 +step:9076 train loss:3.428846 +step:9077 train loss:3.441634 +step:9078 train loss:3.465668 +step:9079 train loss:3.528059 +step:9080 train loss:3.509797 +step:9081 train loss:3.543875 +step:9082 train loss:3.441460 +step:9083 train loss:3.512474 +step:9084 train loss:3.483634 +step:9085 train loss:3.479255 +step:9086 train loss:3.547396 +step:9087 train loss:3.393457 +step:9088 train loss:3.559166 +step:9089 train loss:3.430294 +step:9090 train loss:3.501974 +step:9091 train loss:3.490077 +step:9092 train loss:3.486268 +step:9093 train loss:3.433120 +step:9094 train loss:3.469379 +step:9095 train loss:3.449870 +step:9096 train loss:3.467197 +step:9097 train loss:3.502917 +step:9098 train loss:3.473667 +step:9099 train loss:3.488487 
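Note on the record format: the flattened validation entries above (for example the step-9000 record) interleave the validation loss with layer-wise sharpness diagnostics as whitespace-separated key:value pairs. The following is a minimal parsing sketch, assuming Python tooling; it is not part of the logged training script, the function name parse_record is hypothetical, and the field names are taken verbatim from the records themselves.

def parse_record(line: str) -> dict:
    # Collapse the two-word loss labels into single keys, then read the
    # remaining whitespace-separated "key:value" pairs.
    line = line.replace("train loss:", "train_loss:")
    line = line.replace("validation loss:", "val_loss:")
    out = {}
    for tok in line.split():
        if ":" not in tok:
            continue
        key, _, val = tok.partition(":")
        key = key.lstrip("+")
        try:
            out[key] = float(val)
        except ValueError:
            out[key] = val
    return out

# Example on a truncated record from this log:
# parse_record("+step:9000 validation loss:3.411837 total_sharp:6.1039e-03")
# -> {"step": 9000.0, "val_loss": 3.411837, "total_sharp": 0.0061039}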
+step:9100 train loss:3.426887 +step:9101 train loss:3.480883 +step:9102 train loss:3.519639 +step:9103 train loss:3.434193 +step:9104 train loss:3.441278 +step:9105 train loss:3.456183 +step:9106 train loss:3.494616 +step:9107 train loss:3.456066 +step:9108 train loss:3.506316 +step:9109 train loss:3.412305 +step:9110 train loss:3.499758 +step:9111 train loss:3.435289 +step:9112 train loss:3.440224 +step:9113 train loss:3.441375 +step:9114 train loss:3.487038 +step:9115 train loss:3.468116 +step:9116 train loss:3.457360 +step:9117 train loss:3.528532 +step:9118 train loss:3.496451 +step:9119 train loss:3.448847 +step:9120 train loss:3.436284 +step:9121 train loss:3.499676 +step:9122 train loss:3.439064 +step:9123 train loss:3.446706 +step:9124 train loss:3.470357 +step:9125 train loss:3.458240 +step:9126 train loss:3.425692 +step:9127 train loss:3.445102 +step:9128 train loss:3.408484 +step:9129 train loss:3.489712 +step:9130 train loss:3.463624 +step:9131 train loss:3.456600 +step:9132 train loss:3.461736 +step:9133 train loss:3.482811 +step:9134 train loss:3.437997 +step:9135 train loss:3.575951 +step:9136 train loss:3.452950 +step:9137 train loss:3.458278 +step:9138 train loss:3.488236 +step:9139 train loss:3.418465 +step:9140 train loss:3.483485 +step:9141 train loss:3.406363 +step:9142 train loss:3.458525 +step:9143 train loss:3.465171 +step:9144 train loss:3.472349 +step:9145 train loss:3.421441 +step:9146 train loss:3.536234 +step:9147 train loss:3.478042 +step:9148 train loss:3.485408 +step:9149 train loss:3.494901 +step:9150 train loss:3.416467 +step:9151 train loss:3.449947 +step:9152 train loss:3.422500 +step:9153 train loss:3.524597 +step:9154 train loss:3.485183 +step:9155 train loss:3.463762 +step:9156 train loss:3.505226 +step:9157 train loss:3.476296 +step:9158 train loss:3.594954 +step:9159 train loss:3.398576 +step:9160 train loss:3.489491 +step:9161 train loss:3.449762 +step:9162 train loss:3.477091 +step:9163 train loss:3.404942 +step:9164 train loss:3.454933 +step:9165 train loss:3.497982 +step:9166 train loss:3.472080 +step:9167 train loss:3.509904 +step:9168 train loss:3.450376 +step:9169 train loss:3.424487 +step:9170 train loss:3.518147 +step:9171 train loss:3.460178 +step:9172 train loss:3.533659 +step:9173 train loss:3.476661 +step:9174 train loss:3.494916 +step:9175 train loss:3.445738 +step:9176 train loss:3.486677 +step:9177 train loss:3.494998 +step:9178 train loss:3.440139 +step:9179 train loss:3.448380 +step:9180 train loss:3.495958 +step:9181 train loss:3.459490 +step:9182 train loss:3.474348 +step:9183 train loss:3.441304 +step:9184 train loss:3.514586 +step:9185 train loss:3.436350 +step:9186 train loss:3.433573 +step:9187 train loss:3.447380 +step:9188 train loss:3.395914 +step:9189 train loss:3.434008 +step:9190 train loss:3.426592 +step:9191 train loss:3.445892 +step:9192 train loss:3.412340 +step:9193 train loss:3.460674 +step:9194 train loss:3.436871 +step:9195 train loss:3.424872 +step:9196 train loss:3.534790 +step:9197 train loss:3.531722 +step:9198 train loss:3.452105 +step:9199 train loss:3.462893 +step:9200 train loss:3.475137 +step:9201 train loss:3.432694 +step:9202 train loss:3.427071 +step:9203 train loss:3.436763 +step:9204 train loss:3.460600 +step:9205 train loss:3.424830 +step:9206 train loss:3.493987 +step:9207 train loss:3.435629 +step:9208 train loss:3.497823 +step:9209 train loss:3.486530 +step:9210 train loss:3.449953 +step:9211 train loss:3.495665 +step:9212 train loss:3.467786 +step:9213 train loss:3.450892 +step:9214 train 
loss:3.462223 +step:9215 train loss:3.416392 +step:9216 train loss:3.416425 +step:9217 train loss:3.441521 +step:9218 train loss:3.437016 +step:9219 train loss:3.430087 +step:9220 train loss:3.495801 +step:9221 train loss:3.494206 +step:9222 train loss:3.457724 +step:9223 train loss:3.499876 +step:9224 train loss:3.385515 +step:9225 train loss:3.418087 +step:9226 train loss:3.405044 +step:9227 train loss:3.424464 +step:9228 train loss:3.479345 +step:9229 train loss:3.486383 +step:9230 train loss:3.483145 +step:9231 train loss:3.443829 +step:9232 train loss:3.486064 +step:9233 train loss:3.443492 +step:9234 train loss:3.481143 +step:9235 train loss:3.436115 +step:9236 train loss:3.460322 +step:9237 train loss:3.476805 +step:9238 train loss:3.498761 +step:9239 train loss:3.449058 +step:9240 train loss:3.495516 +step:9241 train loss:3.451191 +step:9242 train loss:3.502040 +step:9243 train loss:3.449548 +step:9244 train loss:3.433790 +step:9245 train loss:3.490584 +step:9246 train loss:3.470373 +step:9247 train loss:3.472627 +step:9248 train loss:3.392627 +step:9249 train loss:3.487983 +step:9250 validation loss:3.397810 +step:9250 train loss:3.514849 +step:9251 train loss:3.455258 +step:9252 train loss:3.489820 +step:9253 train loss:3.438540 +step:9254 train loss:3.536968 +step:9255 train loss:3.470083 +step:9256 train loss:3.549470 +step:9257 train loss:3.539639 +step:9258 train loss:3.392539 +step:9259 train loss:3.431323 +step:9260 train loss:3.533948 +step:9261 train loss:3.442541 +step:9262 train loss:3.363276 +step:9263 train loss:3.358200 +step:9264 train loss:3.455814 +step:9265 train loss:3.394694 +step:9266 train loss:3.448272 +step:9267 train loss:3.487515 +step:9268 train loss:3.486876 +step:9269 train loss:3.471998 +step:9270 train loss:3.527441 +step:9271 train loss:3.431961 +step:9272 train loss:3.496338 +step:9273 train loss:3.464083 +step:9274 train loss:3.416273 +step:9275 train loss:3.438449 +step:9276 train loss:3.460322 +step:9277 train loss:3.481265 +step:9278 train loss:3.403566 +step:9279 train loss:3.495822 +step:9280 train loss:3.489723 +step:9281 train loss:3.446196 +step:9282 train loss:3.468419 +step:9283 train loss:3.490206 +step:9284 train loss:3.476688 +step:9285 train loss:3.444088 +step:9286 train loss:3.479506 +step:9287 train loss:3.404616 +step:9288 train loss:3.465357 +step:9289 train loss:3.474688 +step:9290 train loss:3.406561 +step:9291 train loss:3.425774 +step:9292 train loss:3.469422 +step:9293 train loss:3.492208 +step:9294 train loss:3.400168 +step:9295 train loss:3.475248 +step:9296 train loss:3.460993 +step:9297 train loss:3.432265 +step:9298 train loss:3.398581 +step:9299 train loss:3.396115 +step:9300 train loss:3.441563 +step:9301 train loss:3.420492 +step:9302 train loss:3.438765 +step:9303 train loss:3.429639 +step:9304 train loss:3.457679 +step:9305 train loss:3.446528 +step:9306 train loss:3.468318 +step:9307 train loss:3.529866 +step:9308 train loss:3.445792 +step:9309 train loss:3.408927 +step:9310 train loss:3.459369 +step:9311 train loss:3.461396 +step:9312 train loss:3.440902 +step:9313 train loss:3.464442 +step:9314 train loss:3.483347 +step:9315 train loss:3.459917 +step:9316 train loss:3.450791 +step:9317 train loss:3.457719 +step:9318 train loss:3.441515 +step:9319 train loss:3.453584 +step:9320 train loss:3.484466 +step:9321 train loss:3.521849 +step:9322 train loss:3.398861 +step:9323 train loss:3.435737 +step:9324 train loss:3.432324 +step:9325 train loss:3.391290 +step:9326 train loss:3.514246 +step:9327 train loss:3.434542 
+step:9328 train loss:3.401752 +step:9329 train loss:3.443242 +step:9330 train loss:3.502967 +step:9331 train loss:3.446749 +step:9332 train loss:3.454829 +step:9333 train loss:3.442287 +step:9334 train loss:3.432132 +step:9335 train loss:3.432806 +step:9336 train loss:3.437789 +step:9337 train loss:3.442245 +step:9338 train loss:3.507818 +step:9339 train loss:3.456950 +step:9340 train loss:3.424944 +step:9341 train loss:3.607506 +step:9342 train loss:3.405902 +step:9343 train loss:3.389319 +step:9344 train loss:3.488939 +step:9345 train loss:3.425628 +step:9346 train loss:3.438519 +step:9347 train loss:3.463754 +step:9348 train loss:3.446235 +step:9349 train loss:3.429992 +step:9350 train loss:3.500448 +step:9351 train loss:3.478416 +step:9352 train loss:3.445563 +step:9353 train loss:3.482230 +step:9354 train loss:3.472347 +step:9355 train loss:3.428597 +step:9356 train loss:3.449292 +step:9357 train loss:3.436424 +step:9358 train loss:3.502845 +step:9359 train loss:3.437735 +step:9360 train loss:3.465741 +step:9361 train loss:3.426943 +step:9362 train loss:3.432975 +step:9363 train loss:3.445942 +step:9364 train loss:3.407111 +step:9365 train loss:3.418441 +step:9366 train loss:3.503318 +step:9367 train loss:3.409353 +step:9368 train loss:3.465250 +step:9369 train loss:3.445101 +step:9370 train loss:3.478939 +step:9371 train loss:3.443693 +step:9372 train loss:3.411538 +step:9373 train loss:3.456287 +step:9374 train loss:3.395931 +step:9375 train loss:3.431373 +step:9376 train loss:3.415248 +step:9377 train loss:3.519191 +step:9378 train loss:3.433788 +step:9379 train loss:3.413606 +step:9380 train loss:3.441548 +step:9381 train loss:3.401899 +step:9382 train loss:3.403885 +step:9383 train loss:3.495643 +step:9384 train loss:3.411055 +step:9385 train loss:3.523656 +step:9386 train loss:3.385208 +step:9387 train loss:3.422627 +step:9388 train loss:3.449104 +step:9389 train loss:3.413889 +step:9390 train loss:3.405982 +step:9391 train loss:3.431119 +step:9392 train loss:3.459719 +step:9393 train loss:3.472860 +step:9394 train loss:3.528967 +step:9395 train loss:3.540578 +step:9396 train loss:3.459720 +step:9397 train loss:3.421987 +step:9398 train loss:3.475564 +step:9399 train loss:3.461210 +step:9400 train loss:3.410098 +step:9401 train loss:3.430916 +step:9402 train loss:3.422215 +step:9403 train loss:3.406031 +step:9404 train loss:3.436934 +step:9405 train loss:3.463647 +step:9406 train loss:3.402008 +step:9407 train loss:3.453182 +step:9408 train loss:3.460089 +step:9409 train loss:3.462005 +step:9410 train loss:3.406013 +step:9411 train loss:3.458929 +step:9412 train loss:3.446841 +step:9413 train loss:3.515402 +step:9414 train loss:3.579285 +step:9415 train loss:3.430448 +step:9416 train loss:3.453081 +step:9417 train loss:3.412730 +step:9418 train loss:3.322434 +step:9419 train loss:3.449127 +step:9420 train loss:3.494183 +step:9421 train loss:3.459776 +step:9422 train loss:3.510740 +step:9423 train loss:3.504878 +step:9424 train loss:3.477656 +step:9425 train loss:3.421167 +step:9426 train loss:3.448766 +step:9427 train loss:3.485614 +step:9428 train loss:3.455700 +step:9429 train loss:3.436811 +step:9430 train loss:3.446744 +step:9431 train loss:3.473376 +step:9432 train loss:3.403587 +step:9433 train loss:3.468979 +step:9434 train loss:3.393329 +step:9435 train loss:3.469982 +step:9436 train loss:3.439449 +step:9437 train loss:3.376309 +step:9438 train loss:3.503914 +step:9439 train loss:3.458468 +step:9440 train loss:3.439714 +step:9441 train loss:3.455713 +step:9442 train 
loss:3.474275 +step:9443 train loss:3.405813 +step:9444 train loss:3.392800 +step:9445 train loss:3.490086 +step:9446 train loss:3.444782 +step:9447 train loss:3.437300 +step:9448 train loss:3.435566 +step:9449 train loss:3.431403 +step:9450 train loss:3.441891 +step:9451 train loss:3.415124 +step:9452 train loss:3.426881 +step:9453 train loss:3.428699 +step:9454 train loss:3.365396 +step:9455 train loss:3.385596 +step:9456 train loss:3.450653 +step:9457 train loss:3.447226 +step:9458 train loss:3.422578 +step:9459 train loss:3.404495 +step:9460 train loss:3.442416 +step:9461 train loss:3.408567 +step:9462 train loss:3.476440 +step:9463 train loss:3.472555 +step:9464 train loss:3.366161 +step:9465 train loss:3.463652 +step:9466 train loss:3.401910 +step:9467 train loss:3.450237 +step:9468 train loss:3.468004 +step:9469 train loss:3.420762 +step:9470 train loss:3.367398 +step:9471 train loss:3.478858 +step:9472 train loss:3.383654 +step:9473 train loss:3.415453 +step:9474 train loss:3.408151 +step:9475 train loss:3.414961 +step:9476 train loss:3.367254 +step:9477 train loss:3.382495 +step:9478 train loss:3.398534 +step:9479 train loss:3.399436 +step:9480 train loss:3.412655 +step:9481 train loss:3.406124 +step:9482 train loss:3.559109 +step:9483 train loss:3.427288 +step:9484 train loss:3.536572 +step:9485 train loss:3.441441 +step:9486 train loss:3.394309 +step:9487 train loss:3.388665 +step:9488 train loss:3.440826 +step:9489 train loss:3.378963 +step:9490 train loss:3.421411 +step:9491 train loss:3.496121 +step:9492 train loss:3.449287 +step:9493 train loss:3.483703 +step:9494 train loss:3.364375 +step:9495 train loss:3.378608 +step:9496 train loss:3.377724 +step:9497 train loss:3.424849 +step:9498 train loss:3.363018 +step:9499 train loss:3.467856 +step:9500 validation loss:3.383713 total_sharp:4.7870e-03 L1_sharp:-1.1417e-02 L2_sharp:-5.0711e-03 L3_sharp:1.6677e-02 L4_sharp:4.7281e-03 L5_sharp:3.4804e-03 L6_sharp:2.3938e-03 L7_sharp:1.5686e-03 L8_sharp:1.4049e-03 L9_sharp:1.0354e-03 L10_sharp:6.4071e-04 L11_sharp:7.8786e-04 L12_sharp:1.0123e-03 total_fnorm:5.5227e-01 total_l1_linf:4.6001e+03 total_spectral:5.5227e-01 L1_fnorm:1.2333e-01 L2_fnorm:6.4770e-02 L3_fnorm:7.4959e-02 L4_fnorm:9.7437e-02 L5_fnorm:1.0814e-01 L6_fnorm:1.3368e-01 L7_fnorm:1.4655e-01 L8_fnorm:1.4850e-01 L9_fnorm:1.4821e-01 L10_fnorm:1.4877e-01 L11_fnorm:1.4256e-01 L12_fnorm:1.5007e-01 L1_l1linf:1.1717e-01 L2_l1linf:1.3076e-01 L3_l1linf:1.4446e-01 L4_l1linf:1.4620e-01 L5_l1linf:1.3849e-01 L6_l1linf:1.3820e-01 L7_l1linf:1.2221e-01 L8_l1linf:1.2417e-01 L9_l1linf:1.3053e-01 L10_l1linf:1.4200e-01 L11_l1linf:1.4521e-01 L12_l1linf:1.4579e-01 L1_spectral:3.0163e-03 L2_spectral:3.0127e-03 L3_spectral:3.2309e-03 L4_spectral:4.6552e-03 L5_spectral:4.4670e-03 L6_spectral:3.0517e-03 L7_spectral:3.0168e-03 L8_spectral:3.0181e-03 L9_spectral:3.0173e-03 L10_spectral:3.1532e-03 L11_spectral:3.2028e-03 L12_spectral:3.2315e-03 v_norm:5.5227e-01 cos_v_-g_hvp:2.1834e-02 g_hvp_norm:5.7017e-01 cos_v_-g_t:2.6984e-02 g_t_norm:6.9548e-01 hv_norm:1.9328e+00 cos_v_hv:1.3678e-03 hg_norm:4.5950e+03 cos_g_hg:-3.5877e-02 v_par:1.1277e-03 v_perp:5.5227e-01 L1_cos_v_neg_g:7.2945e-03 L1_v_norm:1.2333e-01 L2_cos_v_neg_g:7.5608e-03 L2_v_norm:6.4770e-02 L3_cos_v_neg_g:2.0102e-02 L3_v_norm:7.4959e-02 L4_cos_v_neg_g:1.6751e-02 L4_v_norm:9.7437e-02 L5_cos_v_neg_g:2.2280e-02 L5_v_norm:1.0814e-01 L6_cos_v_neg_g:2.2753e-02 L6_v_norm:1.3368e-01 L7_cos_v_neg_g:2.1381e-02 L7_v_norm:1.4655e-01 L8_cos_v_neg_g:2.2252e-02 L8_v_norm:1.4850e-01 
L9_cos_v_neg_g:2.2148e-02 L9_v_norm:1.4821e-01 L10_cos_v_neg_g:2.6311e-02 L10_v_norm:1.4877e-01 L11_cos_v_neg_g:3.5471e-02 L11_v_norm:1.4256e-01 L12_cos_v_neg_g:6.8379e-02 L12_v_norm:1.5007e-01 +step:9500 train loss:3.417667 +step:9501 train loss:3.475046 +step:9502 train loss:3.424542 +step:9503 train loss:3.492376 +step:9504 train loss:3.391194 +step:9505 train loss:3.391977 +step:9506 train loss:3.457586 +step:9507 train loss:3.441429 +step:9508 train loss:3.419448 +step:9509 train loss:3.469570 +step:9510 train loss:3.503417 +step:9511 train loss:3.373450 +step:9512 train loss:3.456909 +step:9513 train loss:3.435303 +step:9514 train loss:3.496882 +step:9515 train loss:3.392328 +step:9516 train loss:3.306126 +step:9517 train loss:3.374337 +step:9518 train loss:3.398611 +step:9519 train loss:3.402854 +step:9520 train loss:3.322432 +step:9521 train loss:3.417902 +step:9522 train loss:3.443493 +step:9523 train loss:3.376415 +step:9524 train loss:3.438896 +step:9525 train loss:3.429851 +step:9526 train loss:3.380546 +step:9527 train loss:3.367539 +step:9528 train loss:3.458641 +step:9529 train loss:3.359478 +step:9530 train loss:3.418373 +step:9531 train loss:3.451231 +step:9532 train loss:3.439586 +step:9533 train loss:3.420958 +step:9534 train loss:3.452936 +step:9535 train loss:3.387786 +step:9536 train loss:3.401952 +step:9537 train loss:3.490224 +step:9538 train loss:3.487995 +step:9539 train loss:3.396554 +step:9540 train loss:3.569023 +step:9541 train loss:3.389926 +step:9542 train loss:3.385661 +step:9543 train loss:3.382498 +step:9544 train loss:3.387970 +step:9545 train loss:3.355903 +step:9546 train loss:3.387157 +step:9547 train loss:3.511050 +step:9548 train loss:3.435640 +step:9549 train loss:3.425659 +step:9550 train loss:3.460068 +step:9551 train loss:3.363995 +step:9552 train loss:3.416243 +step:9553 train loss:3.448248 +step:9554 train loss:3.420368 +step:9555 train loss:3.362993 +step:9556 train loss:3.441515 +step:9557 train loss:3.413988 +step:9558 train loss:3.428794 +step:9559 train loss:3.420740 +step:9560 train loss:3.529841 +step:9561 train loss:3.408137 +step:9562 train loss:3.481327 +step:9563 train loss:3.620139 +step:9564 train loss:3.418769 +step:9565 train loss:3.409858 +step:9566 train loss:3.449059 +step:9567 train loss:3.395952 +step:9568 train loss:3.453795 +step:9569 train loss:3.450787 +step:9570 train loss:3.463711 +step:9571 train loss:3.440917 +step:9572 train loss:3.378804 +step:9573 train loss:3.542108 +step:9574 train loss:3.379521 +step:9575 train loss:3.455472 +step:9576 train loss:3.401654 +step:9577 train loss:3.393270 +step:9578 train loss:3.438794 +step:9579 train loss:3.429041 +step:9580 train loss:3.423793 +step:9581 train loss:3.457109 +step:9582 train loss:3.377591 +step:9583 train loss:3.509313 +step:9584 train loss:3.399955 +step:9585 train loss:3.423569 +step:9586 train loss:3.466280 +step:9587 train loss:3.449620 +step:9588 train loss:3.459238 +step:9589 train loss:3.435046 +step:9590 train loss:3.411737 +step:9591 train loss:3.376271 +step:9592 train loss:3.380224 +step:9593 train loss:3.422414 +step:9594 train loss:3.357488 +step:9595 train loss:3.411303 +step:9596 train loss:3.426877 +step:9597 train loss:3.408079 +step:9598 train loss:3.350576 +step:9599 train loss:3.352105 +step:9600 train loss:3.425353 +step:9601 train loss:3.396494 +step:9602 train loss:3.344165 +step:9603 train loss:3.464502 +step:9604 train loss:3.408962 +step:9605 train loss:3.408653 +step:9606 train loss:3.541185 +step:9607 train loss:3.421299 +step:9608 
train loss:3.387479 +step:9609 train loss:3.496217 +step:9610 train loss:3.414289 +step:9611 train loss:3.359632 +step:9612 train loss:3.444645 +step:9613 train loss:3.429631 +step:9614 train loss:3.431374 +step:9615 train loss:3.371481 +step:9616 train loss:3.448966 +step:9617 train loss:3.404475 +step:9618 train loss:3.376547 +step:9619 train loss:3.359600 +step:9620 train loss:3.407384 +step:9621 train loss:3.407357 +step:9622 train loss:3.472477 +step:9623 train loss:3.397252 +step:9624 train loss:3.477117 +step:9625 train loss:3.376072 +step:9626 train loss:3.380171 +step:9627 train loss:3.425481 +step:9628 train loss:3.450485 +step:9629 train loss:3.504699 +step:9630 train loss:3.437751 +step:9631 train loss:3.447241 +step:9632 train loss:3.422211 +step:9633 train loss:3.392849 +step:9634 train loss:3.479245 +step:9635 train loss:3.438855 +step:9636 train loss:3.448098 +step:9637 train loss:3.420612 +step:9638 train loss:3.403915 +step:9639 train loss:3.428212 +step:9640 train loss:3.437718 +step:9641 train loss:3.379543 +step:9642 train loss:3.444535 +step:9643 train loss:3.405028 +step:9644 train loss:3.455360 +step:9645 train loss:3.445223 +step:9646 train loss:3.453552 +step:9647 train loss:3.421477 +step:9648 train loss:3.431465 +step:9649 train loss:3.445258 +step:9650 train loss:3.413283 +step:9651 train loss:3.427064 +step:9652 train loss:3.429028 +step:9653 train loss:3.450872 +step:9654 train loss:3.457148 +step:9655 train loss:3.360025 +step:9656 train loss:3.388793 +step:9657 train loss:3.390115 +step:9658 train loss:3.436772 +step:9659 train loss:3.442481 +step:9660 train loss:3.331980 +step:9661 train loss:3.541132 +step:9662 train loss:3.368263 +step:9663 train loss:3.432039 +step:9664 train loss:3.476413 +step:9665 train loss:3.366207 +step:9666 train loss:3.531508 +step:9667 train loss:3.431293 +step:9668 train loss:3.531368 +step:9669 train loss:3.409741 +step:9670 train loss:3.441256 +step:9671 train loss:3.402193 +step:9672 train loss:3.500747 +step:9673 train loss:3.412212 +step:9674 train loss:3.502217 +step:9675 train loss:3.443795 +step:9676 train loss:3.442552 +step:9677 train loss:3.467806 +step:9678 train loss:3.394433 +step:9679 train loss:3.433206 +step:9680 train loss:3.401245 +step:9681 train loss:3.417582 +step:9682 train loss:3.402911 +step:9683 train loss:3.440452 +step:9684 train loss:3.392534 +step:9685 train loss:3.465956 +step:9686 train loss:3.433691 +step:9687 train loss:3.411379 +step:9688 train loss:3.412889 +step:9689 train loss:3.378689 +step:9690 train loss:3.432202 +step:9691 train loss:3.422509 +step:9692 train loss:3.449757 +step:9693 train loss:3.418552 +step:9694 train loss:3.438998 +step:9695 train loss:3.483845 +step:9696 train loss:3.460648 +step:9697 train loss:3.463678 +step:9698 train loss:3.413203 +step:9699 train loss:3.430367 +step:9700 train loss:3.397850 +step:9701 train loss:3.397951 +step:9702 train loss:3.402134 +step:9703 train loss:3.395785 +step:9704 train loss:3.458984 +step:9705 train loss:3.456171 +step:9706 train loss:3.402991 +step:9707 train loss:3.405855 +step:9708 train loss:3.487783 +step:9709 train loss:3.440270 +step:9710 train loss:3.446555 +step:9711 train loss:3.429885 +step:9712 train loss:3.591376 +step:9713 train loss:3.451533 +step:9714 train loss:3.431449 +step:9715 train loss:3.399426 +step:9716 train loss:3.415255 +step:9717 train loss:3.409583 +step:9718 train loss:3.469456 +step:9719 train loss:3.429664 +step:9720 train loss:3.486889 +step:9721 train loss:3.446002 +step:9722 train loss:3.395725 
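Note on the sharpness fields: quantities such as total_sharp, the per-layer L*_sharp values, v_norm, hv_norm and cos_v_hv are consistent with a directional-curvature measurement along the current update direction v. The sketch below shows that generic computation in PyTorch, as an assumption about what such numbers measure; it is not the repository's analysis code, and directional_sharpness is a hypothetical helper name.

import torch

def directional_sharpness(loss, params, v):
    # First-order gradients with the graph retained so we can differentiate again.
    grads = torch.autograd.grad(loss, params, create_graph=True)
    # Hessian-vector product Hv, obtained by differentiating <g, v>.
    gv = sum((g * vi).sum() for g, vi in zip(grads, v))
    hv = torch.autograd.grad(gv, params)
    # Directional curvature v^T H v, normalized by ||v||^2.
    vHv = sum((h * vi).sum() for h, vi in zip(hv, v))
    v_norm_sq = sum((vi * vi).sum() for vi in v)
    return (vHv / v_norm_sq).item()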
+step:9723 train loss:3.385520 +step:9724 train loss:3.449484 +step:9725 train loss:3.451984 +step:9726 train loss:3.459595 +step:9727 train loss:3.418971 +step:9728 train loss:3.446708 +step:9729 train loss:3.401510 +step:9730 train loss:3.427921 +step:9731 train loss:3.425570 +step:9732 train loss:3.386225 +step:9733 train loss:3.475154 +step:9734 train loss:3.405211 +step:9735 train loss:3.466141 +step:9736 train loss:3.478436 +step:9737 train loss:3.388325 +step:9738 train loss:3.469834 +step:9739 train loss:3.416950 +step:9740 train loss:3.405437 +step:9741 train loss:3.478633 +step:9742 train loss:3.370888 +step:9743 train loss:3.427442 +step:9744 train loss:3.429493 +step:9745 train loss:3.399030 +step:9746 train loss:3.401141 +step:9747 train loss:3.397121 +step:9748 train loss:3.450410 +step:9749 train loss:3.372469 +step:9750 validation loss:3.366734 +step:9750 train loss:3.403703 +step:9751 train loss:3.475784 +step:9752 train loss:3.428090 +step:9753 train loss:3.402141 +step:9754 train loss:3.432311 +step:9755 train loss:3.359412 +step:9756 train loss:3.431773 +step:9757 train loss:3.385051 +step:9758 train loss:3.492104 +step:9759 train loss:3.424501 +step:9760 train loss:3.439101 +step:9761 train loss:3.434471 +step:9762 train loss:3.453343 +step:9763 train loss:3.439258 +step:9764 train loss:3.412089 +step:9765 train loss:3.438448 +step:9766 train loss:3.424611 +step:9767 train loss:3.383461 +step:9768 train loss:3.474464 +step:9769 train loss:3.394381 +step:9770 train loss:3.363128 +step:9771 train loss:3.438997 +step:9772 train loss:3.386135 +step:9773 train loss:3.429662 +step:9774 train loss:3.504484 +step:9775 train loss:3.431898 +step:9776 train loss:3.513276 +step:9777 train loss:3.367702 +step:9778 train loss:3.425488 +step:9779 train loss:3.428914 +step:9780 train loss:3.440830 +step:9781 train loss:3.438308 +step:9782 train loss:3.404742 +step:9783 train loss:3.453245 +step:9784 train loss:3.362450 +step:9785 train loss:3.394223 +step:9786 train loss:3.427247 +step:9787 train loss:3.437738 +step:9788 train loss:3.430234 +step:9789 train loss:3.441350 +step:9790 train loss:3.397729 +step:9791 train loss:3.446041 +step:9792 train loss:3.386739 +step:9793 train loss:3.429011 +step:9794 train loss:3.479494 +step:9795 train loss:3.458018 +step:9796 train loss:3.448248 +step:9797 train loss:3.353559 +step:9798 train loss:3.393030 +step:9799 train loss:3.485102 +step:9800 train loss:3.495479 +step:9801 train loss:3.422395 +step:9802 train loss:3.372577 +step:9803 train loss:3.391670 +step:9804 train loss:3.414834 +step:9805 train loss:3.383795 +step:9806 train loss:3.421358 +step:9807 train loss:3.415925 +step:9808 train loss:3.317781 +step:9809 train loss:3.391355 +step:9810 train loss:3.400537 +step:9811 train loss:3.447560 +step:9812 train loss:3.452243 +step:9813 train loss:3.435763 +step:9814 train loss:3.437563 +step:9815 train loss:3.417783 +step:9816 train loss:3.388132 +step:9817 train loss:3.382922 +step:9818 train loss:3.375071 +step:9819 train loss:3.457079 +step:9820 train loss:3.466712 +step:9821 train loss:3.373027 +step:9822 train loss:3.381585 +step:9823 train loss:3.435893 +step:9824 train loss:3.366836 +step:9825 train loss:3.447274 +step:9826 train loss:3.433499 +step:9827 train loss:3.414759 +step:9828 train loss:3.454868 +step:9829 train loss:3.439629 +step:9830 train loss:3.396433 +step:9831 train loss:3.361239 +step:9832 train loss:3.462518 +step:9833 train loss:3.378336 +step:9834 train loss:3.448861 +step:9835 train loss:3.427875 +step:9836 
train loss:3.410589 +step:9837 train loss:3.394097 +step:9838 train loss:3.345796 +step:9839 train loss:3.405261 +step:9840 train loss:3.372018 +step:9841 train loss:3.367594 +step:9842 train loss:3.395129 +step:9843 train loss:3.422810 +step:9844 train loss:3.466002 +step:9845 train loss:3.364683 +step:9846 train loss:3.462970 +step:9847 train loss:3.430743 +step:9848 train loss:3.389105 +step:9849 train loss:3.417360 +step:9850 train loss:3.436433 +step:9851 train loss:3.443315 +step:9852 train loss:3.410981 +step:9853 train loss:3.444085 +step:9854 train loss:3.432952 +step:9855 train loss:3.405041 +step:9856 train loss:3.505641 +step:9857 train loss:3.482993 +step:9858 train loss:3.499827 +step:9859 train loss:3.397654 +step:9860 train loss:3.427112 +step:9861 train loss:3.506949 +step:9862 train loss:3.375810 +step:9863 train loss:3.358863 +step:9864 train loss:3.428111 +step:9865 train loss:3.403558 +step:9866 train loss:3.393352 +step:9867 train loss:3.441349 +step:9868 train loss:3.467566 +step:9869 train loss:3.451823 +step:9870 train loss:3.469465 +step:9871 train loss:3.431750 +step:9872 train loss:3.416399 +step:9873 train loss:3.440403 +step:9874 train loss:3.433632 +step:9875 train loss:3.485059 +step:9876 train loss:3.446505 +step:9877 train loss:3.441323 +step:9878 train loss:3.505150 +step:9879 train loss:3.445553 +step:9880 train loss:3.349863 +step:9881 train loss:3.410965 +step:9882 train loss:3.317152 +step:9883 train loss:3.452033 +step:9884 train loss:3.400542 +step:9885 train loss:3.435143 +step:9886 train loss:3.399914 +step:9887 train loss:3.425526 +step:9888 train loss:3.555814 +step:9889 train loss:3.400676 +step:9890 train loss:3.407814 +step:9891 train loss:3.443377 +step:9892 train loss:3.440656 +step:9893 train loss:3.400488 +step:9894 train loss:3.513550 +step:9895 train loss:3.432820 +step:9896 train loss:3.388198 +step:9897 train loss:3.386034 +step:9898 train loss:3.373991 +step:9899 train loss:3.373629 +step:9900 train loss:3.350110 +step:9901 train loss:3.379036 +step:9902 train loss:3.438243 +step:9903 train loss:3.423190 +step:9904 train loss:3.347431 +step:9905 train loss:3.455520 +step:9906 train loss:3.447138 +step:9907 train loss:3.343088 +step:9908 train loss:3.428044 +step:9909 train loss:3.400964 +step:9910 train loss:3.384211 +step:9911 train loss:3.396746 +step:9912 train loss:3.421037 +step:9913 train loss:3.391761 +step:9914 train loss:3.377741 +step:9915 train loss:3.409355 +step:9916 train loss:3.439981 +step:9917 train loss:3.351922 +step:9918 train loss:3.393172 +step:9919 train loss:3.415625 +step:9920 train loss:3.396542 +step:9921 train loss:3.437252 +step:9922 train loss:3.422344 +step:9923 train loss:3.459296 +step:9924 train loss:3.435108 +step:9925 train loss:3.444417 +step:9926 train loss:3.406699 +step:9927 train loss:3.431596 +step:9928 train loss:3.419552 +step:9929 train loss:3.451735 +step:9930 train loss:3.441220 +step:9931 train loss:3.434489 +step:9932 train loss:3.436113 +step:9933 train loss:3.408632 +step:9934 train loss:3.441640 +step:9935 train loss:3.453005 +step:9936 train loss:3.401849 +step:9937 train loss:3.404960 +step:9938 train loss:3.428727 +step:9939 train loss:3.525965 +step:9940 train loss:3.378153 +step:9941 train loss:3.416970 +step:9942 train loss:3.382036 +step:9943 train loss:3.446932 +step:9944 train loss:3.495755 +step:9945 train loss:3.483772 +step:9946 train loss:3.478216 +step:9947 train loss:3.389764 +step:9948 train loss:3.385269 +step:9949 train loss:3.432777 +step:9950 train loss:3.400542 
+step:9951 train loss:3.402611 +step:9952 train loss:3.425416 +step:9953 train loss:3.457977 +step:9954 train loss:3.395815 +step:9955 train loss:3.422753 +step:9956 train loss:3.419747 +step:9957 train loss:3.396154 +step:9958 train loss:3.381761 +step:9959 train loss:3.356048 +step:9960 train loss:3.430746 +step:9961 train loss:3.471421 +step:9962 train loss:3.332969 +step:9963 train loss:3.413923 +step:9964 train loss:3.402693 +step:9965 train loss:3.385689 +step:9966 train loss:3.426491 +step:9967 train loss:3.412385 +step:9968 train loss:3.441193 +step:9969 train loss:3.408824 +step:9970 train loss:3.389720 +step:9971 train loss:3.421815 +step:9972 train loss:3.405638 +step:9973 train loss:3.387758 +step:9974 train loss:3.446682 +step:9975 train loss:3.461776 +step:9976 train loss:3.396997 +step:9977 train loss:3.386239 +step:9978 train loss:3.425836 +step:9979 train loss:3.432638 +step:9980 train loss:3.415025 +step:9981 train loss:3.442056 +step:9982 train loss:3.403746 +step:9983 train loss:3.456723 +step:9984 train loss:3.385044 +step:9985 train loss:3.397750 +step:9986 train loss:3.427330 +step:9987 train loss:3.445998 +step:9988 train loss:3.409285 +step:9989 train loss:3.421463 +step:9990 train loss:3.462120 +step:9991 train loss:3.577853 +step:9992 train loss:3.433887 +step:9993 train loss:3.402666 +step:9994 train loss:3.373455 +step:9995 train loss:3.433777 +step:9996 train loss:3.360756 +step:9997 train loss:3.392331 +step:9998 train loss:3.371865 +step:9999 train loss:3.417386 +step:10000 validation loss:3.356963 total_sharp:1.0203e-02 L1_sharp:1.0188e-02 L2_sharp:1.7307e-02 L3_sharp:2.0726e-02 L4_sharp:1.5530e-02 L5_sharp:7.3485e-03 L6_sharp:3.5145e-03 L7_sharp:3.3090e-03 L8_sharp:2.3668e-03 L9_sharp:1.8345e-03 L10_sharp:8.1322e-04 L11_sharp:7.0505e-04 L12_sharp:8.0827e-04 total_fnorm:1.0880e-03 total_l1_linf:9.0346e+00 total_spectral:1.0880e-03 L1_fnorm:2.4876e-04 L2_fnorm:1.3223e-04 L3_fnorm:1.4248e-04 L4_fnorm:1.7612e-04 L5_fnorm:2.0412e-04 L6_fnorm:2.6268e-04 L7_fnorm:2.8427e-04 L8_fnorm:2.8960e-04 L9_fnorm:2.9149e-04 L10_fnorm:2.9456e-04 L11_fnorm:2.8294e-04 L12_fnorm:2.9922e-04 L1_l1linf:2.0246e-04 L2_l1linf:2.6358e-04 L3_l1linf:3.1510e-04 L4_l1linf:3.8728e-04 L5_l1linf:3.5671e-04 L6_l1linf:3.5746e-04 L7_l1linf:3.5383e-04 L8_l1linf:3.6035e-04 L9_l1linf:3.6181e-04 L10_l1linf:3.3864e-04 L11_l1linf:3.1960e-04 L12_l1linf:3.1824e-04 L1_spectral:6.0391e-06 L2_spectral:6.0196e-06 L3_spectral:7.0581e-06 L4_spectral:9.4792e-06 L5_spectral:9.5144e-06 L6_spectral:7.8308e-06 L7_spectral:7.8913e-06 L8_spectral:7.9237e-06 L9_spectral:8.0278e-06 L10_spectral:7.5316e-06 L11_spectral:7.1172e-06 L12_spectral:7.1159e-06 v_norm:1.0880e-03 cos_v_-g_hvp:2.5716e-02 g_hvp_norm:4.6131e-01 cos_v_-g_t:2.6571e-02 g_t_norm:6.1690e-01 hv_norm:2.9492e-03 cos_v_hv:3.7641e-03 hg_norm:2.4789e+03 cos_g_hg:-2.9606e-02 v_par:2.9450e-06 v_perp:1.0880e-03 L1_cos_v_neg_g:9.2623e-03 L1_v_norm:2.4876e-04 L2_cos_v_neg_g:1.6968e-02 L2_v_norm:1.3223e-04 L3_cos_v_neg_g:2.4126e-02 L3_v_norm:1.4248e-04 L4_cos_v_neg_g:2.2600e-02 L4_v_norm:1.7613e-04 L5_cos_v_neg_g:2.2892e-02 L5_v_norm:2.0412e-04 L6_cos_v_neg_g:2.3905e-02 L6_v_norm:2.6269e-04 L7_cos_v_neg_g:2.4765e-02 L7_v_norm:2.8427e-04 L8_cos_v_neg_g:2.4524e-02 L8_v_norm:2.8960e-04 L9_cos_v_neg_g:2.6355e-02 L9_v_norm:2.9149e-04 L10_cos_v_neg_g:2.9512e-02 L10_v_norm:2.9456e-04 L11_cos_v_neg_g:3.7806e-02 L11_v_norm:2.8295e-04 L12_cos_v_neg_g:6.6774e-02 L12_v_norm:2.9922e-04 diff --git 
a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/config.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/config.json new file mode 100644 index 0000000000000000000000000000000000000000..18ee09c4055472526107129d2333e4b097f8c779 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/config.json @@ -0,0 +1,41 @@ +{ + "cli_args": { + "input_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_train_*.bin", + "input_val_bin": "/home/aiops/zhangfz/MUON_theory/modded-nanogpt/data/fineweb10B/fineweb_val_*.bin", + "output_dir": "/home/aiops/zhangfz/MUON_sharpness/logs_sharpness_pure_qk_nonorm_no_clip/layer_wise_new_code_rand", + "model": "d12", + "batch_size": 4, + "sequence_length": 1024, + "total_batch_size": 524288, + "num_iterations": 10000, + "inference_only": 0, + "adam_lr": 0.001, + "warmup_iters": 700, + "lr_decay_frac": 0.0, + "weight_decay": 0.0, + "grad_clip": 10000.0, + "val_loss_every": 250, + "val_max_steps": 20, + "sample_every": 0, + "overfit_single_batch": 0, + "shuffle_files": true, + "tensorcores": 1, + "device": "", + "compile": 1, + "flash": 1, + "dtype": "bfloat16", + "zero_stage": 1, + "optimizer": "muon", + "muon_lr": 0.01, + "muon_momentum": 0.95, + "muon_weight_decay": 0.0, + "muon_ns_steps": 5, + "muon_nesterov": false, + "write_tensors": 0, + "seed": 45, + "analyze_sharpness": true, + "sharpness_analysis_interval": 500 + }, + "run_uuid": "0e58633d-c658-4b9d-ba8a-a8f1876052d5", + "script_code_logged_at_start": true +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1000.json new file mode 100644 index 0000000000000000000000000000000000000000..5417712531166a69c42ded0d604047de85f2e2d3 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.4116506576538086, + "total_l1_linf_norm": 20641.66015625, + "total_spectral_norm": 2.411651134490967, + "embed_lm_head_update_fnorm": 1.3272377252578735, + "embed_lm_head_max_l1_linf_norm": 0.33344632387161255, + "embed_lm_head_max_spectral_norm": 0.2882075905799866, + "layer_1_update_fnorm": 0.6089069247245789, + "layer_1_max_l1_linf_norm": 0.4322613477706909, + "layer_1_max_spectral_norm": 0.012061327695846558, + "layer_2_update_fnorm": 0.5402422547340393, + "layer_2_max_l1_linf_norm": 0.4197053015232086, + "layer_2_max_spectral_norm": 0.012065564282238483, + "layer_3_update_fnorm": 0.5364785194396973, + "layer_3_max_l1_linf_norm": 0.42699992656707764, + "layer_3_max_spectral_norm": 0.012063593603670597, + "layer_4_update_fnorm": 0.5459780097007751, + "layer_4_max_l1_linf_norm": 0.39482980966567993, + "layer_4_max_spectral_norm": 0.012043556198477745, + "layer_5_update_fnorm": 0.5747373700141907, + "layer_5_max_l1_linf_norm": 0.401324599981308, + "layer_5_max_spectral_norm": 0.012046055868268013, + "layer_6_update_fnorm": 0.5808573961257935, + "layer_6_max_l1_linf_norm": 0.40690505504608154, + "layer_6_max_spectral_norm": 0.01206048671156168, + "layer_7_update_fnorm": 0.5911704897880554, + "layer_7_max_l1_linf_norm": 0.40230387449264526, + "layer_7_max_spectral_norm": 
0.012040740810334682, + "layer_8_update_fnorm": 0.5910958051681519, + "layer_8_max_l1_linf_norm": 0.4100784659385681, + "layer_8_max_spectral_norm": 0.012044680304825306, + "layer_9_update_fnorm": 0.5986559391021729, + "layer_9_max_l1_linf_norm": 0.40256980061531067, + "layer_9_max_spectral_norm": 0.012046543881297112, + "layer_10_update_fnorm": 0.6001666784286499, + "layer_10_max_l1_linf_norm": 0.4054636061191559, + "layer_10_max_spectral_norm": 0.012042936868965626, + "layer_11_update_fnorm": 0.5996579527854919, + "layer_11_max_l1_linf_norm": 0.4002988338470459, + "layer_11_max_spectral_norm": 0.012045100331306458, + "layer_12_update_fnorm": 0.6009969711303711, + "layer_12_max_l1_linf_norm": 0.3982405662536621, + "layer_12_max_spectral_norm": 0.01204516738653183, + "block0_q_update_fnorm": 0.24783781170845032, + "block0_q_max_l1_linf_norm": 0.21036237478256226, + "block0_q_max_spectral_norm": 0.012045429088175297, + "block0_k_update_fnorm": 0.24750874936580658, + "block0_k_max_l1_linf_norm": 0.20548665523529053, + "block0_k_max_spectral_norm": 0.01204056479036808, + "block0_v_update_fnorm": 0.23742830753326416, + "block0_v_max_l1_linf_norm": 0.20497681200504303, + "block0_v_max_spectral_norm": 0.012042849324643612, + "block0_o_update_fnorm": 0.24247322976589203, + "block0_o_max_l1_linf_norm": 0.20293384790420532, + "block0_o_max_spectral_norm": 0.012032036669552326, + "block0_mlp_win_update_fnorm": 0.2554621994495392, + "block0_mlp_win_max_l1_linf_norm": 0.13801011443138123, + "block0_mlp_win_max_spectral_norm": 0.012061327695846558, + "block0_mlp_wout_update_fnorm": 0.2598939538002014, + "block0_mlp_wout_max_l1_linf_norm": 0.4322613477706909, + "block0_mlp_wout_max_spectral_norm": 0.012048373930156231, + "block3_q_update_fnorm": 0.2077111452817917, + "block3_q_max_l1_linf_norm": 0.20926383137702942, + "block3_q_max_spectral_norm": 0.012032371014356613, + "block3_k_update_fnorm": 0.18139035999774933, + "block3_k_max_l1_linf_norm": 0.2127673625946045, + "block3_k_max_spectral_norm": 0.012042192742228508, + "block3_v_update_fnorm": 0.19880135357379913, + "block3_v_max_l1_linf_norm": 0.2068517506122589, + "block3_v_max_spectral_norm": 0.012033576145768166, + "block3_o_update_fnorm": 0.22921647131443024, + "block3_o_max_l1_linf_norm": 0.19226539134979248, + "block3_o_max_spectral_norm": 0.012039829045534134, + "block3_mlp_win_update_fnorm": 0.26884880661964417, + "block3_mlp_win_max_l1_linf_norm": 0.1761462390422821, + "block3_mlp_win_max_spectral_norm": 0.012043556198477745, + "block3_mlp_wout_update_fnorm": 0.2399614453315735, + "block3_mlp_wout_max_l1_linf_norm": 0.39482980966567993, + "block3_mlp_wout_max_spectral_norm": 0.011392041109502316, + "block7_q_update_fnorm": 0.23894508183002472, + "block7_q_max_l1_linf_norm": 0.21743828058242798, + "block7_q_max_spectral_norm": 0.012040037661790848, + "block7_k_update_fnorm": 0.2373962104320526, + "block7_k_max_l1_linf_norm": 0.21474409103393555, + "block7_k_max_spectral_norm": 0.012044680304825306, + "block7_v_update_fnorm": 0.2383827418088913, + "block7_v_max_l1_linf_norm": 0.208403080701828, + "block7_v_max_spectral_norm": 0.01204115804284811, + "block7_o_update_fnorm": 0.24703772366046906, + "block7_o_max_l1_linf_norm": 0.2082061618566513, + "block7_o_max_spectral_norm": 0.012042411603033543, + "block7_mlp_win_update_fnorm": 0.2412969022989273, + "block7_mlp_win_max_l1_linf_norm": 0.1671123206615448, + "block7_mlp_win_max_spectral_norm": 0.011388594284653664, + "block7_mlp_wout_update_fnorm": 0.24447424709796906, + 
"block7_mlp_wout_max_l1_linf_norm": 0.4100784659385681, + "block7_mlp_wout_max_spectral_norm": 0.011369328945875168, + "block11_q_update_fnorm": 0.24829696118831635, + "block11_q_max_l1_linf_norm": 0.21219082176685333, + "block11_q_max_spectral_norm": 0.012039676308631897, + "block11_k_update_fnorm": 0.24793948233127594, + "block11_k_max_l1_linf_norm": 0.2148541808128357, + "block11_k_max_spectral_norm": 0.012037988752126694, + "block11_v_update_fnorm": 0.24548466503620148, + "block11_v_max_l1_linf_norm": 0.20689015090465546, + "block11_v_max_spectral_norm": 0.012041709385812283, + "block11_o_update_fnorm": 0.24806596338748932, + "block11_o_max_l1_linf_norm": 0.207844078540802, + "block11_o_max_spectral_norm": 0.01204516738653183, + "block11_mlp_win_update_fnorm": 0.24191975593566895, + "block11_mlp_win_max_l1_linf_norm": 0.15236195921897888, + "block11_mlp_win_max_spectral_norm": 0.01138988696038723, + "block11_mlp_wout_update_fnorm": 0.24011017382144928, + "block11_mlp_wout_max_l1_linf_norm": 0.3982405662536621, + "block11_mlp_wout_max_spectral_norm": 0.011384010314941406, + "total_sharpness": 0.008061428554356098, + "block_total_sharpness": 0.010334647260606289, + "v_norm_block": 2.0135793685913086, + "v_T_H_v_block": 0.0419018417596817, + "v_norm": 2.4116506576538086, + "ip_v_neg_g_hvp": 0.07442989945411682, + "cos_v_neg_g_hvp": 0.06372523307800293, + "g_hvp_norm": 0.4843079745769501, + "ip_v_neg_g_t": 0.07497280836105347, + "cos_v_neg_g_t": 0.07204489409923553, + "g_t_norm": 0.43150535225868225, + "g_norm": 0.4843079745769501, + "hv_norm": 0.6465440392494202, + "cos_v_hv": 0.03006964549422264, + "hg_norm": 21.057958602905273, + "cos_g_hg": 0.31255456805229187, + "v_parallel_norm": 0.010420362465083599, + "v_perp_norm": 2.411628007888794, + "embed_lm_head_v_norm": 1.3272377252578735, + "embed_lm_head_cos_v_neg_g": 0.10724395513534546, + "layer_1_v_norm": 0.6089069247245789, + "layer_1_cos_v_neg_g": 0.05746515467762947, + "layer_2_v_norm": 0.5402422547340393, + "layer_2_cos_v_neg_g": 0.04565218836069107, + "layer_3_v_norm": 0.5364785194396973, + "layer_3_cos_v_neg_g": 0.039326321333646774, + "layer_4_v_norm": 0.5459780097007751, + "layer_4_cos_v_neg_g": 0.048537757247686386, + "layer_5_v_norm": 0.5747373700141907, + "layer_5_cos_v_neg_g": 0.06110072135925293, + "layer_6_v_norm": 0.5808573961257935, + "layer_6_cos_v_neg_g": 0.06648963689804077, + "layer_7_v_norm": 0.5911704897880554, + "layer_7_cos_v_neg_g": 0.07157648354768753, + "layer_8_v_norm": 0.5910958647727966, + "layer_8_cos_v_neg_g": 0.07273589819669724, + "layer_9_v_norm": 0.5986559391021729, + "layer_9_cos_v_neg_g": 0.07516475766897202, + "layer_10_v_norm": 0.6001666784286499, + "layer_10_cos_v_neg_g": 0.08185745775699615, + "layer_11_v_norm": 0.5996579527854919, + "layer_11_cos_v_neg_g": 0.08767735213041306, + "layer_12_v_norm": 0.6009969711303711, + "layer_12_cos_v_neg_g": 0.09235925227403641, + "block0_q_v_norm": 0.24783781170845032, + "block0_q_cos_v_neg_g": 0.10353387892246246, + "block0_k_v_norm": 0.24750874936580658, + "block0_k_cos_v_neg_g": 0.1151501014828682, + "block0_v_v_norm": 0.23742830753326416, + "block0_v_cos_v_neg_g": 0.04046417027711868, + "block0_o_v_norm": 0.24247322976589203, + "block0_o_cos_v_neg_g": 0.06755910068750381, + "block0_mlp_win_v_norm": 0.2554621994495392, + "block0_mlp_win_cos_v_neg_g": 0.08860601484775543, + "block0_mlp_wout_v_norm": 0.2598939538002014, + "block0_mlp_wout_cos_v_neg_g": 0.10087303817272186, + "block3_q_v_norm": 0.2077111452817917, + "block3_q_cos_v_neg_g": 
0.06011199206113815, + "block3_k_v_norm": 0.18139035999774933, + "block3_k_cos_v_neg_g": 0.03975509852170944, + "block3_v_v_norm": 0.19880135357379913, + "block3_v_cos_v_neg_g": 0.042886387556791306, + "block3_o_v_norm": 0.22921647131443024, + "block3_o_cos_v_neg_g": 0.06427350640296936, + "block3_mlp_win_v_norm": 0.26884880661964417, + "block3_mlp_win_cos_v_neg_g": 0.06592405587434769, + "block3_mlp_wout_v_norm": 0.2399614453315735, + "block3_mlp_wout_cos_v_neg_g": 0.09712131321430206, + "block7_q_v_norm": 0.23894508183002472, + "block7_q_cos_v_neg_g": 0.08147315680980682, + "block7_k_v_norm": 0.2373962104320526, + "block7_k_cos_v_neg_g": 0.09583248198032379, + "block7_v_v_norm": 0.2383827418088913, + "block7_v_cos_v_neg_g": 0.0582159198820591, + "block7_o_v_norm": 0.24703772366046906, + "block7_o_cos_v_neg_g": 0.08156526833772659, + "block7_mlp_win_v_norm": 0.2412969022989273, + "block7_mlp_win_cos_v_neg_g": 0.10279692709445953, + "block7_mlp_wout_v_norm": 0.24447424709796906, + "block7_mlp_wout_cos_v_neg_g": 0.14200851321220398, + "block11_q_v_norm": 0.24829696118831635, + "block11_q_cos_v_neg_g": 0.1024906113743782, + "block11_k_v_norm": 0.24793948233127594, + "block11_k_cos_v_neg_g": 0.10858013480901718, + "block11_v_v_norm": 0.24548466503620148, + "block11_v_cos_v_neg_g": 0.07717373222112656, + "block11_o_v_norm": 0.24806596338748932, + "block11_o_cos_v_neg_g": 0.09815908223390579, + "block11_mlp_win_v_norm": 0.24191975593566895, + "block11_mlp_win_cos_v_neg_g": 0.12515941262245178, + "block11_mlp_wout_v_norm": 0.24011017382144928, + "block11_mlp_wout_cos_v_neg_g": 0.10949917882680893, + "embed_lm_head_sharpness": 0.0005724877119064331, + "layer_1_sharpness": 0.010526483878493309, + "layer_2_sharpness": 0.004447670187801123, + "layer_3_sharpness": 0.0012797275558114052, + "layer_4_sharpness": 0.0017304870998486876, + "layer_5_sharpness": 0.0018668543780222535, + "layer_6_sharpness": 0.001921056187711656, + "layer_7_sharpness": 0.001755828969180584, + "layer_8_sharpness": 0.001332352519966662, + "layer_9_sharpness": 0.0008078814134933054, + "layer_10_sharpness": 0.0006512476247735322, + "layer_11_sharpness": 0.0006081518949940801, + "layer_12_sharpness": 0.0007467914838343859, + "block0_q_sharpness": 0.0007430654368363321, + "block0_k_sharpness": 0.0008361447835341096, + "block0_v_sharpness": 0.0052327727898955345, + "block0_o_sharpness": 0.0014961300184950233, + "block0_mlp_win_sharpness": 0.004214281681925058, + "block0_mlp_wout_sharpness": 0.00343280378729105, + "block3_q_sharpness": 0.00018919221474789083, + "block3_k_sharpness": 0.0028851116076111794, + "block3_v_sharpness": 0.0027135428972542286, + "block3_o_sharpness": 0.0005036035436205566, + "block3_mlp_win_sharpness": 0.00015898597484920174, + "block3_mlp_wout_sharpness": 0.00017937421216629446, + "block7_q_sharpness": 0.0002476652734912932, + "block7_k_sharpness": 0.0004250830097589642, + "block7_v_sharpness": 0.0020724590867757797, + "block7_o_sharpness": 0.00022908422397449613, + "block7_mlp_win_sharpness": 0.0002457577211316675, + "block7_mlp_wout_sharpness": 0.00024905212922021747, + "block11_q_sharpness": 3.3060488931369036e-05, + "block11_k_sharpness": 5.924881770624779e-05, + "block11_v_sharpness": 0.00017832466983236372, + "block11_o_sharpness": 0.00010330010991310701, + "block11_mlp_win_sharpness": 0.0005000350065529346, + "block11_mlp_wout_sharpness": 0.0008467062725685537, + "sum_layer_numerators": 0.009441653665809346, + "block_diag_sharpness": 0.002328684006103934, + "cross_layer_sharpness": 
0.008005963254502355 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_10000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_10000.json new file mode 100644 index 0000000000000000000000000000000000000000..88d628e2ee3229012e36d8571efbaa78b6a72e3c --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_10000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.0010445165680721402, + "total_l1_linf_norm": 8.463610649108887, + "total_spectral_norm": 0.001044516684487462, + "embed_lm_head_update_fnorm": 0.0006603488000109792, + "embed_lm_head_max_l1_linf_norm": 0.00018554019334260374, + "embed_lm_head_max_spectral_norm": 0.0001300754665862769, + "layer_1_update_fnorm": 0.0001264675665879622, + "layer_1_max_l1_linf_norm": 0.00031476927688345313, + "layer_1_max_spectral_norm": 7.097703928593546e-06, + "layer_2_update_fnorm": 8.259715104941279e-05, + "layer_2_max_l1_linf_norm": 0.00040402839658781886, + "layer_2_max_spectral_norm": 8.73699627845781e-06, + "layer_3_update_fnorm": 0.00010536667832639068, + "layer_3_max_l1_linf_norm": 0.00039949759957380593, + "layer_3_max_spectral_norm": 8.767241524765268e-06, + "layer_4_update_fnorm": 0.00014854020264465362, + "layer_4_max_l1_linf_norm": 0.00039810652378946543, + "layer_4_max_spectral_norm": 9.804695764614735e-06, + "layer_5_update_fnorm": 0.00020016984490212053, + "layer_5_max_l1_linf_norm": 0.00037517689634114504, + "layer_5_max_spectral_norm": 1.0787626706587616e-05, + "layer_6_update_fnorm": 0.00023679289733991027, + "layer_6_max_l1_linf_norm": 0.0003617893671616912, + "layer_6_max_spectral_norm": 7.988512152223848e-06, + "layer_7_update_fnorm": 0.00028244024724699557, + "layer_7_max_l1_linf_norm": 0.00037628132849931717, + "layer_7_max_spectral_norm": 8.290209734695964e-06, + "layer_8_update_fnorm": 0.00028496721643023193, + "layer_8_max_l1_linf_norm": 0.0003808016190305352, + "layer_8_max_spectral_norm": 8.361930667888373e-06, + "layer_9_update_fnorm": 0.00029013698804192245, + "layer_9_max_l1_linf_norm": 0.0003799654368776828, + "layer_9_max_spectral_norm": 8.37978586787358e-06, + "layer_10_update_fnorm": 0.0002931155322585255, + "layer_10_max_l1_linf_norm": 0.00037475701537914574, + "layer_10_max_spectral_norm": 8.310022167279385e-06, + "layer_11_update_fnorm": 0.0002824499679263681, + "layer_11_max_l1_linf_norm": 0.0003498335718177259, + "layer_11_max_spectral_norm": 7.738050044281408e-06, + "layer_12_update_fnorm": 0.0003032821405213326, + "layer_12_max_l1_linf_norm": 0.00034371187211945653, + "layer_12_max_spectral_norm": 7.608122814417584e-06, + "block0_q_update_fnorm": 4.574559716274962e-05, + "block0_q_max_l1_linf_norm": 0.00010551615559961647, + "block0_q_max_spectral_norm": 6.0194029174454045e-06, + "block0_k_update_fnorm": 5.625575431622565e-05, + "block0_k_max_l1_linf_norm": 0.00011213660764042288, + "block0_k_max_spectral_norm": 6.020446107868338e-06, + "block0_v_update_fnorm": 3.613729131757282e-05, + "block0_v_max_l1_linf_norm": 6.563222268596292e-05, + "block0_v_max_spectral_norm": 6.005026079947129e-06, + "block0_o_update_fnorm": 5.846151179866865e-05, + "block0_o_max_l1_linf_norm": 7.446389645338058e-05, + "block0_o_max_spectral_norm": 6.014077371219173e-06, + "block0_mlp_win_update_fnorm": 5.4570995416725054e-05, + "block0_mlp_win_max_l1_linf_norm": 8.60798463691026e-05, + 
"block0_mlp_win_max_spectral_norm": 6.016670340613928e-06, + "block0_mlp_wout_update_fnorm": 5.4199801525101066e-05, + "block0_mlp_wout_max_l1_linf_norm": 8.647095819469541e-05, + "block0_mlp_wout_max_spectral_norm": 6.014537120790919e-06, + "block3_q_update_fnorm": 1.588253871886991e-05, + "block3_q_max_l1_linf_norm": 4.68696525786072e-05, + "block3_q_max_spectral_norm": 5.656679149979027e-06, + "block3_k_update_fnorm": 2.0098130335099995e-05, + "block3_k_max_l1_linf_norm": 4.859908949583769e-05, + "block3_k_max_spectral_norm": 5.994618732074741e-06, + "block3_v_update_fnorm": 4.2894924263237044e-05, + "block3_v_max_l1_linf_norm": 5.5407246691174805e-05, + "block3_v_max_spectral_norm": 6.021009539836086e-06, + "block3_o_update_fnorm": 5.4736377933295444e-05, + "block3_o_max_l1_linf_norm": 4.751134110847488e-05, + "block3_o_max_spectral_norm": 6.020653472660342e-06, + "block3_mlp_win_update_fnorm": 7.900300261098891e-05, + "block3_mlp_win_max_l1_linf_norm": 9.300460806116462e-05, + "block3_mlp_win_max_spectral_norm": 6.020791715855012e-06, + "block3_mlp_wout_update_fnorm": 0.00010004799696616828, + "block3_mlp_wout_max_l1_linf_norm": 0.00015625344531144947, + "block3_mlp_wout_max_spectral_norm": 6.028275038261199e-06, + "block7_q_update_fnorm": 0.00011719736357918009, + "block7_q_max_l1_linf_norm": 0.00010480555647518486, + "block7_q_max_spectral_norm": 6.046368980605621e-06, + "block7_k_update_fnorm": 0.00012244378740433604, + "block7_k_max_l1_linf_norm": 0.0001033059525070712, + "block7_k_max_spectral_norm": 6.040662356099347e-06, + "block7_v_update_fnorm": 7.31673208065331e-05, + "block7_v_max_l1_linf_norm": 9.399175178259611e-05, + "block7_v_max_spectral_norm": 6.029194082657341e-06, + "block7_o_update_fnorm": 0.00012309898738749325, + "block7_o_max_l1_linf_norm": 0.00010454748553456739, + "block7_o_max_spectral_norm": 6.040268090146128e-06, + "block7_mlp_win_update_fnorm": 0.00013324384053703398, + "block7_mlp_win_max_l1_linf_norm": 7.925982936285436e-05, + "block7_mlp_win_max_spectral_norm": 6.0455331549746916e-06, + "block7_mlp_wout_update_fnorm": 0.00011855891352752224, + "block7_mlp_wout_max_l1_linf_norm": 0.00019585801055654883, + "block7_mlp_wout_max_spectral_norm": 5.703232545783976e-06, + "block11_q_update_fnorm": 0.00012329434684943408, + "block11_q_max_l1_linf_norm": 0.0001045475946739316, + "block11_q_max_spectral_norm": 6.026990376994945e-06, + "block11_k_update_fnorm": 0.00012451258953660727, + "block11_k_max_l1_linf_norm": 0.00010788322106236592, + "block11_k_max_spectral_norm": 6.022260549798375e-06, + "block11_v_update_fnorm": 0.00011914671631529927, + "block11_v_max_l1_linf_norm": 0.0001029554259730503, + "block11_v_max_spectral_norm": 6.037434104655404e-06, + "block11_o_update_fnorm": 0.00012381676060613245, + "block11_o_max_l1_linf_norm": 0.00010495135211385787, + "block11_o_max_spectral_norm": 6.032409601175459e-06, + "block11_mlp_win_update_fnorm": 0.00012077540304744616, + "block11_mlp_win_max_l1_linf_norm": 7.811699470039457e-05, + "block11_mlp_win_max_spectral_norm": 6.013245638314402e-06, + "block11_mlp_wout_update_fnorm": 0.00013046726235188544, + "block11_mlp_wout_max_l1_linf_norm": 0.00022852493566460907, + "block11_mlp_wout_max_spectral_norm": 6.045177997293649e-06, + "total_sharpness": -0.017622454091906548, + "block_total_sharpness": -0.0347004197537899, + "v_norm_block": 0.0008092926582321525, + "v_T_H_v_block": -2.2727201454131318e-08, + "v_norm": 0.0010445170337334275, + "ip_v_neg_g_hvp": 1.3292245057527907e-05, + "cos_v_neg_g_hvp": 
0.02217768505215645, + "g_hvp_norm": 0.5738080143928528, + "ip_v_neg_g_t": 3.166215537930839e-05, + "cos_v_neg_g_t": 0.03771344572305679, + "g_t_norm": 0.803764283657074, + "g_norm": 0.5738080143928528, + "hv_norm": 0.01169899757951498, + "cos_v_hv": -0.0015733771724626422, + "hg_norm": 4510.8486328125, + "cos_g_hg": -0.050232212990522385, + "v_parallel_norm": 2.2353449367074063e-06, + "v_perp_norm": 0.0010445151710882783, + "embed_lm_head_v_norm": 0.0006603494985029101, + "embed_lm_head_cos_v_neg_g": 0.056950222700834274, + "layer_1_v_norm": 0.00012647152470890433, + "layer_1_cos_v_neg_g": 0.023425176739692688, + "layer_2_v_norm": 8.260320464614779e-05, + "layer_2_cos_v_neg_g": -0.0072475639171898365, + "layer_3_v_norm": 0.00010537142225075513, + "layer_3_cos_v_neg_g": 0.007396496366709471, + "layer_4_v_norm": 0.00014854356413707137, + "layer_4_cos_v_neg_g": 0.00516651151701808, + "layer_5_v_norm": 0.00020017233327962458, + "layer_5_cos_v_neg_g": 0.019410869106650352, + "layer_6_v_norm": 0.00023679500736761838, + "layer_6_cos_v_neg_g": 0.02649001218378544, + "layer_7_v_norm": 0.00028244202258065343, + "layer_7_cos_v_neg_g": 0.02613353356719017, + "layer_8_v_norm": 0.00028496896266005933, + "layer_8_cos_v_neg_g": 0.027317428961396217, + "layer_9_v_norm": 0.0002901387051679194, + "layer_9_cos_v_neg_g": 0.030218398198485374, + "layer_10_v_norm": 0.000293117220280692, + "layer_10_cos_v_neg_g": 0.031762897968292236, + "layer_11_v_norm": 0.000282451743260026, + "layer_11_cos_v_neg_g": 0.041384123265743256, + "layer_12_v_norm": 0.0003032837703358382, + "layer_12_cos_v_neg_g": 0.07027791440486908, + "block0_q_v_norm": 4.575652565108612e-05, + "block0_q_cos_v_neg_g": 0.03691348060965538, + "block0_k_v_norm": 5.626464189845137e-05, + "block0_k_cos_v_neg_g": 0.018246863037347794, + "block0_v_v_norm": 3.615112291299738e-05, + "block0_v_cos_v_neg_g": 0.018183572217822075, + "block0_o_v_norm": 5.8470064686844125e-05, + "block0_o_cos_v_neg_g": 0.038059961050748825, + "block0_mlp_win_v_norm": 5.458015948534012e-05, + "block0_mlp_win_cos_v_neg_g": 0.025458836928009987, + "block0_mlp_wout_v_norm": 5.420902743935585e-05, + "block0_mlp_wout_cos_v_neg_g": 0.029669499024748802, + "block3_q_v_norm": 1.5913989045657218e-05, + "block3_q_cos_v_neg_g": 0.0022296584211289883, + "block3_k_v_norm": 2.012299228226766e-05, + "block3_k_cos_v_neg_g": -0.046357594430446625, + "block3_v_v_norm": 4.2906580347334966e-05, + "block3_v_cos_v_neg_g": -0.0027385042048990726, + "block3_o_v_norm": 5.474551289808005e-05, + "block3_o_cos_v_neg_g": 0.010320981964468956, + "block3_mlp_win_v_norm": 7.900933269411325e-05, + "block3_mlp_win_cos_v_neg_g": 0.0009175107115879655, + "block3_mlp_wout_v_norm": 0.00010005299554904923, + "block3_mlp_wout_cos_v_neg_g": 0.018276412039995193, + "block7_q_v_norm": 0.00011720163456629962, + "block7_q_cos_v_neg_g": 0.029762301594018936, + "block7_k_v_norm": 0.00012244786194059998, + "block7_k_cos_v_neg_g": 0.0704674944281578, + "block7_v_v_norm": 7.317415293073282e-05, + "block7_v_cos_v_neg_g": 0.026640472933650017, + "block7_o_v_norm": 0.00012310304737184197, + "block7_o_cos_v_neg_g": 0.08143767714500427, + "block7_mlp_win_v_norm": 0.00013324758037924767, + "block7_mlp_win_cos_v_neg_g": 0.03181599825620651, + "block7_mlp_wout_v_norm": 0.00011856313358293846, + "block7_mlp_wout_cos_v_neg_g": 0.11631888151168823, + "block11_q_v_norm": 0.00012329839228186756, + "block11_q_cos_v_neg_g": 0.07384598255157471, + "block11_k_v_norm": 0.0001245166058652103, + "block11_k_cos_v_neg_g": 0.09764590859413147, + 
"block11_v_v_norm": 0.00011915091454284266, + "block11_v_cos_v_neg_g": 0.0441843718290329, + "block11_o_v_norm": 0.00012382080603856593, + "block11_o_cos_v_neg_g": 0.08713383227586746, + "block11_mlp_win_v_norm": 0.00012077954306732863, + "block11_mlp_win_cos_v_neg_g": 0.08567804098129272, + "block11_mlp_wout_v_norm": 0.0001304710895055905, + "block11_mlp_wout_cos_v_neg_g": 0.07965870946645737, + "embed_lm_head_sharpness": 0.010670901276171207, + "layer_1_sharpness": -1.2474918365478516, + "layer_2_sharpness": -0.37211406230926514, + "layer_3_sharpness": -0.058980681002140045, + "layer_4_sharpness": -0.015294622629880905, + "layer_5_sharpness": 0.006190325133502483, + "layer_6_sharpness": 0.005708346143364906, + "layer_7_sharpness": 0.0035888231359422207, + "layer_8_sharpness": 0.003724200651049614, + "layer_9_sharpness": 0.0019426712533459067, + "layer_10_sharpness": 0.0010923368390649557, + "layer_11_sharpness": 0.001106694689951837, + "layer_12_sharpness": 0.0022636032663285732, + "block0_q_sharpness": 0.21571655571460724, + "block0_k_sharpness": 0.022720182314515114, + "block0_v_sharpness": -6.591163635253906, + "block0_o_sharpness": -0.20097514986991882, + "block0_mlp_win_sharpness": -0.03898860141634941, + "block0_mlp_wout_sharpness": -0.05833159759640694, + "block3_q_sharpness": 5.405714182415977e-05, + "block3_k_sharpness": 0.031143778935074806, + "block3_v_sharpness": -0.01515151746571064, + "block3_o_sharpness": 0.004696099553257227, + "block3_mlp_win_sharpness": -0.007617915514856577, + "block3_mlp_wout_sharpness": -0.0007188617018982768, + "block7_q_sharpness": 0.0001513295283075422, + "block7_k_sharpness": 8.033290214370936e-05, + "block7_v_sharpness": 0.015474901534616947, + "block7_o_sharpness": 0.00011445838026702404, + "block7_mlp_win_sharpness": 0.0017517171800136566, + "block7_mlp_wout_sharpness": 0.0001494417665526271, + "block11_q_sharpness": 0.0004285583272576332, + "block11_k_sharpness": 0.0001185213986900635, + "block11_v_sharpness": 0.00019401531608309597, + "block11_o_sharpness": 8.311649435199797e-05, + "block11_mlp_win_sharpness": 0.0017546408344060183, + "block11_mlp_wout_sharpness": 0.002464554039761424, + "sum_layer_numerators": -2.177435278243342e-08, + "block_diag_sharpness": -0.03324559070313639, + "cross_layer_sharpness": -0.0014548290506535139 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1500.json new file mode 100644 index 0000000000000000000000000000000000000000..f47e8464d29443b069a4e3768fc4d44bf6e48b4d --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_1500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.409280300140381, + "total_l1_linf_norm": 20618.3671875, + "total_spectral_norm": 2.409280300140381, + "embed_lm_head_update_fnorm": 1.3305307626724243, + "embed_lm_head_max_l1_linf_norm": 0.3338589668273926, + "embed_lm_head_max_spectral_norm": 0.27997735142707825, + "layer_1_update_fnorm": 0.5944745540618896, + "layer_1_max_l1_linf_norm": 0.42916542291641235, + "layer_1_max_spectral_norm": 0.012047868221998215, + "layer_2_update_fnorm": 0.5288812518119812, + "layer_2_max_l1_linf_norm": 0.41118675470352173, + "layer_2_max_spectral_norm": 0.012057353742420673, + "layer_3_update_fnorm": 0.524431049823761, + "layer_3_max_l1_linf_norm": 
0.4081832766532898, + "layer_3_max_spectral_norm": 0.012056724168360233, + "layer_4_update_fnorm": 0.5473325252532959, + "layer_4_max_l1_linf_norm": 0.39198052883148193, + "layer_4_max_spectral_norm": 0.01204774435609579, + "layer_5_update_fnorm": 0.5737541317939758, + "layer_5_max_l1_linf_norm": 0.40149950981140137, + "layer_5_max_spectral_norm": 0.01205001026391983, + "layer_6_update_fnorm": 0.585416853427887, + "layer_6_max_l1_linf_norm": 0.41250380873680115, + "layer_6_max_spectral_norm": 0.012066319584846497, + "layer_7_update_fnorm": 0.5931315422058105, + "layer_7_max_l1_linf_norm": 0.4125750660896301, + "layer_7_max_spectral_norm": 0.012043511494994164, + "layer_8_update_fnorm": 0.5960122346878052, + "layer_8_max_l1_linf_norm": 0.41668567061424255, + "layer_8_max_spectral_norm": 0.012045837938785553, + "layer_9_update_fnorm": 0.6002507209777832, + "layer_9_max_l1_linf_norm": 0.4114213287830353, + "layer_9_max_spectral_norm": 0.012043784372508526, + "layer_10_update_fnorm": 0.6011089086532593, + "layer_10_max_l1_linf_norm": 0.4116894602775574, + "layer_10_max_spectral_norm": 0.012042555026710033, + "layer_11_update_fnorm": 0.6017870903015137, + "layer_11_max_l1_linf_norm": 0.40970081090927124, + "layer_11_max_spectral_norm": 0.01204619463533163, + "layer_12_update_fnorm": 0.603224515914917, + "layer_12_max_l1_linf_norm": 0.39987462759017944, + "layer_12_max_spectral_norm": 0.012043209746479988, + "block0_q_update_fnorm": 0.24305696785449982, + "block0_q_max_l1_linf_norm": 0.20672821998596191, + "block0_q_max_spectral_norm": 0.012042323127388954, + "block0_k_update_fnorm": 0.24426089227199554, + "block0_k_max_l1_linf_norm": 0.20716845989227295, + "block0_k_max_spectral_norm": 0.012043891474604607, + "block0_v_update_fnorm": 0.2054334580898285, + "block0_v_max_l1_linf_norm": 0.1955603063106537, + "block0_v_max_spectral_norm": 0.012033424340188503, + "block0_o_update_fnorm": 0.23090441524982452, + "block0_o_max_l1_linf_norm": 0.19462545216083527, + "block0_o_max_spectral_norm": 0.01204204186797142, + "block0_mlp_win_update_fnorm": 0.26530900597572327, + "block0_mlp_win_max_l1_linf_norm": 0.14953862130641937, + "block0_mlp_win_max_spectral_norm": 0.012047868221998215, + "block0_mlp_wout_update_fnorm": 0.2620120346546173, + "block0_mlp_wout_max_l1_linf_norm": 0.42916542291641235, + "block0_mlp_wout_max_spectral_norm": 0.012045140378177166, + "block3_q_update_fnorm": 0.20869460701942444, + "block3_q_max_l1_linf_norm": 0.20869436860084534, + "block3_q_max_spectral_norm": 0.012037931010127068, + "block3_k_update_fnorm": 0.19104938209056854, + "block3_k_max_l1_linf_norm": 0.21138712763786316, + "block3_k_max_spectral_norm": 0.012037738226354122, + "block3_v_update_fnorm": 0.1943739801645279, + "block3_v_max_l1_linf_norm": 0.20400014519691467, + "block3_v_max_spectral_norm": 0.012034007348120213, + "block3_o_update_fnorm": 0.23410944640636444, + "block3_o_max_l1_linf_norm": 0.1985754370689392, + "block3_o_max_spectral_norm": 0.012043984606862068, + "block3_mlp_win_update_fnorm": 0.26536494493484497, + "block3_mlp_win_max_l1_linf_norm": 0.17788231372833252, + "block3_mlp_win_max_spectral_norm": 0.01204774435609579, + "block3_mlp_wout_update_fnorm": 0.23742656409740448, + "block3_mlp_wout_max_l1_linf_norm": 0.39198052883148193, + "block3_mlp_wout_max_spectral_norm": 0.011356694623827934, + "block7_q_update_fnorm": 0.242941752076149, + "block7_q_max_l1_linf_norm": 0.21045061945915222, + "block7_q_max_spectral_norm": 0.012039304710924625, + "block7_k_update_fnorm": 0.24252234399318695, + 
"block7_k_max_l1_linf_norm": 0.21701902151107788, + "block7_k_max_spectral_norm": 0.01204475574195385, + "block7_v_update_fnorm": 0.23637469112873077, + "block7_v_max_l1_linf_norm": 0.21199463307857513, + "block7_v_max_spectral_norm": 0.012041234411299229, + "block7_o_update_fnorm": 0.24793657660484314, + "block7_o_max_l1_linf_norm": 0.20753002166748047, + "block7_o_max_spectral_norm": 0.012045837938785553, + "block7_mlp_win_update_fnorm": 0.24080537259578705, + "block7_mlp_win_max_l1_linf_norm": 0.1653691530227661, + "block7_mlp_win_max_spectral_norm": 0.011556660756468773, + "block7_mlp_wout_update_fnorm": 0.2489119917154312, + "block7_mlp_wout_max_l1_linf_norm": 0.41668567061424255, + "block7_mlp_wout_max_spectral_norm": 0.011397572234272957, + "block11_q_update_fnorm": 0.24883726239204407, + "block11_q_max_l1_linf_norm": 0.21107828617095947, + "block11_q_max_spectral_norm": 0.012034840881824493, + "block11_k_update_fnorm": 0.249177485704422, + "block11_k_max_l1_linf_norm": 0.21075439453125, + "block11_k_max_spectral_norm": 0.012043005786836147, + "block11_v_update_fnorm": 0.24632219970226288, + "block11_v_max_l1_linf_norm": 0.2079174667596817, + "block11_v_max_spectral_norm": 0.012042845599353313, + "block11_o_update_fnorm": 0.2491564303636551, + "block11_o_max_l1_linf_norm": 0.20751672983169556, + "block11_o_max_spectral_norm": 0.012043209746479988, + "block11_mlp_win_update_fnorm": 0.2443448305130005, + "block11_mlp_win_max_l1_linf_norm": 0.14927589893341064, + "block11_mlp_win_max_spectral_norm": 0.011370048858225346, + "block11_mlp_wout_update_fnorm": 0.23937413096427917, + "block11_mlp_wout_max_l1_linf_norm": 0.39987462759017944, + "block11_mlp_wout_max_spectral_norm": 0.011400450952351093, + "total_sharpness": 0.00517876073718071, + "block_total_sharpness": 0.00657278299331665, + "v_norm_block": 2.008561372756958, + "v_T_H_v_block": 0.02651670202612877, + "v_norm": 2.409280300140381, + "ip_v_neg_g_hvp": 0.055775877088308334, + "cos_v_neg_g_hvp": 0.04486265033483505, + "g_hvp_norm": 0.5160290598869324, + "ip_v_neg_g_t": 0.05611567571759224, + "cos_v_neg_g_t": 0.05128007382154465, + "g_t_norm": 0.4542011320590973, + "g_norm": 0.5160290598869324, + "hv_norm": 0.5912302732467651, + "cos_v_hv": 0.021103598177433014, + "hg_norm": 13.837800979614258, + "cos_g_hg": 0.6367314457893372, + "v_parallel_norm": 0.007516834419220686, + "v_perp_norm": 2.409268617630005, + "embed_lm_head_v_norm": 1.3305307626724243, + "embed_lm_head_cos_v_neg_g": 0.09743329137563705, + "layer_1_v_norm": 0.5944745540618896, + "layer_1_cos_v_neg_g": 0.03145920857787132, + "layer_2_v_norm": 0.5288812518119812, + "layer_2_cos_v_neg_g": 0.036180395632982254, + "layer_3_v_norm": 0.524431049823761, + "layer_3_cos_v_neg_g": 0.03332484886050224, + "layer_4_v_norm": 0.5473325252532959, + "layer_4_cos_v_neg_g": 0.03588441386818886, + "layer_5_v_norm": 0.5737541317939758, + "layer_5_cos_v_neg_g": 0.04337474703788757, + "layer_6_v_norm": 0.585416853427887, + "layer_6_cos_v_neg_g": 0.04577118158340454, + "layer_7_v_norm": 0.5931315422058105, + "layer_7_cos_v_neg_g": 0.0483197346329689, + "layer_8_v_norm": 0.5960122346878052, + "layer_8_cos_v_neg_g": 0.04677361994981766, + "layer_9_v_norm": 0.6002507209777832, + "layer_9_cos_v_neg_g": 0.05271855741739273, + "layer_10_v_norm": 0.6011089086532593, + "layer_10_cos_v_neg_g": 0.057730045169591904, + "layer_11_v_norm": 0.6017870903015137, + "layer_11_cos_v_neg_g": 0.06585463136434555, + "layer_12_v_norm": 0.603224515914917, + "layer_12_cos_v_neg_g": 0.08575078099966049, + 
"block0_q_v_norm": 0.24305696785449982, + "block0_q_cos_v_neg_g": 0.07020294666290283, + "block0_k_v_norm": 0.24426089227199554, + "block0_k_cos_v_neg_g": 0.07851026207208633, + "block0_v_v_norm": 0.2054334580898285, + "block0_v_cos_v_neg_g": 0.02711147628724575, + "block0_o_v_norm": 0.23090441524982452, + "block0_o_cos_v_neg_g": 0.048956576734781265, + "block0_mlp_win_v_norm": 0.26530900597572327, + "block0_mlp_win_cos_v_neg_g": 0.05930284038186073, + "block0_mlp_wout_v_norm": 0.2620120346546173, + "block0_mlp_wout_cos_v_neg_g": 0.08532118797302246, + "block3_q_v_norm": 0.20869460701942444, + "block3_q_cos_v_neg_g": 0.04663529619574547, + "block3_k_v_norm": 0.19104938209056854, + "block3_k_cos_v_neg_g": 0.044313542544841766, + "block3_v_v_norm": 0.1943739801645279, + "block3_v_cos_v_neg_g": 0.029828744009137154, + "block3_o_v_norm": 0.23410944640636444, + "block3_o_cos_v_neg_g": 0.05774465203285217, + "block3_mlp_win_v_norm": 0.26536494493484497, + "block3_mlp_win_cos_v_neg_g": 0.04863811284303665, + "block3_mlp_wout_v_norm": 0.23742656409740448, + "block3_mlp_wout_cos_v_neg_g": 0.09202668070793152, + "block7_q_v_norm": 0.242941752076149, + "block7_q_cos_v_neg_g": 0.05659123510122299, + "block7_k_v_norm": 0.24252234399318695, + "block7_k_cos_v_neg_g": 0.0758700966835022, + "block7_v_v_norm": 0.23637469112873077, + "block7_v_cos_v_neg_g": 0.033232398331165314, + "block7_o_v_norm": 0.24793657660484314, + "block7_o_cos_v_neg_g": 0.07365914434194565, + "block7_mlp_win_v_norm": 0.24080537259578705, + "block7_mlp_win_cos_v_neg_g": 0.07671166211366653, + "block7_mlp_wout_v_norm": 0.2489119917154312, + "block7_mlp_wout_cos_v_neg_g": 0.1268312931060791, + "block11_q_v_norm": 0.24883726239204407, + "block11_q_cos_v_neg_g": 0.0931314006447792, + "block11_k_v_norm": 0.249177485704422, + "block11_k_cos_v_neg_g": 0.09573346376419067, + "block11_v_v_norm": 0.24632219970226288, + "block11_v_cos_v_neg_g": 0.06636718660593033, + "block11_o_v_norm": 0.2491564303636551, + "block11_o_cos_v_neg_g": 0.09616030007600784, + "block11_mlp_win_v_norm": 0.2443448305130005, + "block11_mlp_win_cos_v_neg_g": 0.11725275963544846, + "block11_mlp_wout_v_norm": 0.23937413096427917, + "block11_mlp_wout_cos_v_neg_g": 0.09942039102315903, + "embed_lm_head_sharpness": 0.0005195384728722274, + "layer_1_sharpness": 0.006289876531809568, + "layer_2_sharpness": 0.0014196528354659677, + "layer_3_sharpness": 0.0013050655834376812, + "layer_4_sharpness": 0.0012109862873330712, + "layer_5_sharpness": 0.0011659824522212148, + "layer_6_sharpness": 0.0013956364709883928, + "layer_7_sharpness": 0.001718576648272574, + "layer_8_sharpness": 0.0009220180218107998, + "layer_9_sharpness": 0.0005687488592229784, + "layer_10_sharpness": 0.00041393848368898034, + "layer_11_sharpness": 0.0004336038837209344, + "layer_12_sharpness": 0.00042684029904194176, + "block0_q_sharpness": 0.0008261125185526907, + "block0_k_sharpness": 0.0003791135677602142, + "block0_v_sharpness": 0.011536628007888794, + "block0_o_sharpness": 0.0007145138806663454, + "block0_mlp_win_sharpness": 0.0018037193221971393, + "block0_mlp_wout_sharpness": 0.0015163235366344452, + "block3_q_sharpness": 0.00011764312512241304, + "block3_k_sharpness": 0.0014996384270489216, + "block3_v_sharpness": 0.0024117452558130026, + "block3_o_sharpness": 0.00026970726321451366, + "block3_mlp_win_sharpness": 0.0001597547670826316, + "block3_mlp_wout_sharpness": 0.00012343405978754163, + "block7_q_sharpness": 0.00010861523333005607, + "block7_k_sharpness": 0.00014490897592622787, + 
"block7_v_sharpness": 0.0017691490938887, + "block7_o_sharpness": 0.00014358361659105867, + "block7_mlp_win_sharpness": 0.0002847746363840997, + "block7_mlp_wout_sharpness": 0.00013815515558235347, + "block11_q_sharpness": 3.338820533826947e-05, + "block11_k_sharpness": 3.661763184936717e-05, + "block11_v_sharpness": 0.00013287468755152076, + "block11_o_sharpness": 7.526405534008518e-05, + "block11_mlp_win_sharpness": 0.0003036965208593756, + "block11_mlp_wout_sharpness": 0.000598943792283535, + "sum_layer_numerators": 0.005802756076380716, + "block_diag_sharpness": 0.001438348425377791, + "cross_layer_sharpness": 0.005134434567938859 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2000.json new file mode 100644 index 0000000000000000000000000000000000000000..5f4a3325c58b89225a3914c0ad8436122c0b0b01 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.400869131088257, + "total_l1_linf_norm": 20543.154296875, + "total_spectral_norm": 2.400869369506836, + "embed_lm_head_update_fnorm": 1.330980896949768, + "embed_lm_head_max_l1_linf_norm": 0.3511328399181366, + "embed_lm_head_max_spectral_norm": 0.2578089237213135, + "layer_1_update_fnorm": 0.5877026319503784, + "layer_1_max_l1_linf_norm": 0.4396933913230896, + "layer_1_max_spectral_norm": 0.012055939994752407, + "layer_2_update_fnorm": 0.5053511261940002, + "layer_2_max_l1_linf_norm": 0.408344566822052, + "layer_2_max_spectral_norm": 0.01205422542989254, + "layer_3_update_fnorm": 0.5124761462211609, + "layer_3_max_l1_linf_norm": 0.398994117975235, + "layer_3_max_spectral_norm": 0.01238157507032156, + "layer_4_update_fnorm": 0.5514675974845886, + "layer_4_max_l1_linf_norm": 0.3948969542980194, + "layer_4_max_spectral_norm": 0.012048427946865559, + "layer_5_update_fnorm": 0.5775600075721741, + "layer_5_max_l1_linf_norm": 0.4059373140335083, + "layer_5_max_spectral_norm": 0.01204871665686369, + "layer_6_update_fnorm": 0.5877916812896729, + "layer_6_max_l1_linf_norm": 0.41144779324531555, + "layer_6_max_spectral_norm": 0.012059487402439117, + "layer_7_update_fnorm": 0.5973922610282898, + "layer_7_max_l1_linf_norm": 0.4144100546836853, + "layer_7_max_spectral_norm": 0.012063565663993359, + "layer_8_update_fnorm": 0.5945393443107605, + "layer_8_max_l1_linf_norm": 0.41105228662490845, + "layer_8_max_spectral_norm": 0.012047238647937775, + "layer_9_update_fnorm": 0.5982463359832764, + "layer_9_max_l1_linf_norm": 0.4208625555038452, + "layer_9_max_spectral_norm": 0.012041452340781689, + "layer_10_update_fnorm": 0.5967984199523926, + "layer_10_max_l1_linf_norm": 0.4118824899196625, + "layer_10_max_spectral_norm": 0.012039394117891788, + "layer_11_update_fnorm": 0.5986903309822083, + "layer_11_max_l1_linf_norm": 0.4084416925907135, + "layer_11_max_spectral_norm": 0.012048066593706608, + "layer_12_update_fnorm": 0.6026729345321655, + "layer_12_max_l1_linf_norm": 0.3972029387950897, + "layer_12_max_spectral_norm": 0.012045378796756268, + "block0_q_update_fnorm": 0.24466176331043243, + "block0_q_max_l1_linf_norm": 0.2058471441268921, + "block0_q_max_spectral_norm": 0.012044782750308514, + "block0_k_update_fnorm": 0.24559040367603302, + "block0_k_max_l1_linf_norm": 0.20703202486038208, + 
"block0_k_max_spectral_norm": 0.012044252827763557, + "block0_v_update_fnorm": 0.17014040052890778, + "block0_v_max_l1_linf_norm": 0.18183991312980652, + "block0_v_max_spectral_norm": 0.012029641307890415, + "block0_o_update_fnorm": 0.22896867990493774, + "block0_o_max_l1_linf_norm": 0.19292622804641724, + "block0_o_max_spectral_norm": 0.012041092850267887, + "block0_mlp_win_update_fnorm": 0.27146655321121216, + "block0_mlp_win_max_l1_linf_norm": 0.15774263441562653, + "block0_mlp_win_max_spectral_norm": 0.012055939994752407, + "block0_mlp_wout_update_fnorm": 0.26466256380081177, + "block0_mlp_wout_max_l1_linf_norm": 0.4396933913230896, + "block0_mlp_wout_max_spectral_norm": 0.01204420905560255, + "block3_q_update_fnorm": 0.2130843549966812, + "block3_q_max_l1_linf_norm": 0.21135003864765167, + "block3_q_max_spectral_norm": 0.01203601248562336, + "block3_k_update_fnorm": 0.1985660046339035, + "block3_k_max_l1_linf_norm": 0.2084316611289978, + "block3_k_max_spectral_norm": 0.012040836736559868, + "block3_v_update_fnorm": 0.18799063563346863, + "block3_v_max_l1_linf_norm": 0.2008262574672699, + "block3_v_max_spectral_norm": 0.012031667865812778, + "block3_o_update_fnorm": 0.23569057881832123, + "block3_o_max_l1_linf_norm": 0.19717110693454742, + "block3_o_max_spectral_norm": 0.01204376108944416, + "block3_mlp_win_update_fnorm": 0.26573047041893005, + "block3_mlp_win_max_l1_linf_norm": 0.18651066720485687, + "block3_mlp_win_max_spectral_norm": 0.012048427946865559, + "block3_mlp_wout_update_fnorm": 0.24006451666355133, + "block3_mlp_wout_max_l1_linf_norm": 0.3948969542980194, + "block3_mlp_wout_max_spectral_norm": 0.011392604559659958, + "block7_q_update_fnorm": 0.24227537214756012, + "block7_q_max_l1_linf_norm": 0.20765134692192078, + "block7_q_max_spectral_norm": 0.012047238647937775, + "block7_k_update_fnorm": 0.24421803653240204, + "block7_k_max_l1_linf_norm": 0.21191748976707458, + "block7_k_max_spectral_norm": 0.012043885886669159, + "block7_v_update_fnorm": 0.22622767090797424, + "block7_v_max_l1_linf_norm": 0.20764809846878052, + "block7_v_max_spectral_norm": 0.01204109936952591, + "block7_o_update_fnorm": 0.24768514931201935, + "block7_o_max_l1_linf_norm": 0.20885923504829407, + "block7_o_max_spectral_norm": 0.012044530361890793, + "block7_mlp_win_update_fnorm": 0.2470664530992508, + "block7_mlp_win_max_l1_linf_norm": 0.15612539649009705, + "block7_mlp_win_max_spectral_norm": 0.012042182497680187, + "block7_mlp_wout_update_fnorm": 0.24787583947181702, + "block7_mlp_wout_max_l1_linf_norm": 0.41105228662490845, + "block7_mlp_wout_max_spectral_norm": 0.011388057842850685, + "block11_q_update_fnorm": 0.2494693100452423, + "block11_q_max_l1_linf_norm": 0.2122918665409088, + "block11_q_max_spectral_norm": 0.01204388216137886, + "block11_k_update_fnorm": 0.24966992437839508, + "block11_k_max_l1_linf_norm": 0.21147917211055756, + "block11_k_max_spectral_norm": 0.012043172493577003, + "block11_v_update_fnorm": 0.2463267743587494, + "block11_v_max_l1_linf_norm": 0.20933623611927032, + "block11_v_max_spectral_norm": 0.012043865397572517, + "block11_o_update_fnorm": 0.24896256625652313, + "block11_o_max_l1_linf_norm": 0.2104058861732483, + "block11_o_max_spectral_norm": 0.012045378796756268, + "block11_mlp_win_update_fnorm": 0.24274064600467682, + "block11_mlp_win_max_l1_linf_norm": 0.15107296407222748, + "block11_mlp_win_max_spectral_norm": 0.011390081606805325, + "block11_mlp_wout_update_fnorm": 0.23859663307666779, + "block11_mlp_wout_max_l1_linf_norm": 0.3972029387950897, + 
"block11_mlp_wout_max_spectral_norm": 0.01139793824404478, + "total_sharpness": 0.008680427446961403, + "block_total_sharpness": 0.011162047274410725, + "v_norm_block": 1.9981650114059448, + "v_T_H_v_block": 0.04456629604101181, + "v_norm": 2.400869131088257, + "ip_v_neg_g_hvp": 0.06352865695953369, + "cos_v_neg_g_hvp": 0.03994547575712204, + "g_hvp_norm": 0.6624202132225037, + "ip_v_neg_g_t": 0.06378669291734695, + "cos_v_neg_g_t": 0.04545082151889801, + "g_t_norm": 0.5845475792884827, + "g_norm": 0.6624202132225037, + "hv_norm": 0.9376031160354614, + "cos_v_hv": 0.022227497771382332, + "hg_norm": 59.82093811035156, + "cos_g_hg": 0.4477449953556061, + "v_parallel_norm": 0.005970210302621126, + "v_perp_norm": 2.4008617401123047, + "embed_lm_head_v_norm": 1.330980896949768, + "embed_lm_head_cos_v_neg_g": 0.08007574081420898, + "layer_1_v_norm": 0.5877026319503784, + "layer_1_cos_v_neg_g": 0.026950612664222717, + "layer_2_v_norm": 0.5053511261940002, + "layer_2_cos_v_neg_g": 0.033472225069999695, + "layer_3_v_norm": 0.5124761462211609, + "layer_3_cos_v_neg_g": 0.03742719069123268, + "layer_4_v_norm": 0.5514675974845886, + "layer_4_cos_v_neg_g": 0.041308242827653885, + "layer_5_v_norm": 0.5775600075721741, + "layer_5_cos_v_neg_g": 0.045585375279188156, + "layer_6_v_norm": 0.5877916812896729, + "layer_6_cos_v_neg_g": 0.044855330139398575, + "layer_7_v_norm": 0.5973922610282898, + "layer_7_cos_v_neg_g": 0.04693763703107834, + "layer_8_v_norm": 0.5945392847061157, + "layer_8_cos_v_neg_g": 0.04662218317389488, + "layer_9_v_norm": 0.5982463359832764, + "layer_9_cos_v_neg_g": 0.04948993772268295, + "layer_10_v_norm": 0.5967984199523926, + "layer_10_cos_v_neg_g": 0.052060581743717194, + "layer_11_v_norm": 0.5986903309822083, + "layer_11_cos_v_neg_g": 0.06295601278543472, + "layer_12_v_norm": 0.6026729345321655, + "layer_12_cos_v_neg_g": 0.08059154450893402, + "block0_q_v_norm": 0.24466176331043243, + "block0_q_cos_v_neg_g": 0.06432078778743744, + "block0_k_v_norm": 0.24559040367603302, + "block0_k_cos_v_neg_g": 0.06955913454294205, + "block0_v_v_norm": 0.17014040052890778, + "block0_v_cos_v_neg_g": 0.028215810656547546, + "block0_o_v_norm": 0.22896867990493774, + "block0_o_cos_v_neg_g": 0.05539291724562645, + "block0_mlp_win_v_norm": 0.27146655321121216, + "block0_mlp_win_cos_v_neg_g": 0.05842820927500725, + "block0_mlp_wout_v_norm": 0.26466256380081177, + "block0_mlp_wout_cos_v_neg_g": 0.07696279138326645, + "block3_q_v_norm": 0.2130843549966812, + "block3_q_cos_v_neg_g": 0.05573688820004463, + "block3_k_v_norm": 0.1985660046339035, + "block3_k_cos_v_neg_g": 0.0708124116063118, + "block3_v_v_norm": 0.18799063563346863, + "block3_v_cos_v_neg_g": 0.03738468512892723, + "block3_o_v_norm": 0.23569057881832123, + "block3_o_cos_v_neg_g": 0.07369240373373032, + "block3_mlp_win_v_norm": 0.26573047041893005, + "block3_mlp_win_cos_v_neg_g": 0.053057022392749786, + "block3_mlp_wout_v_norm": 0.24006451666355133, + "block3_mlp_wout_cos_v_neg_g": 0.10699395835399628, + "block7_q_v_norm": 0.24227537214756012, + "block7_q_cos_v_neg_g": 0.06768925487995148, + "block7_k_v_norm": 0.24421803653240204, + "block7_k_cos_v_neg_g": 0.09564699977636337, + "block7_v_v_norm": 0.22622767090797424, + "block7_v_cos_v_neg_g": 0.04080040380358696, + "block7_o_v_norm": 0.24768514931201935, + "block7_o_cos_v_neg_g": 0.08140574395656586, + "block7_mlp_win_v_norm": 0.2470664530992508, + "block7_mlp_win_cos_v_neg_g": 0.07350101321935654, + "block7_mlp_wout_v_norm": 0.24787583947181702, + "block7_mlp_wout_cos_v_neg_g": 
0.13818252086639404, + "block11_q_v_norm": 0.2494693100452423, + "block11_q_cos_v_neg_g": 0.10216893255710602, + "block11_k_v_norm": 0.24966992437839508, + "block11_k_cos_v_neg_g": 0.1106676235795021, + "block11_v_v_norm": 0.2463267743587494, + "block11_v_cos_v_neg_g": 0.07573333382606506, + "block11_o_v_norm": 0.24896256625652313, + "block11_o_cos_v_neg_g": 0.10161705315113068, + "block11_mlp_win_v_norm": 0.24274064600467682, + "block11_mlp_win_cos_v_neg_g": 0.11204981803894043, + "block11_mlp_wout_v_norm": 0.23859663307666779, + "block11_mlp_wout_cos_v_neg_g": 0.08613083511590958, + "embed_lm_head_sharpness": 0.0005301825585775077, + "layer_1_sharpness": 0.009169661439955235, + "layer_2_sharpness": 0.002475456101819873, + "layer_3_sharpness": 0.002220466732978821, + "layer_4_sharpness": 0.0021192869171500206, + "layer_5_sharpness": 0.0016243787249550223, + "layer_6_sharpness": 0.001933154882863164, + "layer_7_sharpness": 0.00182132248301059, + "layer_8_sharpness": 0.0017987940227612853, + "layer_9_sharpness": 0.0009163285722024739, + "layer_10_sharpness": 0.0005989281344227493, + "layer_11_sharpness": 0.0006099711754359305, + "layer_12_sharpness": 0.0007750692893750966, + "block0_q_sharpness": 0.0016609721351414919, + "block0_k_sharpness": 0.0007350678206421435, + "block0_v_sharpness": 0.022238515317440033, + "block0_o_sharpness": 0.0014275475405156612, + "block0_mlp_win_sharpness": 0.0019311226205900311, + "block0_mlp_wout_sharpness": 0.0016539368079975247, + "block3_q_sharpness": 0.00018513134273234755, + "block3_k_sharpness": 0.002005932154133916, + "block3_v_sharpness": 0.003826978849247098, + "block3_o_sharpness": 0.00038629889604635537, + "block3_mlp_win_sharpness": 0.00023247524222824723, + "block3_mlp_wout_sharpness": 0.00014506178558804095, + "block7_q_sharpness": 0.00018301373347640038, + "block7_k_sharpness": 0.00021179114992264658, + "block7_v_sharpness": 0.0036208080127835274, + "block7_o_sharpness": 0.00018285101396031678, + "block7_mlp_win_sharpness": 0.0005499640246853232, + "block7_mlp_wout_sharpness": 0.00013971827866043895, + "block11_q_sharpness": 4.004669972346164e-05, + "block11_k_sharpness": 5.162557135918178e-05, + "block11_v_sharpness": 0.0002053446223726496, + "block11_o_sharpness": 6.932741962373257e-05, + "block11_mlp_win_sharpness": 0.000499737448990345, + "block11_mlp_wout_sharpness": 0.0010296982945874333, + "sum_layer_numerators": 0.008564004396477679, + "block_diag_sharpness": 0.002144935225195208, + "cross_layer_sharpness": 0.009017112049215516 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2500.json new file mode 100644 index 0000000000000000000000000000000000000000..5ad222f3ddafc98c55b4a8ae7f9b267c7d73332e --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_2500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.393455982208252, + "total_l1_linf_norm": 20475.61328125, + "total_spectral_norm": 2.393455982208252, + "embed_lm_head_update_fnorm": 1.3301358222961426, + "embed_lm_head_max_l1_linf_norm": 0.3439089059829712, + "embed_lm_head_max_spectral_norm": 0.2267378866672516, + "layer_1_update_fnorm": 0.5871920585632324, + "layer_1_max_l1_linf_norm": 0.4459496736526489, + "layer_1_max_spectral_norm": 0.012051813304424286, + 
"layer_2_update_fnorm": 0.5024411082267761, + "layer_2_max_l1_linf_norm": 0.4133606553077698, + "layer_2_max_spectral_norm": 0.012061826884746552, + "layer_3_update_fnorm": 0.4804835319519043, + "layer_3_max_l1_linf_norm": 0.39694803953170776, + "layer_3_max_spectral_norm": 0.013146850280463696, + "layer_4_update_fnorm": 0.5510693192481995, + "layer_4_max_l1_linf_norm": 0.3978492021560669, + "layer_4_max_spectral_norm": 0.012043974362313747, + "layer_5_update_fnorm": 0.5802663564682007, + "layer_5_max_l1_linf_norm": 0.4129101037979126, + "layer_5_max_spectral_norm": 0.012045422568917274, + "layer_6_update_fnorm": 0.5908129215240479, + "layer_6_max_l1_linf_norm": 0.41210636496543884, + "layer_6_max_spectral_norm": 0.012048449367284775, + "layer_7_update_fnorm": 0.596086323261261, + "layer_7_max_l1_linf_norm": 0.4126014709472656, + "layer_7_max_spectral_norm": 0.01206184457987547, + "layer_8_update_fnorm": 0.5952334403991699, + "layer_8_max_l1_linf_norm": 0.41562899947166443, + "layer_8_max_spectral_norm": 0.012063043192029, + "layer_9_update_fnorm": 0.5974695682525635, + "layer_9_max_l1_linf_norm": 0.41182902455329895, + "layer_9_max_spectral_norm": 0.012046164833009243, + "layer_10_update_fnorm": 0.5970394015312195, + "layer_10_max_l1_linf_norm": 0.41347840428352356, + "layer_10_max_spectral_norm": 0.012044692412018776, + "layer_11_update_fnorm": 0.5958213806152344, + "layer_11_max_l1_linf_norm": 0.4072948694229126, + "layer_11_max_spectral_norm": 0.01204412430524826, + "layer_12_update_fnorm": 0.6031308174133301, + "layer_12_max_l1_linf_norm": 0.39590978622436523, + "layer_12_max_spectral_norm": 0.012045596726238728, + "block0_q_update_fnorm": 0.24481801688671112, + "block0_q_max_l1_linf_norm": 0.20964859426021576, + "block0_q_max_spectral_norm": 0.012045005336403847, + "block0_k_update_fnorm": 0.24448013305664062, + "block0_k_max_l1_linf_norm": 0.21171912550926208, + "block0_k_max_spectral_norm": 0.012042529881000519, + "block0_v_update_fnorm": 0.1655925065279007, + "block0_v_max_l1_linf_norm": 0.1910582035779953, + "block0_v_max_spectral_norm": 0.012030706740915775, + "block0_o_update_fnorm": 0.22786062955856323, + "block0_o_max_l1_linf_norm": 0.19177016615867615, + "block0_o_max_spectral_norm": 0.012042887508869171, + "block0_mlp_win_update_fnorm": 0.27372679114341736, + "block0_mlp_win_max_l1_linf_norm": 0.16219347715377808, + "block0_mlp_win_max_spectral_norm": 0.012051813304424286, + "block0_mlp_wout_update_fnorm": 0.2659493088722229, + "block0_mlp_wout_max_l1_linf_norm": 0.4459496736526489, + "block0_mlp_wout_max_spectral_norm": 0.012042355723679066, + "block3_q_update_fnorm": 0.21027693152427673, + "block3_q_max_l1_linf_norm": 0.20855413377285004, + "block3_q_max_spectral_norm": 0.012038210406899452, + "block3_k_update_fnorm": 0.20370294153690338, + "block3_k_max_l1_linf_norm": 0.21225735545158386, + "block3_k_max_spectral_norm": 0.012043974362313747, + "block3_v_update_fnorm": 0.18326720595359802, + "block3_v_max_l1_linf_norm": 0.20075026154518127, + "block3_v_max_spectral_norm": 0.01203254796564579, + "block3_o_update_fnorm": 0.2362114042043686, + "block3_o_max_l1_linf_norm": 0.20055338740348816, + "block3_o_max_spectral_norm": 0.012037514708936214, + "block3_mlp_win_update_fnorm": 0.26454076170921326, + "block3_mlp_win_max_l1_linf_norm": 0.19461257755756378, + "block3_mlp_win_max_spectral_norm": 0.012043518014252186, + "block3_mlp_wout_update_fnorm": 0.24170900881290436, + "block3_mlp_wout_max_l1_linf_norm": 0.3977300226688385, + "block3_mlp_wout_max_spectral_norm": 
0.011409125290811062, + "block7_q_update_fnorm": 0.2417549192905426, + "block7_q_max_l1_linf_norm": 0.2081369012594223, + "block7_q_max_spectral_norm": 0.0120443906635046, + "block7_k_update_fnorm": 0.24558331072330475, + "block7_k_max_l1_linf_norm": 0.21051180362701416, + "block7_k_max_spectral_norm": 0.012040527537465096, + "block7_v_update_fnorm": 0.22192633152008057, + "block7_v_max_l1_linf_norm": 0.20902737975120544, + "block7_v_max_spectral_norm": 0.012039508670568466, + "block7_o_update_fnorm": 0.24760867655277252, + "block7_o_max_l1_linf_norm": 0.20835624635219574, + "block7_o_max_spectral_norm": 0.012043993920087814, + "block7_mlp_win_update_fnorm": 0.2518932521343231, + "block7_mlp_win_max_l1_linf_norm": 0.1576738953590393, + "block7_mlp_win_max_spectral_norm": 0.012063043192029, + "block7_mlp_wout_update_fnorm": 0.24781396985054016, + "block7_mlp_wout_max_l1_linf_norm": 0.41562899947166443, + "block7_mlp_wout_max_spectral_norm": 0.011377787217497826, + "block11_q_update_fnorm": 0.24922341108322144, + "block11_q_max_l1_linf_norm": 0.21168015897274017, + "block11_q_max_spectral_norm": 0.012036333791911602, + "block11_k_update_fnorm": 0.25014108419418335, + "block11_k_max_l1_linf_norm": 0.21145117282867432, + "block11_k_max_spectral_norm": 0.012042838148772717, + "block11_v_update_fnorm": 0.2463545799255371, + "block11_v_max_l1_linf_norm": 0.20816683769226074, + "block11_v_max_spectral_norm": 0.01204462256282568, + "block11_o_update_fnorm": 0.2491159439086914, + "block11_o_max_l1_linf_norm": 0.2081158310174942, + "block11_o_max_spectral_norm": 0.012045596726238728, + "block11_mlp_win_update_fnorm": 0.24440819025039673, + "block11_mlp_win_max_l1_linf_norm": 0.16003260016441345, + "block11_mlp_win_max_spectral_norm": 0.011371039785444736, + "block11_mlp_wout_update_fnorm": 0.2375744730234146, + "block11_mlp_wout_max_l1_linf_norm": 0.393539696931839, + "block11_mlp_wout_max_spectral_norm": 0.011368012055754662, + "total_sharpness": 0.006429749075323343, + "block_total_sharpness": 0.00821381714195013, + "v_norm_block": 1.989816665649414, + "v_T_H_v_block": 0.03252154588699341, + "v_norm": 2.393455982208252, + "ip_v_neg_g_hvp": 0.060079701244831085, + "cos_v_neg_g_hvp": 0.03968821465969086, + "g_hvp_norm": 0.6324712038040161, + "ip_v_neg_g_t": 0.06063782796263695, + "cos_v_neg_g_t": 0.04456779733300209, + "g_t_norm": 0.56845623254776, + "g_norm": 0.6324712038040161, + "hv_norm": 0.7685278654098511, + "cos_v_hv": 0.020024416968226433, + "hg_norm": 183.44529724121094, + "cos_g_hg": 0.0025046905502676964, + "v_parallel_norm": 0.006083470769226551, + "v_perp_norm": 2.3934483528137207, + "embed_lm_head_v_norm": 1.3301358222961426, + "embed_lm_head_cos_v_neg_g": 0.08245643228292465, + "layer_1_v_norm": 0.5871920585632324, + "layer_1_cos_v_neg_g": 0.029518865048885345, + "layer_2_v_norm": 0.5024411082267761, + "layer_2_cos_v_neg_g": 0.029169008135795593, + "layer_3_v_norm": 0.4804835021495819, + "layer_3_cos_v_neg_g": 0.03228950500488281, + "layer_4_v_norm": 0.5510693192481995, + "layer_4_cos_v_neg_g": 0.03581393510103226, + "layer_5_v_norm": 0.5802663564682007, + "layer_5_cos_v_neg_g": 0.04195166379213333, + "layer_6_v_norm": 0.5908129215240479, + "layer_6_cos_v_neg_g": 0.04173259064555168, + "layer_7_v_norm": 0.596086323261261, + "layer_7_cos_v_neg_g": 0.041582901030778885, + "layer_8_v_norm": 0.5952334403991699, + "layer_8_cos_v_neg_g": 0.039766404777765274, + "layer_9_v_norm": 0.5974695682525635, + "layer_9_cos_v_neg_g": 0.04300565645098686, + "layer_10_v_norm": 0.5970394015312195, + 
"layer_10_cos_v_neg_g": 0.045247916132211685, + "layer_11_v_norm": 0.5958214402198792, + "layer_11_cos_v_neg_g": 0.055842265486717224, + "layer_12_v_norm": 0.6031308174133301, + "layer_12_cos_v_neg_g": 0.09065498411655426, + "block0_q_v_norm": 0.24481801688671112, + "block0_q_cos_v_neg_g": 0.06586674600839615, + "block0_k_v_norm": 0.24448013305664062, + "block0_k_cos_v_neg_g": 0.06537078320980072, + "block0_v_v_norm": 0.1655925065279007, + "block0_v_cos_v_neg_g": 0.03332863375544548, + "block0_o_v_norm": 0.22786062955856323, + "block0_o_cos_v_neg_g": 0.05519820749759674, + "block0_mlp_win_v_norm": 0.27372679114341736, + "block0_mlp_win_cos_v_neg_g": 0.0600140318274498, + "block0_mlp_wout_v_norm": 0.2659493088722229, + "block0_mlp_wout_cos_v_neg_g": 0.07792191952466965, + "block3_q_v_norm": 0.21027693152427673, + "block3_q_cos_v_neg_g": 0.040770918130874634, + "block3_k_v_norm": 0.20370294153690338, + "block3_k_cos_v_neg_g": 0.05114148184657097, + "block3_v_v_norm": 0.18326720595359802, + "block3_v_cos_v_neg_g": 0.03729059919714928, + "block3_o_v_norm": 0.2362114042043686, + "block3_o_cos_v_neg_g": 0.07228736579418182, + "block3_mlp_win_v_norm": 0.26454076170921326, + "block3_mlp_win_cos_v_neg_g": 0.04642297327518463, + "block3_mlp_wout_v_norm": 0.24170900881290436, + "block3_mlp_wout_cos_v_neg_g": 0.10964369773864746, + "block7_q_v_norm": 0.2417549192905426, + "block7_q_cos_v_neg_g": 0.05595948174595833, + "block7_k_v_norm": 0.24558331072330475, + "block7_k_cos_v_neg_g": 0.09676825255155563, + "block7_v_v_norm": 0.22192633152008057, + "block7_v_cos_v_neg_g": 0.039317164570093155, + "block7_o_v_norm": 0.24760867655277252, + "block7_o_cos_v_neg_g": 0.07628536969423294, + "block7_mlp_win_v_norm": 0.2518932521343231, + "block7_mlp_win_cos_v_neg_g": 0.05982857942581177, + "block7_mlp_wout_v_norm": 0.24781396985054016, + "block7_mlp_wout_cos_v_neg_g": 0.13205654919147491, + "block11_q_v_norm": 0.24922341108322144, + "block11_q_cos_v_neg_g": 0.09322081506252289, + "block11_k_v_norm": 0.25014108419418335, + "block11_k_cos_v_neg_g": 0.10639774054288864, + "block11_v_v_norm": 0.2463545799255371, + "block11_v_cos_v_neg_g": 0.07012350112199783, + "block11_o_v_norm": 0.2491159439086914, + "block11_o_cos_v_neg_g": 0.10499367117881775, + "block11_mlp_win_v_norm": 0.24440819025039673, + "block11_mlp_win_cos_v_neg_g": 0.12568891048431396, + "block11_mlp_wout_v_norm": 0.2375744730234146, + "block11_mlp_wout_cos_v_neg_g": 0.10136295109987259, + "embed_lm_head_sharpness": 0.0004775479028467089, + "layer_1_sharpness": 0.006480096839368343, + "layer_2_sharpness": 0.0008718983735889196, + "layer_3_sharpness": 0.0016932609723880887, + "layer_4_sharpness": 0.0012988535454496741, + "layer_5_sharpness": 0.0011554654920473695, + "layer_6_sharpness": 0.001458584563806653, + "layer_7_sharpness": 0.0016904865624383092, + "layer_8_sharpness": 0.0015702680684626102, + "layer_9_sharpness": 0.0008761159842833877, + "layer_10_sharpness": 0.0005777078331448138, + "layer_11_sharpness": 0.0005667311488650739, + "layer_12_sharpness": 0.0004635690711438656, + "block0_q_sharpness": 0.00034114447771571577, + "block0_k_sharpness": 0.0001232004287885502, + "block0_v_sharpness": 0.024085422977805138, + "block0_o_sharpness": 0.0009448983473703265, + "block0_mlp_win_sharpness": 0.0016348535427823663, + "block0_mlp_wout_sharpness": 0.0010663534048944712, + "block3_q_sharpness": 6.311521428870037e-05, + "block3_k_sharpness": 0.00067943922476843, + "block3_v_sharpness": 0.003951343707740307, + "block3_o_sharpness": 0.00024710127036087215, 
+ "block3_mlp_win_sharpness": 0.0001489155547460541, + "block3_mlp_wout_sharpness": 8.793871529633179e-05, + "block7_q_sharpness": 0.00010488618863746524, + "block7_k_sharpness": 0.00013856733858119696, + "block7_v_sharpness": 0.003905664663761854, + "block7_o_sharpness": 0.00012589093239512295, + "block7_mlp_win_sharpness": 0.0004243646690156311, + "block7_mlp_wout_sharpness": 0.00011014060874003917, + "block11_q_sharpness": 3.4917928132927045e-05, + "block11_k_sharpness": 4.640463521354832e-05, + "block11_v_sharpness": 0.00018277343770023435, + "block11_o_sharpness": 6.260881491471082e-05, + "block11_mlp_win_sharpness": 0.0004104121762793511, + "block11_mlp_wout_sharpness": 0.0005159465945325792, + "sum_layer_numerators": 0.006183454384039718, + "block_diag_sharpness": 0.0015617266932100588, + "cross_layer_sharpness": 0.006652090448740071 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3000.json new file mode 100644 index 0000000000000000000000000000000000000000..dc504bc564d8608a9153ac06e6e210209006ea12 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3826518058776855, + "total_l1_linf_norm": 20359.701171875, + "total_spectral_norm": 2.3826515674591064, + "embed_lm_head_update_fnorm": 1.3313124179840088, + "embed_lm_head_max_l1_linf_norm": 0.3588330149650574, + "embed_lm_head_max_spectral_norm": 0.21048572659492493, + "layer_1_update_fnorm": 0.5849788188934326, + "layer_1_max_l1_linf_norm": 0.4434426426887512, + "layer_1_max_spectral_norm": 0.012049746699631214, + "layer_2_update_fnorm": 0.4747047424316406, + "layer_2_max_l1_linf_norm": 0.4170493483543396, + "layer_2_max_spectral_norm": 0.012066812254488468, + "layer_3_update_fnorm": 0.44750335812568665, + "layer_3_max_l1_linf_norm": 0.4140755534172058, + "layer_3_max_spectral_norm": 0.01328236423432827, + "layer_4_update_fnorm": 0.5519772171974182, + "layer_4_max_l1_linf_norm": 0.39955782890319824, + "layer_4_max_spectral_norm": 0.012043967843055725, + "layer_5_update_fnorm": 0.5818566083908081, + "layer_5_max_l1_linf_norm": 0.4066111147403717, + "layer_5_max_spectral_norm": 0.012045645155012608, + "layer_6_update_fnorm": 0.5915240049362183, + "layer_6_max_l1_linf_norm": 0.411673367023468, + "layer_6_max_spectral_norm": 0.012050646357238293, + "layer_7_update_fnorm": 0.5983068346977234, + "layer_7_max_l1_linf_norm": 0.4095476269721985, + "layer_7_max_spectral_norm": 0.012047488242387772, + "layer_8_update_fnorm": 0.5978111028671265, + "layer_8_max_l1_linf_norm": 0.41115304827690125, + "layer_8_max_spectral_norm": 0.012065298855304718, + "layer_9_update_fnorm": 0.5973736643791199, + "layer_9_max_l1_linf_norm": 0.41559261083602905, + "layer_9_max_spectral_norm": 0.012045549228787422, + "layer_10_update_fnorm": 0.5961804986000061, + "layer_10_max_l1_linf_norm": 0.41206902265548706, + "layer_10_max_spectral_norm": 0.012044789269566536, + "layer_11_update_fnorm": 0.5936069488525391, + "layer_11_max_l1_linf_norm": 0.4082704782485962, + "layer_11_max_spectral_norm": 0.012041904032230377, + "layer_12_update_fnorm": 0.6030669808387756, + "layer_12_max_l1_linf_norm": 0.3988605737686157, + "layer_12_max_spectral_norm": 0.012043245136737823, + "block0_q_update_fnorm": 
0.24343878030776978, + "block0_q_max_l1_linf_norm": 0.20673342049121857, + "block0_q_max_spectral_norm": 0.012042499147355556, + "block0_k_update_fnorm": 0.24321800470352173, + "block0_k_max_l1_linf_norm": 0.20725354552268982, + "block0_k_max_spectral_norm": 0.012039841152727604, + "block0_v_update_fnorm": 0.16167621314525604, + "block0_v_max_l1_linf_norm": 0.20185787975788116, + "block0_v_max_spectral_norm": 0.012029631994664669, + "block0_o_update_fnorm": 0.2251719832420349, + "block0_o_max_l1_linf_norm": 0.19054001569747925, + "block0_o_max_spectral_norm": 0.012043123133480549, + "block0_mlp_win_update_fnorm": 0.2742367386817932, + "block0_mlp_win_max_l1_linf_norm": 0.16596153378486633, + "block0_mlp_win_max_spectral_norm": 0.012049746699631214, + "block0_mlp_wout_update_fnorm": 0.26761743426322937, + "block0_mlp_wout_max_l1_linf_norm": 0.4434426426887512, + "block0_mlp_wout_max_spectral_norm": 0.012045294046401978, + "block3_q_update_fnorm": 0.2121352255344391, + "block3_q_max_l1_linf_norm": 0.21232986450195312, + "block3_q_max_spectral_norm": 0.012043658643960953, + "block3_k_update_fnorm": 0.20488668978214264, + "block3_k_max_l1_linf_norm": 0.21002335846424103, + "block3_k_max_spectral_norm": 0.01203982625156641, + "block3_v_update_fnorm": 0.18136993050575256, + "block3_v_max_l1_linf_norm": 0.20135298371315002, + "block3_v_max_spectral_norm": 0.012031608261168003, + "block3_o_update_fnorm": 0.23627832531929016, + "block3_o_max_l1_linf_norm": 0.19779758155345917, + "block3_o_max_spectral_norm": 0.012037829495966434, + "block3_mlp_win_update_fnorm": 0.2652098536491394, + "block3_mlp_win_max_l1_linf_norm": 0.18707340955734253, + "block3_mlp_win_max_spectral_norm": 0.012043967843055725, + "block3_mlp_wout_update_fnorm": 0.24178917706012726, + "block3_mlp_wout_max_l1_linf_norm": 0.39955782890319824, + "block3_mlp_wout_max_spectral_norm": 0.011393877677619457, + "block7_q_update_fnorm": 0.24165956676006317, + "block7_q_max_l1_linf_norm": 0.20949484407901764, + "block7_q_max_spectral_norm": 0.012045142240822315, + "block7_k_update_fnorm": 0.24527819454669952, + "block7_k_max_l1_linf_norm": 0.20873835682868958, + "block7_k_max_spectral_norm": 0.012042389251291752, + "block7_v_update_fnorm": 0.2237352728843689, + "block7_v_max_l1_linf_norm": 0.21456842124462128, + "block7_v_max_spectral_norm": 0.012033806182444096, + "block7_o_update_fnorm": 0.24786721169948578, + "block7_o_max_l1_linf_norm": 0.20773619413375854, + "block7_o_max_spectral_norm": 0.012047111056745052, + "block7_mlp_win_update_fnorm": 0.25647658109664917, + "block7_mlp_win_max_l1_linf_norm": 0.15209972858428955, + "block7_mlp_win_max_spectral_norm": 0.012065298855304718, + "block7_mlp_wout_update_fnorm": 0.24783584475517273, + "block7_mlp_wout_max_l1_linf_norm": 0.41115304827690125, + "block7_mlp_wout_max_spectral_norm": 0.011378638446331024, + "block11_q_update_fnorm": 0.2490771859884262, + "block11_q_max_l1_linf_norm": 0.20959007740020752, + "block11_q_max_spectral_norm": 0.012039372697472572, + "block11_k_update_fnorm": 0.2500998377799988, + "block11_k_max_l1_linf_norm": 0.2118077427148819, + "block11_k_max_spectral_norm": 0.012043245136737823, + "block11_v_update_fnorm": 0.24618761241436005, + "block11_v_max_l1_linf_norm": 0.2066621482372284, + "block11_v_max_spectral_norm": 0.01204210426658392, + "block11_o_update_fnorm": 0.24935778975486755, + "block11_o_max_l1_linf_norm": 0.20865270495414734, + "block11_o_max_spectral_norm": 0.012041693553328514, + "block11_mlp_win_update_fnorm": 0.24450986087322235, + 
"block11_mlp_win_max_l1_linf_norm": 0.1608329564332962, + "block11_mlp_win_max_spectral_norm": 0.011373396031558514, + "block11_mlp_wout_update_fnorm": 0.23743467032909393, + "block11_mlp_wout_max_l1_linf_norm": 0.3988605737686157, + "block11_mlp_wout_max_spectral_norm": 0.011374362744390965, + "total_sharpness": 0.0052713616751134396, + "block_total_sharpness": 0.006795718800276518, + "v_norm_block": 1.9760149717330933, + "v_T_H_v_block": 0.026534801349043846, + "v_norm": 2.3826518058776855, + "ip_v_neg_g_hvp": 0.05363619700074196, + "cos_v_neg_g_hvp": 0.03583681955933571, + "g_hvp_norm": 0.6281566023826599, + "ip_v_neg_g_t": 0.05411059409379959, + "cos_v_neg_g_t": 0.04185115545988083, + "g_t_norm": 0.5426430702209473, + "g_norm": 0.6281566023826599, + "hv_norm": 0.9005032181739807, + "cos_v_hv": 0.013947558589279652, + "hg_norm": 59.66423416137695, + "cos_g_hg": 0.31472542881965637, + "v_parallel_norm": 0.005813411436975002, + "v_perp_norm": 2.3826448917388916, + "embed_lm_head_v_norm": 1.3313124179840088, + "embed_lm_head_cos_v_neg_g": 0.07037018239498138, + "layer_1_v_norm": 0.5849788188934326, + "layer_1_cos_v_neg_g": 0.027291812002658844, + "layer_2_v_norm": 0.4747047424316406, + "layer_2_cos_v_neg_g": 0.03252016380429268, + "layer_3_v_norm": 0.44750332832336426, + "layer_3_cos_v_neg_g": 0.030784403905272484, + "layer_4_v_norm": 0.5519772171974182, + "layer_4_cos_v_neg_g": 0.033370498567819595, + "layer_5_v_norm": 0.5818566083908081, + "layer_5_cos_v_neg_g": 0.037562236189842224, + "layer_6_v_norm": 0.5915240049362183, + "layer_6_cos_v_neg_g": 0.035980671644210815, + "layer_7_v_norm": 0.5983068346977234, + "layer_7_cos_v_neg_g": 0.036668263375759125, + "layer_8_v_norm": 0.5978111028671265, + "layer_8_cos_v_neg_g": 0.0371953547000885, + "layer_9_v_norm": 0.5973736643791199, + "layer_9_cos_v_neg_g": 0.040506716817617416, + "layer_10_v_norm": 0.5961804986000061, + "layer_10_cos_v_neg_g": 0.043611079454422, + "layer_11_v_norm": 0.5936070084571838, + "layer_11_cos_v_neg_g": 0.053174834698438644, + "layer_12_v_norm": 0.6030669808387756, + "layer_12_cos_v_neg_g": 0.08747141808271408, + "block0_q_v_norm": 0.24343878030776978, + "block0_q_cos_v_neg_g": 0.08713854104280472, + "block0_k_v_norm": 0.24321800470352173, + "block0_k_cos_v_neg_g": 0.08231990784406662, + "block0_v_v_norm": 0.16167621314525604, + "block0_v_cos_v_neg_g": 0.03238992020487785, + "block0_o_v_norm": 0.2251719832420349, + "block0_o_cos_v_neg_g": 0.05741468444466591, + "block0_mlp_win_v_norm": 0.2742367386817932, + "block0_mlp_win_cos_v_neg_g": 0.049077484756708145, + "block0_mlp_wout_v_norm": 0.26761743426322937, + "block0_mlp_wout_cos_v_neg_g": 0.0688025951385498, + "block3_q_v_norm": 0.2121352255344391, + "block3_q_cos_v_neg_g": 0.039931803941726685, + "block3_k_v_norm": 0.20488668978214264, + "block3_k_cos_v_neg_g": 0.05098004639148712, + "block3_v_v_norm": 0.18136993050575256, + "block3_v_cos_v_neg_g": 0.03334851562976837, + "block3_o_v_norm": 0.23627832531929016, + "block3_o_cos_v_neg_g": 0.06702975183725357, + "block3_mlp_win_v_norm": 0.2652098536491394, + "block3_mlp_win_cos_v_neg_g": 0.03857718035578728, + "block3_mlp_wout_v_norm": 0.24178917706012726, + "block3_mlp_wout_cos_v_neg_g": 0.1032327339053154, + "block7_q_v_norm": 0.24165956676006317, + "block7_q_cos_v_neg_g": 0.04865705594420433, + "block7_k_v_norm": 0.24527819454669952, + "block7_k_cos_v_neg_g": 0.08877508342266083, + "block7_v_v_norm": 0.2237352728843689, + "block7_v_cos_v_neg_g": 0.03423053398728371, + "block7_o_v_norm": 0.24786721169948578, + 
"block7_o_cos_v_neg_g": 0.0772763341665268, + "block7_mlp_win_v_norm": 0.25647658109664917, + "block7_mlp_win_cos_v_neg_g": 0.05204690992832184, + "block7_mlp_wout_v_norm": 0.24783584475517273, + "block7_mlp_wout_cos_v_neg_g": 0.1320132613182068, + "block11_q_v_norm": 0.2490771859884262, + "block11_q_cos_v_neg_g": 0.09364290535449982, + "block11_k_v_norm": 0.2500998377799988, + "block11_k_cos_v_neg_g": 0.10784172266721725, + "block11_v_v_norm": 0.24618761241436005, + "block11_v_cos_v_neg_g": 0.06712909787893295, + "block11_o_v_norm": 0.24935778975486755, + "block11_o_cos_v_neg_g": 0.09947522729635239, + "block11_mlp_win_v_norm": 0.24450986087322235, + "block11_mlp_win_cos_v_neg_g": 0.11930802464485168, + "block11_mlp_wout_v_norm": 0.23743467032909393, + "block11_mlp_wout_cos_v_neg_g": 0.0978570356965065, + "embed_lm_head_sharpness": 0.0004441928176674992, + "layer_1_sharpness": 0.007910382933914661, + "layer_2_sharpness": 0.0039838035590946674, + "layer_3_sharpness": 0.0007804302149452269, + "layer_4_sharpness": 0.0007582837133668363, + "layer_5_sharpness": 0.0008371360599994659, + "layer_6_sharpness": 0.0012935854028910398, + "layer_7_sharpness": 0.0011715047294273973, + "layer_8_sharpness": 0.001123151509091258, + "layer_9_sharpness": 0.0007260891143232584, + "layer_10_sharpness": 0.0005291463457979262, + "layer_11_sharpness": 0.0005003975820727646, + "layer_12_sharpness": 0.00039319958887062967, + "block0_q_sharpness": 0.0011393281165510416, + "block0_k_sharpness": 0.0009782027918845415, + "block0_v_sharpness": 0.027505528181791306, + "block0_o_sharpness": 0.0011330351699143648, + "block0_mlp_win_sharpness": 0.0011052105110138655, + "block0_mlp_wout_sharpness": 0.0006761891418136656, + "block3_q_sharpness": 2.1225472664809786e-05, + "block3_k_sharpness": 0.0002605805639177561, + "block3_v_sharpness": 0.003901971271261573, + "block3_o_sharpness": 0.00022850409732200205, + "block3_mlp_win_sharpness": 5.058765600551851e-05, + "block3_mlp_wout_sharpness": 1.7714402929414064e-05, + "block7_q_sharpness": 8.50128781166859e-05, + "block7_k_sharpness": 0.0001544311089674011, + "block7_v_sharpness": 0.00290112872608006, + "block7_o_sharpness": 9.312737529398873e-05, + "block7_mlp_win_sharpness": 0.0002951921778731048, + "block7_mlp_wout_sharpness": 9.613290603738278e-05, + "block11_q_sharpness": 3.667629789561033e-05, + "block11_k_sharpness": 4.7565499698976055e-05, + "block11_v_sharpness": 0.00018135266145691276, + "block11_o_sharpness": 4.8838192014954984e-05, + "block11_mlp_win_sharpness": 0.0002827185671776533, + "block11_mlp_wout_sharpness": 0.00042184925405308604, + "sum_layer_numerators": 0.0063152952951306225, + "block_diag_sharpness": 0.0016173842171111538, + "cross_layer_sharpness": 0.005178334583165364 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3500.json new file mode 100644 index 0000000000000000000000000000000000000000..e14aa7bfadaac8d60cc13623f3f9571cb53cb350 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_3500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.38348650932312, + "total_l1_linf_norm": 20370.0546875, + "total_spectral_norm": 2.38348650932312, + "embed_lm_head_update_fnorm": 1.3366376161575317, + "embed_lm_head_max_l1_linf_norm": 0.36881324648857117, 
+ "embed_lm_head_max_spectral_norm": 0.20832960307598114, + "layer_1_update_fnorm": 0.5815527439117432, + "layer_1_max_l1_linf_norm": 0.44337230920791626, + "layer_1_max_spectral_norm": 0.012051147408783436, + "layer_2_update_fnorm": 0.47426220774650574, + "layer_2_max_l1_linf_norm": 0.4266289174556732, + "layer_2_max_spectral_norm": 0.012053956277668476, + "layer_3_update_fnorm": 0.42735129594802856, + "layer_3_max_l1_linf_norm": 0.47820401191711426, + "layer_3_max_spectral_norm": 0.018544303253293037, + "layer_4_update_fnorm": 0.5533037185668945, + "layer_4_max_l1_linf_norm": 0.41100698709487915, + "layer_4_max_spectral_norm": 0.012043473310768604, + "layer_5_update_fnorm": 0.5832569599151611, + "layer_5_max_l1_linf_norm": 0.40954697132110596, + "layer_5_max_spectral_norm": 0.01205220352858305, + "layer_6_update_fnorm": 0.5940539836883545, + "layer_6_max_l1_linf_norm": 0.41526293754577637, + "layer_6_max_spectral_norm": 0.012046387419104576, + "layer_7_update_fnorm": 0.6002404093742371, + "layer_7_max_l1_linf_norm": 0.408903032541275, + "layer_7_max_spectral_norm": 0.012049097567796707, + "layer_8_update_fnorm": 0.5989207625389099, + "layer_8_max_l1_linf_norm": 0.410564661026001, + "layer_8_max_spectral_norm": 0.012050062417984009, + "layer_9_update_fnorm": 0.5996770262718201, + "layer_9_max_l1_linf_norm": 0.41191399097442627, + "layer_9_max_spectral_norm": 0.012065229006111622, + "layer_10_update_fnorm": 0.5969101786613464, + "layer_10_max_l1_linf_norm": 0.41780364513397217, + "layer_10_max_spectral_norm": 0.012042620219290257, + "layer_11_update_fnorm": 0.592867374420166, + "layer_11_max_l1_linf_norm": 0.40685296058654785, + "layer_11_max_spectral_norm": 0.012045769020915031, + "layer_12_update_fnorm": 0.6025076508522034, + "layer_12_max_l1_linf_norm": 0.39822277426719666, + "layer_12_max_spectral_norm": 0.012045355513691902, + "block0_q_update_fnorm": 0.2421584278345108, + "block0_q_max_l1_linf_norm": 0.21415036916732788, + "block0_q_max_spectral_norm": 0.01203913800418377, + "block0_k_update_fnorm": 0.2397364228963852, + "block0_k_max_l1_linf_norm": 0.21261996030807495, + "block0_k_max_spectral_norm": 0.012044189497828484, + "block0_v_update_fnorm": 0.1581640988588333, + "block0_v_max_l1_linf_norm": 0.19737014174461365, + "block0_v_max_spectral_norm": 0.012028547003865242, + "block0_o_update_fnorm": 0.22440099716186523, + "block0_o_max_l1_linf_norm": 0.19004565477371216, + "block0_o_max_spectral_norm": 0.012042407877743244, + "block0_mlp_win_update_fnorm": 0.27388593554496765, + "block0_mlp_win_max_l1_linf_norm": 0.170338436961174, + "block0_mlp_win_max_spectral_norm": 0.012047942727804184, + "block0_mlp_wout_update_fnorm": 0.2675846815109253, + "block0_mlp_wout_max_l1_linf_norm": 0.44337230920791626, + "block0_mlp_wout_max_spectral_norm": 0.012051147408783436, + "block3_q_update_fnorm": 0.21197821199893951, + "block3_q_max_l1_linf_norm": 0.2075735479593277, + "block3_q_max_spectral_norm": 0.012036745436489582, + "block3_k_update_fnorm": 0.20981208980083466, + "block3_k_max_l1_linf_norm": 0.21166469156742096, + "block3_k_max_spectral_norm": 0.012039421126246452, + "block3_v_update_fnorm": 0.18054623901844025, + "block3_v_max_l1_linf_norm": 0.19835643470287323, + "block3_v_max_spectral_norm": 0.012032624334096909, + "block3_o_update_fnorm": 0.23521244525909424, + "block3_o_max_l1_linf_norm": 0.19994421303272247, + "block3_o_max_spectral_norm": 0.012043057940900326, + "block3_mlp_win_update_fnorm": 0.2653413414955139, + "block3_mlp_win_max_l1_linf_norm": 0.19473204016685486, + 
"block3_mlp_win_max_spectral_norm": 0.012043473310768604, + "block3_mlp_wout_update_fnorm": 0.24222736060619354, + "block3_mlp_wout_max_l1_linf_norm": 0.40056663751602173, + "block3_mlp_wout_max_spectral_norm": 0.011388465762138367, + "block7_q_update_fnorm": 0.24186919629573822, + "block7_q_max_l1_linf_norm": 0.20778337121009827, + "block7_q_max_spectral_norm": 0.012043307535350323, + "block7_k_update_fnorm": 0.24590525031089783, + "block7_k_max_l1_linf_norm": 0.20905755460262299, + "block7_k_max_spectral_norm": 0.012038919143378735, + "block7_v_update_fnorm": 0.2226163148880005, + "block7_v_max_l1_linf_norm": 0.2088407278060913, + "block7_v_max_spectral_norm": 0.012037334032356739, + "block7_o_update_fnorm": 0.24792549014091492, + "block7_o_max_l1_linf_norm": 0.20828726887702942, + "block7_o_max_spectral_norm": 0.012042469345033169, + "block7_mlp_win_update_fnorm": 0.2594951093196869, + "block7_mlp_win_max_l1_linf_norm": 0.15038391947746277, + "block7_mlp_win_max_spectral_norm": 0.012050062417984009, + "block7_mlp_wout_update_fnorm": 0.24748998880386353, + "block7_mlp_wout_max_l1_linf_norm": 0.410564661026001, + "block7_mlp_wout_max_spectral_norm": 0.011374883353710175, + "block11_q_update_fnorm": 0.2492193728685379, + "block11_q_max_l1_linf_norm": 0.2085644006729126, + "block11_q_max_spectral_norm": 0.012045355513691902, + "block11_k_update_fnorm": 0.24997466802597046, + "block11_k_max_l1_linf_norm": 0.2140941023826599, + "block11_k_max_spectral_norm": 0.01203987654298544, + "block11_v_update_fnorm": 0.24619415402412415, + "block11_v_max_l1_linf_norm": 0.2072274386882782, + "block11_v_max_spectral_norm": 0.012042302638292313, + "block11_o_update_fnorm": 0.2489873766899109, + "block11_o_max_l1_linf_norm": 0.20769715309143066, + "block11_o_max_spectral_norm": 0.012039667926728725, + "block11_mlp_win_update_fnorm": 0.24341830611228943, + "block11_mlp_win_max_l1_linf_norm": 0.15446880459785461, + "block11_mlp_win_max_spectral_norm": 0.011370728723704815, + "block11_mlp_wout_update_fnorm": 0.23749393224716187, + "block11_mlp_wout_max_l1_linf_norm": 0.3947869539260864, + "block11_mlp_wout_max_spectral_norm": 0.011372731998562813, + "total_sharpness": 0.004263618029654026, + "block_total_sharpness": 0.0053953672759234905, + "v_norm_block": 1.9734249114990234, + "v_T_H_v_block": 0.021011751145124435, + "v_norm": 2.38348650932312, + "ip_v_neg_g_hvp": 0.04638944938778877, + "cos_v_neg_g_hvp": 0.031088965013623238, + "g_hvp_norm": 0.6260373592376709, + "ip_v_neg_g_t": 0.0469932034611702, + "cos_v_neg_g_t": 0.03682146966457367, + "g_t_norm": 0.5354528427124023, + "g_norm": 0.6260373592376709, + "hv_norm": 0.6365428566932678, + "cos_v_hv": 0.015964794903993607, + "hg_norm": 36.718013763427734, + "cos_g_hg": 0.6846843957901001, + "v_parallel_norm": 0.0062025487422943115, + "v_perp_norm": 2.3834784030914307, + "embed_lm_head_v_norm": 1.3366376161575317, + "embed_lm_head_cos_v_neg_g": 0.057957664132118225, + "layer_1_v_norm": 0.5815527439117432, + "layer_1_cos_v_neg_g": 0.01806035451591015, + "layer_2_v_norm": 0.47426220774650574, + "layer_2_cos_v_neg_g": 0.022832728922367096, + "layer_3_v_norm": 0.4273512661457062, + "layer_3_cos_v_neg_g": 0.027698921039700508, + "layer_4_v_norm": 0.5533037185668945, + "layer_4_cos_v_neg_g": 0.02845132164657116, + "layer_5_v_norm": 0.5832569599151611, + "layer_5_cos_v_neg_g": 0.03494557738304138, + "layer_6_v_norm": 0.5940539240837097, + "layer_6_cos_v_neg_g": 0.03435157611966133, + "layer_7_v_norm": 0.6002404093742371, + "layer_7_cos_v_neg_g": 0.034820593893527985, + 
"layer_8_v_norm": 0.5989207625389099, + "layer_8_cos_v_neg_g": 0.033152420073747635, + "layer_9_v_norm": 0.5996770262718201, + "layer_9_cos_v_neg_g": 0.03527972102165222, + "layer_10_v_norm": 0.5969101786613464, + "layer_10_cos_v_neg_g": 0.036049433052539825, + "layer_11_v_norm": 0.592867374420166, + "layer_11_cos_v_neg_g": 0.0464472733438015, + "layer_12_v_norm": 0.6025076508522034, + "layer_12_cos_v_neg_g": 0.07736718654632568, + "block0_q_v_norm": 0.2421584278345108, + "block0_q_cos_v_neg_g": 0.05794782564043999, + "block0_k_v_norm": 0.2397364228963852, + "block0_k_cos_v_neg_g": 0.05136994644999504, + "block0_v_v_norm": 0.1581640988588333, + "block0_v_cos_v_neg_g": 0.025074012577533722, + "block0_o_v_norm": 0.22440099716186523, + "block0_o_cos_v_neg_g": 0.04481194540858269, + "block0_mlp_win_v_norm": 0.27388593554496765, + "block0_mlp_win_cos_v_neg_g": 0.03595529869198799, + "block0_mlp_wout_v_norm": 0.2675846815109253, + "block0_mlp_wout_cos_v_neg_g": 0.053788844496011734, + "block3_q_v_norm": 0.21197821199893951, + "block3_q_cos_v_neg_g": 0.030830156058073044, + "block3_k_v_norm": 0.20981208980083466, + "block3_k_cos_v_neg_g": 0.04024949669837952, + "block3_v_v_norm": 0.18054623901844025, + "block3_v_cos_v_neg_g": 0.0292111374437809, + "block3_o_v_norm": 0.23521244525909424, + "block3_o_cos_v_neg_g": 0.0605219341814518, + "block3_mlp_win_v_norm": 0.2653413414955139, + "block3_mlp_win_cos_v_neg_g": 0.035010527819395065, + "block3_mlp_wout_v_norm": 0.24222736060619354, + "block3_mlp_wout_cos_v_neg_g": 0.09943725913763046, + "block7_q_v_norm": 0.24186919629573822, + "block7_q_cos_v_neg_g": 0.04029237851500511, + "block7_k_v_norm": 0.24590525031089783, + "block7_k_cos_v_neg_g": 0.0838426724076271, + "block7_v_v_norm": 0.2226163148880005, + "block7_v_cos_v_neg_g": 0.03183748945593834, + "block7_o_v_norm": 0.24792549014091492, + "block7_o_cos_v_neg_g": 0.07150421291589737, + "block7_mlp_win_v_norm": 0.2594951093196869, + "block7_mlp_win_cos_v_neg_g": 0.042981650680303574, + "block7_mlp_wout_v_norm": 0.24748998880386353, + "block7_mlp_wout_cos_v_neg_g": 0.12030927836894989, + "block11_q_v_norm": 0.2492193728685379, + "block11_q_cos_v_neg_g": 0.08289026468992233, + "block11_k_v_norm": 0.24997466802597046, + "block11_k_cos_v_neg_g": 0.09923820197582245, + "block11_v_v_norm": 0.24619415402412415, + "block11_v_cos_v_neg_g": 0.060328856110572815, + "block11_o_v_norm": 0.2489873766899109, + "block11_o_cos_v_neg_g": 0.0946478545665741, + "block11_mlp_win_v_norm": 0.24341830611228943, + "block11_mlp_win_cos_v_neg_g": 0.10869298875331879, + "block11_mlp_wout_v_norm": 0.23749393224716187, + "block11_mlp_wout_cos_v_neg_g": 0.08343447744846344, + "embed_lm_head_sharpness": 0.0004045467358082533, + "layer_1_sharpness": 0.004910479299724102, + "layer_2_sharpness": 0.0011644955957308412, + "layer_3_sharpness": 0.0016830768436193466, + "layer_4_sharpness": 0.00108385703060776, + "layer_5_sharpness": 0.0010667132446542382, + "layer_6_sharpness": 0.001140158623456955, + "layer_7_sharpness": 0.0010716085089370608, + "layer_8_sharpness": 0.0009451339137740433, + "layer_9_sharpness": 0.0005877059884369373, + "layer_10_sharpness": 0.00041091235470958054, + "layer_11_sharpness": 0.00047573953634127975, + "layer_12_sharpness": 0.0003517979057505727, + "block0_q_sharpness": 0.0007198535022325814, + "block0_k_sharpness": 0.00017956376541405916, + "block0_v_sharpness": 0.028469359502196312, + "block0_o_sharpness": 0.0008611289085820317, + "block0_mlp_win_sharpness": 0.0005932835047133267, + 
"block0_mlp_wout_sharpness": 0.0006422341684810817, + "block3_q_sharpness": 6.885911716381088e-05, + "block3_k_sharpness": 0.0005136229447089136, + "block3_v_sharpness": 0.003927451558411121, + "block3_o_sharpness": 0.000194330103113316, + "block3_mlp_win_sharpness": 0.00011605672625591978, + "block3_mlp_wout_sharpness": 5.3539664804702625e-05, + "block7_q_sharpness": 7.516812183894217e-05, + "block7_k_sharpness": 9.331662295153365e-05, + "block7_v_sharpness": 0.0026552428025752306, + "block7_o_sharpness": 8.168076601577923e-05, + "block7_mlp_win_sharpness": 0.0002734169829636812, + "block7_mlp_wout_sharpness": 7.830052345525473e-05, + "block11_q_sharpness": 3.262359678046778e-05, + "block11_k_sharpness": 4.22593584517017e-05, + "block11_v_sharpness": 0.0001872266730060801, + "block11_o_sharpness": 4.967179847881198e-05, + "block11_mlp_win_sharpness": 0.00024584069615229964, + "block11_mlp_wout_sharpness": 0.0003729259187821299, + "sum_layer_numerators": 0.0047049029429429914, + "block_diag_sharpness": 0.0012081182820477666, + "cross_layer_sharpness": 0.004187248993875724 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4000.json new file mode 100644 index 0000000000000000000000000000000000000000..10bd1ea1b2df49ee0f087228f8e2aba4196a18b2 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3615896701812744, + "total_l1_linf_norm": 20129.52734375, + "total_spectral_norm": 2.3615896701812744, + "embed_lm_head_update_fnorm": 1.3342876434326172, + "embed_lm_head_max_l1_linf_norm": 0.38766053318977356, + "embed_lm_head_max_spectral_norm": 0.19925250113010406, + "layer_1_update_fnorm": 0.561899721622467, + "layer_1_max_l1_linf_norm": 0.4287472367286682, + "layer_1_max_spectral_norm": 0.012043561786413193, + "layer_2_update_fnorm": 0.42343008518218994, + "layer_2_max_l1_linf_norm": 0.4142772853374481, + "layer_2_max_spectral_norm": 0.013707282952964306, + "layer_3_update_fnorm": 0.41179168224334717, + "layer_3_max_l1_linf_norm": 0.6085856556892395, + "layer_3_max_spectral_norm": 0.022604841738939285, + "layer_4_update_fnorm": 0.5456051230430603, + "layer_4_max_l1_linf_norm": 0.39931434392929077, + "layer_4_max_spectral_norm": 0.012042442336678505, + "layer_5_update_fnorm": 0.5818808674812317, + "layer_5_max_l1_linf_norm": 0.4119689166545868, + "layer_5_max_spectral_norm": 0.012048504315316677, + "layer_6_update_fnorm": 0.5947957038879395, + "layer_6_max_l1_linf_norm": 0.41192883253097534, + "layer_6_max_spectral_norm": 0.012055478058755398, + "layer_7_update_fnorm": 0.6007802486419678, + "layer_7_max_l1_linf_norm": 0.4103353023529053, + "layer_7_max_spectral_norm": 0.012047654017806053, + "layer_8_update_fnorm": 0.5998895168304443, + "layer_8_max_l1_linf_norm": 0.410264790058136, + "layer_8_max_spectral_norm": 0.012051617726683617, + "layer_9_update_fnorm": 0.5986542701721191, + "layer_9_max_l1_linf_norm": 0.41118186712265015, + "layer_9_max_spectral_norm": 0.012069519609212875, + "layer_10_update_fnorm": 0.5958581566810608, + "layer_10_max_l1_linf_norm": 0.4118281602859497, + "layer_10_max_spectral_norm": 0.012052849866449833, + "layer_11_update_fnorm": 0.5890074372291565, + "layer_11_max_l1_linf_norm": 0.4078473746776581, + 
"layer_11_max_spectral_norm": 0.012043838389217854, + "layer_12_update_fnorm": 0.6008039116859436, + "layer_12_max_l1_linf_norm": 0.3978114724159241, + "layer_12_max_spectral_norm": 0.012043575756251812, + "block0_q_update_fnorm": 0.23555059731006622, + "block0_q_max_l1_linf_norm": 0.21570464968681335, + "block0_q_max_spectral_norm": 0.012037951499223709, + "block0_k_update_fnorm": 0.22671282291412354, + "block0_k_max_l1_linf_norm": 0.21612504124641418, + "block0_k_max_spectral_norm": 0.012039895169436932, + "block0_v_update_fnorm": 0.14994826912879944, + "block0_v_max_l1_linf_norm": 0.1897919774055481, + "block0_v_max_spectral_norm": 0.012029225006699562, + "block0_o_update_fnorm": 0.22099845111370087, + "block0_o_max_l1_linf_norm": 0.18892502784729004, + "block0_o_max_spectral_norm": 0.012038322165608406, + "block0_mlp_win_update_fnorm": 0.2616789937019348, + "block0_mlp_win_max_l1_linf_norm": 0.18076659739017487, + "block0_mlp_win_max_spectral_norm": 0.012043561786413193, + "block0_mlp_wout_update_fnorm": 0.26247626543045044, + "block0_mlp_wout_max_l1_linf_norm": 0.4287472367286682, + "block0_mlp_wout_max_spectral_norm": 0.012038092128932476, + "block3_q_update_fnorm": 0.21106117963790894, + "block3_q_max_l1_linf_norm": 0.20762592554092407, + "block3_q_max_spectral_norm": 0.012037629261612892, + "block3_k_update_fnorm": 0.2058643102645874, + "block3_k_max_l1_linf_norm": 0.21015089750289917, + "block3_k_max_spectral_norm": 0.012042442336678505, + "block3_v_update_fnorm": 0.17663761973381042, + "block3_v_max_l1_linf_norm": 0.20034007728099823, + "block3_v_max_spectral_norm": 0.01203277800232172, + "block3_o_update_fnorm": 0.2253529578447342, + "block3_o_max_l1_linf_norm": 0.18959514796733856, + "block3_o_max_spectral_norm": 0.0120420902967453, + "block3_mlp_win_update_fnorm": 0.2647847533226013, + "block3_mlp_win_max_l1_linf_norm": 0.1812875121831894, + "block3_mlp_win_max_spectral_norm": 0.012036988511681557, + "block3_mlp_wout_update_fnorm": 0.2416372150182724, + "block3_mlp_wout_max_l1_linf_norm": 0.39931434392929077, + "block3_mlp_wout_max_spectral_norm": 0.011396597139537334, + "block7_q_update_fnorm": 0.24184338748455048, + "block7_q_max_l1_linf_norm": 0.20751526951789856, + "block7_q_max_spectral_norm": 0.012043693102896214, + "block7_k_update_fnorm": 0.24636037647724152, + "block7_k_max_l1_linf_norm": 0.20911741256713867, + "block7_k_max_spectral_norm": 0.012040369212627411, + "block7_v_update_fnorm": 0.2190949022769928, + "block7_v_max_l1_linf_norm": 0.20878851413726807, + "block7_v_max_spectral_norm": 0.01203927118331194, + "block7_o_update_fnorm": 0.24811968207359314, + "block7_o_max_l1_linf_norm": 0.20756646990776062, + "block7_o_max_spectral_norm": 0.012050065211951733, + "block7_mlp_win_update_fnorm": 0.264273077249527, + "block7_mlp_win_max_l1_linf_norm": 0.1497868448495865, + "block7_mlp_win_max_spectral_norm": 0.012051617726683617, + "block7_mlp_wout_update_fnorm": 0.2473108023405075, + "block7_mlp_wout_max_l1_linf_norm": 0.410264790058136, + "block7_mlp_wout_max_spectral_norm": 0.011372790671885014, + "block11_q_update_fnorm": 0.24851806461811066, + "block11_q_max_l1_linf_norm": 0.21608896553516388, + "block11_q_max_spectral_norm": 0.012043575756251812, + "block11_k_update_fnorm": 0.2500695288181305, + "block11_k_max_l1_linf_norm": 0.21188391745090485, + "block11_k_max_spectral_norm": 0.012042727321386337, + "block11_v_update_fnorm": 0.2456158846616745, + "block11_v_max_l1_linf_norm": 0.20592591166496277, + "block11_v_max_spectral_norm": 0.01204003393650055, + 
"block11_o_update_fnorm": 0.24890395998954773, + "block11_o_max_l1_linf_norm": 0.20841458439826965, + "block11_o_max_spectral_norm": 0.012040027417242527, + "block11_mlp_win_update_fnorm": 0.2413860261440277, + "block11_mlp_win_max_l1_linf_norm": 0.1587790697813034, + "block11_mlp_win_max_spectral_norm": 0.011393584311008453, + "block11_mlp_wout_update_fnorm": 0.2365652471780777, + "block11_mlp_wout_max_l1_linf_norm": 0.39243292808532715, + "block11_mlp_wout_max_spectral_norm": 0.011360283941030502, + "total_sharpness": 0.0075767552480101585, + "block_total_sharpness": 0.009841827675700188, + "v_norm_block": 1.9485331773757935, + "v_T_H_v_block": 0.03736726939678192, + "v_norm": 2.3615896701812744, + "ip_v_neg_g_hvp": 0.055013060569763184, + "cos_v_neg_g_hvp": 0.03134523704648018, + "g_hvp_norm": 0.743172824382782, + "ip_v_neg_g_t": 0.059118688106536865, + "cos_v_neg_g_t": 0.03564701974391937, + "g_t_norm": 0.7022587060928345, + "g_norm": 0.743172824382782, + "hv_norm": 1.0601587295532227, + "cos_v_hv": 0.016877837479114532, + "hg_norm": 46.43511962890625, + "cos_g_hg": 0.6300380825996399, + "v_parallel_norm": 0.005458443891257048, + "v_perp_norm": 2.3615834712982178, + "embed_lm_head_v_norm": 1.3342876434326172, + "embed_lm_head_cos_v_neg_g": 0.05223532021045685, + "layer_1_v_norm": 0.561899721622467, + "layer_1_cos_v_neg_g": 0.026977363973855972, + "layer_2_v_norm": 0.42343008518218994, + "layer_2_cos_v_neg_g": 0.040757227689027786, + "layer_3_v_norm": 0.41179168224334717, + "layer_3_cos_v_neg_g": 0.03520609810948372, + "layer_4_v_norm": 0.5456051230430603, + "layer_4_cos_v_neg_g": 0.031342655420303345, + "layer_5_v_norm": 0.5818808674812317, + "layer_5_cos_v_neg_g": 0.03390539437532425, + "layer_6_v_norm": 0.5947957038879395, + "layer_6_cos_v_neg_g": 0.03192875534296036, + "layer_7_v_norm": 0.6007802486419678, + "layer_7_cos_v_neg_g": 0.032610341906547546, + "layer_8_v_norm": 0.5998895168304443, + "layer_8_cos_v_neg_g": 0.030596312135457993, + "layer_9_v_norm": 0.5986542701721191, + "layer_9_cos_v_neg_g": 0.03446497395634651, + "layer_10_v_norm": 0.5958581566810608, + "layer_10_cos_v_neg_g": 0.035893164575099945, + "layer_11_v_norm": 0.5890074372291565, + "layer_11_cos_v_neg_g": 0.04685327038168907, + "layer_12_v_norm": 0.6008039116859436, + "layer_12_cos_v_neg_g": 0.0822114422917366, + "block0_q_v_norm": 0.23555059731006622, + "block0_q_cos_v_neg_g": 0.047430943697690964, + "block0_k_v_norm": 0.22671282291412354, + "block0_k_cos_v_neg_g": 0.055562298744916916, + "block0_v_v_norm": 0.14994826912879944, + "block0_v_cos_v_neg_g": 0.04018935561180115, + "block0_o_v_norm": 0.22099845111370087, + "block0_o_cos_v_neg_g": 0.04885172098875046, + "block0_mlp_win_v_norm": 0.2616789937019348, + "block0_mlp_win_cos_v_neg_g": 0.0388990081846714, + "block0_mlp_wout_v_norm": 0.26247626543045044, + "block0_mlp_wout_cos_v_neg_g": 0.06603975594043732, + "block3_q_v_norm": 0.21106117963790894, + "block3_q_cos_v_neg_g": 0.03348059207201004, + "block3_k_v_norm": 0.2058643102645874, + "block3_k_cos_v_neg_g": 0.05892235040664673, + "block3_v_v_norm": 0.17663761973381042, + "block3_v_cos_v_neg_g": 0.02959386818110943, + "block3_o_v_norm": 0.2253529578447342, + "block3_o_cos_v_neg_g": 0.05008585751056671, + "block3_mlp_win_v_norm": 0.2647847533226013, + "block3_mlp_win_cos_v_neg_g": 0.03534282371401787, + "block3_mlp_wout_v_norm": 0.2416372150182724, + "block3_mlp_wout_cos_v_neg_g": 0.10061834752559662, + "block7_q_v_norm": 0.24184338748455048, + "block7_q_cos_v_neg_g": 0.03843365237116814, + "block7_k_v_norm": 
0.24636037647724152, + "block7_k_cos_v_neg_g": 0.08138544857501984, + "block7_v_v_norm": 0.2190949022769928, + "block7_v_cos_v_neg_g": 0.028957534581422806, + "block7_o_v_norm": 0.24811968207359314, + "block7_o_cos_v_neg_g": 0.07104579359292984, + "block7_mlp_win_v_norm": 0.264273077249527, + "block7_mlp_win_cos_v_neg_g": 0.04230165481567383, + "block7_mlp_wout_v_norm": 0.2473108023405075, + "block7_mlp_wout_cos_v_neg_g": 0.1262776404619217, + "block11_q_v_norm": 0.24851806461811066, + "block11_q_cos_v_neg_g": 0.08463089913129807, + "block11_k_v_norm": 0.2500695288181305, + "block11_k_cos_v_neg_g": 0.10382548719644547, + "block11_v_v_norm": 0.2456158846616745, + "block11_v_cos_v_neg_g": 0.056215155869722366, + "block11_o_v_norm": 0.24890395998954773, + "block11_o_cos_v_neg_g": 0.09511522203683853, + "block11_mlp_win_v_norm": 0.2413860261440277, + "block11_mlp_win_cos_v_neg_g": 0.11220505833625793, + "block11_mlp_wout_v_norm": 0.2365652471780777, + "block11_mlp_wout_cos_v_neg_g": 0.0982353463768959, + "embed_lm_head_sharpness": 0.0004508662677835673, + "layer_1_sharpness": 0.01608578860759735, + "layer_2_sharpness": 0.011589433997869492, + "layer_3_sharpness": 0.005597792100161314, + "layer_4_sharpness": 0.0017625471809878945, + "layer_5_sharpness": 0.0013749422505497932, + "layer_6_sharpness": 0.0012968836817890406, + "layer_7_sharpness": 0.001248171553015709, + "layer_8_sharpness": 0.0011334980372339487, + "layer_9_sharpness": 0.0007234130171127617, + "layer_10_sharpness": 0.00046427416964434087, + "layer_11_sharpness": 0.0005150767392478883, + "layer_12_sharpness": 0.0005390112637542188, + "block0_q_sharpness": 0.0010214740177616477, + "block0_k_sharpness": 0.0007678108522668481, + "block0_v_sharpness": 0.06022034212946892, + "block0_o_sharpness": 0.0014226889470592141, + "block0_mlp_win_sharpness": 0.0027010224293917418, + "block0_mlp_wout_sharpness": 0.003550498280674219, + "block3_q_sharpness": 7.497908518416807e-05, + "block3_k_sharpness": 0.001031458843499422, + "block3_v_sharpness": 0.005465722642838955, + "block3_o_sharpness": 0.0006292971083894372, + "block3_mlp_win_sharpness": 0.00018656968313734978, + "block3_mlp_wout_sharpness": 6.473005487350747e-05, + "block7_q_sharpness": 7.043268851703033e-05, + "block7_k_sharpness": 9.883145685307682e-05, + "block7_v_sharpness": 0.0032847279217094183, + "block7_o_sharpness": 7.934765017125756e-05, + "block7_mlp_win_sharpness": 0.0003283577680122107, + "block7_mlp_wout_sharpness": 8.270956459455192e-05, + "block11_q_sharpness": 3.9500027924077585e-05, + "block11_k_sharpness": 4.187354352325201e-05, + "block11_v_sharpness": 0.00019773720123339444, + "block11_o_sharpness": 4.429176260600798e-05, + "block11_mlp_win_sharpness": 0.0002696271112654358, + "block11_mlp_wout_sharpness": 0.000932772527448833, + "sum_layer_numerators": 0.011210736721768, + "block_diag_sharpness": 0.0029526946951820435, + "cross_layer_sharpness": 0.006889132980518144 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4500.json new file mode 100644 index 0000000000000000000000000000000000000000..0e591a53bdaee929a3007ef8c8d34b6b1cf37091 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_4500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3464081287384033, + 
"total_l1_linf_norm": 19987.150390625, + "total_spectral_norm": 2.3464083671569824, + "embed_lm_head_update_fnorm": 1.3280924558639526, + "embed_lm_head_max_l1_linf_norm": 0.3499900698661804, + "embed_lm_head_max_spectral_norm": 0.19497336447238922, + "layer_1_update_fnorm": 0.5628696084022522, + "layer_1_max_l1_linf_norm": 0.432954341173172, + "layer_1_max_spectral_norm": 0.01204422116279602, + "layer_2_update_fnorm": 0.3958136737346649, + "layer_2_max_l1_linf_norm": 0.4232464134693146, + "layer_2_max_spectral_norm": 0.0161042008548975, + "layer_3_update_fnorm": 0.41676846146583557, + "layer_3_max_l1_linf_norm": 0.5089229941368103, + "layer_3_max_spectral_norm": 0.019534073770046234, + "layer_4_update_fnorm": 0.533392071723938, + "layer_4_max_l1_linf_norm": 0.3964277505874634, + "layer_4_max_spectral_norm": 0.012038502842187881, + "layer_5_update_fnorm": 0.5800192356109619, + "layer_5_max_l1_linf_norm": 0.4070039987564087, + "layer_5_max_spectral_norm": 0.012046131305396557, + "layer_6_update_fnorm": 0.5919808745384216, + "layer_6_max_l1_linf_norm": 0.41070252656936646, + "layer_6_max_spectral_norm": 0.012046688236296177, + "layer_7_update_fnorm": 0.5996241569519043, + "layer_7_max_l1_linf_norm": 0.40622228384017944, + "layer_7_max_spectral_norm": 0.012052644975483418, + "layer_8_update_fnorm": 0.5975279808044434, + "layer_8_max_l1_linf_norm": 0.40642741322517395, + "layer_8_max_spectral_norm": 0.012056705541908741, + "layer_9_update_fnorm": 0.595504641532898, + "layer_9_max_l1_linf_norm": 0.41027846932411194, + "layer_9_max_spectral_norm": 0.012050929479300976, + "layer_10_update_fnorm": 0.5931458473205566, + "layer_10_max_l1_linf_norm": 0.41147083044052124, + "layer_10_max_spectral_norm": 0.01206780131906271, + "layer_11_update_fnorm": 0.5829335451126099, + "layer_11_max_l1_linf_norm": 0.4062895178794861, + "layer_11_max_spectral_norm": 0.01204250194132328, + "layer_12_update_fnorm": 0.6003309488296509, + "layer_12_max_l1_linf_norm": 0.39947080612182617, + "layer_12_max_spectral_norm": 0.012045873329043388, + "block0_q_update_fnorm": 0.2424274981021881, + "block0_q_max_l1_linf_norm": 0.20922061800956726, + "block0_q_max_spectral_norm": 0.01204422116279602, + "block0_k_update_fnorm": 0.23433832824230194, + "block0_k_max_l1_linf_norm": 0.2083539068698883, + "block0_k_max_spectral_norm": 0.012040598317980766, + "block0_v_update_fnorm": 0.13655254244804382, + "block0_v_max_l1_linf_norm": 0.17438818514347076, + "block0_v_max_spectral_norm": 0.012023485265672207, + "block0_o_update_fnorm": 0.216231569647789, + "block0_o_max_l1_linf_norm": 0.18508300185203552, + "block0_o_max_spectral_norm": 0.012037529610097408, + "block0_mlp_win_update_fnorm": 0.2618808448314667, + "block0_mlp_win_max_l1_linf_norm": 0.17814965546131134, + "block0_mlp_win_max_spectral_norm": 0.012039842084050179, + "block0_mlp_wout_update_fnorm": 0.26267552375793457, + "block0_mlp_wout_max_l1_linf_norm": 0.432954341173172, + "block0_mlp_wout_max_spectral_norm": 0.012040739879012108, + "block3_q_update_fnorm": 0.19285768270492554, + "block3_q_max_l1_linf_norm": 0.21170273423194885, + "block3_q_max_spectral_norm": 0.012035476043820381, + "block3_k_update_fnorm": 0.20002387464046478, + "block3_k_max_l1_linf_norm": 0.21083390712738037, + "block3_k_max_spectral_norm": 0.012038502842187881, + "block3_v_update_fnorm": 0.1736154854297638, + "block3_v_max_l1_linf_norm": 0.1982400268316269, + "block3_v_max_spectral_norm": 0.012032056227326393, + "block3_o_update_fnorm": 0.22224494814872742, + "block3_o_max_l1_linf_norm": 
0.18672393262386322, + "block3_o_max_spectral_norm": 0.01203822996467352, + "block3_mlp_win_update_fnorm": 0.2639123499393463, + "block3_mlp_win_max_l1_linf_norm": 0.18338045477867126, + "block3_mlp_win_max_spectral_norm": 0.012036302126944065, + "block3_mlp_wout_update_fnorm": 0.2405318170785904, + "block3_mlp_wout_max_l1_linf_norm": 0.3964277505874634, + "block3_mlp_wout_max_spectral_norm": 0.011400358751416206, + "block7_q_update_fnorm": 0.24144800007343292, + "block7_q_max_l1_linf_norm": 0.20764297246932983, + "block7_q_max_spectral_norm": 0.01204273197799921, + "block7_k_update_fnorm": 0.24621914327144623, + "block7_k_max_l1_linf_norm": 0.2091144323348999, + "block7_k_max_spectral_norm": 0.012036974541842937, + "block7_v_update_fnorm": 0.20237396657466888, + "block7_v_max_l1_linf_norm": 0.21573889255523682, + "block7_v_max_spectral_norm": 0.012034873478114605, + "block7_o_update_fnorm": 0.2475912868976593, + "block7_o_max_l1_linf_norm": 0.20679698884487152, + "block7_o_max_spectral_norm": 0.012044300325214863, + "block7_mlp_win_update_fnorm": 0.27432140707969666, + "block7_mlp_win_max_l1_linf_norm": 0.15357474982738495, + "block7_mlp_win_max_spectral_norm": 0.012056705541908741, + "block7_mlp_wout_update_fnorm": 0.2458934783935547, + "block7_mlp_wout_max_l1_linf_norm": 0.40642741322517395, + "block7_mlp_wout_max_spectral_norm": 0.011362355202436447, + "block11_q_update_fnorm": 0.2485351413488388, + "block11_q_max_l1_linf_norm": 0.2095993459224701, + "block11_q_max_spectral_norm": 0.012045553885400295, + "block11_k_update_fnorm": 0.25049832463264465, + "block11_k_max_l1_linf_norm": 0.21292871236801147, + "block11_k_max_spectral_norm": 0.012042723596096039, + "block11_v_update_fnorm": 0.24558494985103607, + "block11_v_max_l1_linf_norm": 0.20685358345508575, + "block11_v_max_spectral_norm": 0.012045873329043388, + "block11_o_update_fnorm": 0.2488219141960144, + "block11_o_max_l1_linf_norm": 0.21031580865383148, + "block11_o_max_spectral_norm": 0.012039962224662304, + "block11_mlp_win_update_fnorm": 0.23961137235164642, + "block11_mlp_win_max_l1_linf_norm": 0.16119137406349182, + "block11_mlp_win_max_spectral_norm": 0.011397995054721832, + "block11_mlp_wout_update_fnorm": 0.2368171513080597, + "block11_mlp_wout_max_l1_linf_norm": 0.39947080612182617, + "block11_mlp_wout_max_spectral_norm": 0.011364550329744816, + "total_sharpness": 0.00868809875100851, + "block_total_sharpness": 0.011419418267905712, + "v_norm_block": 1.9343739748001099, + "v_T_H_v_block": 0.0427292063832283, + "v_norm": 2.3464081287384033, + "ip_v_neg_g_hvp": 0.05345536395907402, + "cos_v_neg_g_hvp": 0.026824120432138443, + "g_hvp_norm": 0.8493021726608276, + "ip_v_neg_g_t": 0.056889310479164124, + "cos_v_neg_g_t": 0.030902322381734848, + "g_t_norm": 0.7845777869224548, + "g_norm": 0.8493021726608276, + "hv_norm": 1.5115023851394653, + "cos_v_hv": 0.013487127609550953, + "hg_norm": 88.90340423583984, + "cos_g_hg": 0.6744021773338318, + "v_parallel_norm": 0.0062981355004012585, + "v_perp_norm": 2.3463997840881348, + "embed_lm_head_v_norm": 1.3280924558639526, + "embed_lm_head_cos_v_neg_g": 0.03009454533457756, + "layer_1_v_norm": 0.5628696084022522, + "layer_1_cos_v_neg_g": 0.020732825621962547, + "layer_2_v_norm": 0.3958136737346649, + "layer_2_cos_v_neg_g": 0.033708736300468445, + "layer_3_v_norm": 0.4167684316635132, + "layer_3_cos_v_neg_g": 0.031503260135650635, + "layer_4_v_norm": 0.533392071723938, + "layer_4_cos_v_neg_g": 0.026538610458374023, + "layer_5_v_norm": 0.5800192356109619, + "layer_5_cos_v_neg_g": 
0.03152463957667351, + "layer_6_v_norm": 0.5919808745384216, + "layer_6_cos_v_neg_g": 0.03248896822333336, + "layer_7_v_norm": 0.5996241569519043, + "layer_7_cos_v_neg_g": 0.03277922049164772, + "layer_8_v_norm": 0.5975279808044434, + "layer_8_cos_v_neg_g": 0.031009666621685028, + "layer_9_v_norm": 0.595504641532898, + "layer_9_cos_v_neg_g": 0.03203386440873146, + "layer_10_v_norm": 0.5931458473205566, + "layer_10_cos_v_neg_g": 0.032129064202308655, + "layer_11_v_norm": 0.5829335451126099, + "layer_11_cos_v_neg_g": 0.04105440899729729, + "layer_12_v_norm": 0.6003309488296509, + "layer_12_cos_v_neg_g": 0.06674228608608246, + "block0_q_v_norm": 0.2424274981021881, + "block0_q_cos_v_neg_g": 0.03566249459981918, + "block0_k_v_norm": 0.23433832824230194, + "block0_k_cos_v_neg_g": 0.03407657518982887, + "block0_v_v_norm": 0.13655254244804382, + "block0_v_cos_v_neg_g": 0.039701130241155624, + "block0_o_v_norm": 0.216231569647789, + "block0_o_cos_v_neg_g": 0.04820888489484787, + "block0_mlp_win_v_norm": 0.2618808448314667, + "block0_mlp_win_cos_v_neg_g": 0.03516404703259468, + "block0_mlp_wout_v_norm": 0.26267552375793457, + "block0_mlp_wout_cos_v_neg_g": 0.0537637323141098, + "block3_q_v_norm": 0.19285768270492554, + "block3_q_cos_v_neg_g": 0.028728730976581573, + "block3_k_v_norm": 0.20002387464046478, + "block3_k_cos_v_neg_g": 0.037372130900621414, + "block3_v_v_norm": 0.1736154854297638, + "block3_v_cos_v_neg_g": 0.03165091574192047, + "block3_o_v_norm": 0.22224494814872742, + "block3_o_cos_v_neg_g": 0.03966521844267845, + "block3_mlp_win_v_norm": 0.2639123499393463, + "block3_mlp_win_cos_v_neg_g": 0.03282919153571129, + "block3_mlp_wout_v_norm": 0.2405318170785904, + "block3_mlp_wout_cos_v_neg_g": 0.10137288272380829, + "block7_q_v_norm": 0.24144800007343292, + "block7_q_cos_v_neg_g": 0.036787744611501694, + "block7_k_v_norm": 0.24621914327144623, + "block7_k_cos_v_neg_g": 0.08542075008153915, + "block7_v_v_norm": 0.20237396657466888, + "block7_v_cos_v_neg_g": 0.03625772148370743, + "block7_o_v_norm": 0.2475912868976593, + "block7_o_cos_v_neg_g": 0.07837376743555069, + "block7_mlp_win_v_norm": 0.27432140707969666, + "block7_mlp_win_cos_v_neg_g": 0.041200678795576096, + "block7_mlp_wout_v_norm": 0.2458934783935547, + "block7_mlp_wout_cos_v_neg_g": 0.12591026723384857, + "block11_q_v_norm": 0.2485351413488388, + "block11_q_cos_v_neg_g": 0.07963662594556808, + "block11_k_v_norm": 0.25049832463264465, + "block11_k_cos_v_neg_g": 0.09840237349271774, + "block11_v_v_norm": 0.24558494985103607, + "block11_v_cos_v_neg_g": 0.0555461086332798, + "block11_o_v_norm": 0.2488219141960144, + "block11_o_cos_v_neg_g": 0.08226147294044495, + "block11_mlp_win_v_norm": 0.23961137235164642, + "block11_mlp_win_cos_v_neg_g": 0.08567826449871063, + "block11_mlp_wout_v_norm": 0.2368171513080597, + "block11_mlp_wout_cos_v_neg_g": 0.08000563085079193, + "embed_lm_head_sharpness": 0.00045050011249259114, + "layer_1_sharpness": 0.013409432955086231, + "layer_2_sharpness": 0.006715971510857344, + "layer_3_sharpness": 0.003541733603924513, + "layer_4_sharpness": 0.0016233769711107016, + "layer_5_sharpness": 0.0013473050203174353, + "layer_6_sharpness": 0.0018098136642947793, + "layer_7_sharpness": 0.0018462988082319498, + "layer_8_sharpness": 0.0020216256380081177, + "layer_9_sharpness": 0.0009836035314947367, + "layer_10_sharpness": 0.0005587377236224711, + "layer_11_sharpness": 0.0005763170775026083, + "layer_12_sharpness": 0.0008402385283261538, + "block0_q_sharpness": 5.585205144598149e-05, + "block0_k_sharpness": 
0.00013751904771197587, + "block0_v_sharpness": 0.08868169039487839, + "block0_o_sharpness": 0.0016672281781211495, + "block0_mlp_win_sharpness": 0.0016196152428165078, + "block0_mlp_wout_sharpness": 0.0017834261525422335, + "block3_q_sharpness": 4.330218871473335e-05, + "block3_k_sharpness": 0.00045877377851866186, + "block3_v_sharpness": 0.005605421029031277, + "block3_o_sharpness": 0.0004812418483197689, + "block3_mlp_win_sharpness": 0.00017614841635804623, + "block3_mlp_wout_sharpness": 6.80649172863923e-05, + "block7_q_sharpness": 0.0001083554161596112, + "block7_k_sharpness": 0.00010316078260075301, + "block7_v_sharpness": 0.005466639529913664, + "block7_o_sharpness": 0.00013920168566983193, + "block7_mlp_win_sharpness": 0.000633909716270864, + "block7_mlp_wout_sharpness": 9.998360474128276e-05, + "block11_q_sharpness": 4.16092443629168e-05, + "block11_k_sharpness": 4.486121906666085e-05, + "block11_v_sharpness": 0.00020208591013215482, + "block11_o_sharpness": 3.450279837124981e-05, + "block11_mlp_win_sharpness": 0.00044601838453672826, + "block11_mlp_wout_sharpness": 0.0017422271193936467, + "sum_layer_numerators": 0.00989481288022264, + "block_diag_sharpness": 0.002644397297581079, + "cross_layer_sharpness": 0.008775020970324633 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_500.json new file mode 100644 index 0000000000000000000000000000000000000000..3156bfdbf2f9430c9f499629e1755a06cfbc3a48 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.7237763404846191, + "total_l1_linf_norm": 14889.7099609375, + "total_spectral_norm": 1.7237765789031982, + "embed_lm_head_update_fnorm": 0.974010169506073, + "embed_lm_head_max_l1_linf_norm": 0.23947611451148987, + "embed_lm_head_max_spectral_norm": 0.22600649297237396, + "layer_1_update_fnorm": 0.43299901485443115, + "layer_1_max_l1_linf_norm": 0.31714552640914917, + "layer_1_max_spectral_norm": 0.0086057735607028, + "layer_2_update_fnorm": 0.4090450406074524, + "layer_2_max_l1_linf_norm": 0.31631356477737427, + "layer_2_max_spectral_norm": 0.00860710721462965, + "layer_3_update_fnorm": 0.39037007093429565, + "layer_3_max_l1_linf_norm": 0.31624433398246765, + "layer_3_max_spectral_norm": 0.008607831783592701, + "layer_4_update_fnorm": 0.37999671697616577, + "layer_4_max_l1_linf_norm": 0.31400659680366516, + "layer_4_max_spectral_norm": 0.008607553318142891, + "layer_5_update_fnorm": 0.4028443396091461, + "layer_5_max_l1_linf_norm": 0.30619966983795166, + "layer_5_max_spectral_norm": 0.00860789604485035, + "layer_6_update_fnorm": 0.3980216681957245, + "layer_6_max_l1_linf_norm": 0.3039093613624573, + "layer_6_max_spectral_norm": 0.008611648343503475, + "layer_7_update_fnorm": 0.41380488872528076, + "layer_7_max_l1_linf_norm": 0.29992061853408813, + "layer_7_max_spectral_norm": 0.008608734235167503, + "layer_8_update_fnorm": 0.40642049908638, + "layer_8_max_l1_linf_norm": 0.3004567623138428, + "layer_8_max_spectral_norm": 0.008615219965577126, + "layer_9_update_fnorm": 0.41969916224479675, + "layer_9_max_l1_linf_norm": 0.30352237820625305, + "layer_9_max_spectral_norm": 0.008615270256996155, + "layer_10_update_fnorm": 0.419710636138916, + "layer_10_max_l1_linf_norm": 
0.299823135137558, + "layer_10_max_spectral_norm": 0.008614664897322655, + "layer_11_update_fnorm": 0.4253322184085846, + "layer_11_max_l1_linf_norm": 0.2995547354221344, + "layer_11_max_spectral_norm": 0.008615000173449516, + "layer_12_update_fnorm": 0.4251922070980072, + "layer_12_max_l1_linf_norm": 0.29641038179397583, + "layer_12_max_spectral_norm": 0.008604028262197971, + "block0_q_update_fnorm": 0.17667368054389954, + "block0_q_max_l1_linf_norm": 0.1484755426645279, + "block0_q_max_spectral_norm": 0.008605568669736385, + "block0_k_update_fnorm": 0.1764029562473297, + "block0_k_max_l1_linf_norm": 0.1477310061454773, + "block0_k_max_spectral_norm": 0.008602505549788475, + "block0_v_update_fnorm": 0.169058158993721, + "block0_v_max_l1_linf_norm": 0.14575523138046265, + "block0_v_max_spectral_norm": 0.008600609377026558, + "block0_o_update_fnorm": 0.173315167427063, + "block0_o_max_l1_linf_norm": 0.14535702764987946, + "block0_o_max_spectral_norm": 0.008597057312726974, + "block0_mlp_win_update_fnorm": 0.1727529615163803, + "block0_mlp_win_max_l1_linf_norm": 0.0909615010023117, + "block0_mlp_win_max_spectral_norm": 0.008390801958739758, + "block0_mlp_wout_update_fnorm": 0.19146643579006195, + "block0_mlp_wout_max_l1_linf_norm": 0.31714552640914917, + "block0_mlp_wout_max_spectral_norm": 0.0086057735607028, + "block3_q_update_fnorm": 0.13766008615493774, + "block3_q_max_l1_linf_norm": 0.14824087917804718, + "block3_q_max_spectral_norm": 0.008594133891165257, + "block3_k_update_fnorm": 0.1211576759815216, + "block3_k_max_l1_linf_norm": 0.1539197862148285, + "block3_k_max_spectral_norm": 0.008596482686698437, + "block3_v_update_fnorm": 0.13315749168395996, + "block3_v_max_l1_linf_norm": 0.14334452152252197, + "block3_v_max_spectral_norm": 0.008595927618443966, + "block3_o_update_fnorm": 0.14441898465156555, + "block3_o_max_l1_linf_norm": 0.12407685071229935, + "block3_o_max_spectral_norm": 0.008594566024839878, + "block3_mlp_win_update_fnorm": 0.1905510276556015, + "block3_mlp_win_max_l1_linf_norm": 0.11120449751615524, + "block3_mlp_win_max_spectral_norm": 0.008607553318142891, + "block3_mlp_wout_update_fnorm": 0.18925362825393677, + "block3_mlp_wout_max_l1_linf_norm": 0.31400659680366516, + "block3_mlp_wout_max_spectral_norm": 0.008601176552474499, + "block7_q_update_fnorm": 0.148058220744133, + "block7_q_max_l1_linf_norm": 0.1525503396987915, + "block7_q_max_spectral_norm": 0.008598214015364647, + "block7_k_update_fnorm": 0.144677072763443, + "block7_k_max_l1_linf_norm": 0.1569558084011078, + "block7_k_max_spectral_norm": 0.0085984505712986, + "block7_v_update_fnorm": 0.1625939905643463, + "block7_v_max_l1_linf_norm": 0.15412338078022003, + "block7_v_max_spectral_norm": 0.008599084801971912, + "block7_o_update_fnorm": 0.16932828724384308, + "block7_o_max_l1_linf_norm": 0.1429280936717987, + "block7_o_max_spectral_norm": 0.00859480444341898, + "block7_mlp_win_update_fnorm": 0.18494556844234467, + "block7_mlp_win_max_l1_linf_norm": 0.11628765612840652, + "block7_mlp_win_max_spectral_norm": 0.008603550493717194, + "block7_mlp_wout_update_fnorm": 0.18158099055290222, + "block7_mlp_wout_max_l1_linf_norm": 0.3004567623138428, + "block7_mlp_wout_max_spectral_norm": 0.008615219965577126, + "block11_q_update_fnorm": 0.1674526333808899, + "block11_q_max_l1_linf_norm": 0.15054285526275635, + "block11_q_max_spectral_norm": 0.008600768633186817, + "block11_k_update_fnorm": 0.16816827654838562, + "block11_k_max_l1_linf_norm": 0.15145955979824066, + "block11_k_max_spectral_norm": 0.008596867322921753, 
+ "block11_v_update_fnorm": 0.16775937378406525, + "block11_v_max_l1_linf_norm": 0.14795878529548645, + "block11_v_max_spectral_norm": 0.008600780740380287, + "block11_o_update_fnorm": 0.1735207587480545, + "block11_o_max_l1_linf_norm": 0.14655207097530365, + "block11_o_max_spectral_norm": 0.008597737178206444, + "block11_mlp_win_update_fnorm": 0.1857527494430542, + "block11_mlp_win_max_l1_linf_norm": 0.09778895974159241, + "block11_mlp_win_max_spectral_norm": 0.008604028262197971, + "block11_mlp_wout_update_fnorm": 0.177994504570961, + "block11_mlp_wout_max_l1_linf_norm": 0.29641038179397583, + "block11_mlp_wout_max_spectral_norm": 0.008600900880992413, + "total_sharpness": 0.041979074478149414, + "block_total_sharpness": 0.05738958716392517, + "v_norm_block": 1.4222198724746704, + "v_T_H_v_block": 0.11608245223760605, + "v_norm": 1.7237763404846191, + "ip_v_neg_g_hvp": 0.10224564373493195, + "cos_v_neg_g_hvp": 0.05966709181666374, + "g_hvp_norm": 0.9940975904464722, + "ip_v_neg_g_t": 0.10256951302289963, + "cos_v_neg_g_t": 0.06416549533605576, + "g_t_norm": 0.9273332953453064, + "g_norm": 0.9940975904464722, + "hv_norm": 1.883240818977356, + "cos_v_hv": 0.038424476981163025, + "hg_norm": 74.2394027709961, + "cos_g_hg": 0.6006019711494446, + "v_parallel_norm": 0.004476515576243401, + "v_perp_norm": 1.7237706184387207, + "embed_lm_head_v_norm": 0.974010169506073, + "embed_lm_head_cos_v_neg_g": 0.09484206140041351, + "layer_1_v_norm": 0.43299901485443115, + "layer_1_cos_v_neg_g": 0.06312371045351028, + "layer_2_v_norm": 0.4090450406074524, + "layer_2_cos_v_neg_g": 0.0655670166015625, + "layer_3_v_norm": 0.39037010073661804, + "layer_3_cos_v_neg_g": 0.05823420733213425, + "layer_4_v_norm": 0.37999671697616577, + "layer_4_cos_v_neg_g": 0.06756263971328735, + "layer_5_v_norm": 0.4028443396091461, + "layer_5_cos_v_neg_g": 0.06966914236545563, + "layer_6_v_norm": 0.3980216979980469, + "layer_6_cos_v_neg_g": 0.07825513184070587, + "layer_7_v_norm": 0.41380488872528076, + "layer_7_cos_v_neg_g": 0.08250871300697327, + "layer_8_v_norm": 0.40642049908638, + "layer_8_cos_v_neg_g": 0.08464154601097107, + "layer_9_v_norm": 0.41969916224479675, + "layer_9_cos_v_neg_g": 0.08433307707309723, + "layer_10_v_norm": 0.419710636138916, + "layer_10_cos_v_neg_g": 0.08100783079862595, + "layer_11_v_norm": 0.425332248210907, + "layer_11_cos_v_neg_g": 0.07532904297113419, + "layer_12_v_norm": 0.4251922070980072, + "layer_12_cos_v_neg_g": 0.06737631559371948, + "block0_q_v_norm": 0.17667368054389954, + "block0_q_cos_v_neg_g": 0.0753159448504448, + "block0_k_v_norm": 0.1764029562473297, + "block0_k_cos_v_neg_g": 0.07153994590044022, + "block0_v_v_norm": 0.169058158993721, + "block0_v_cos_v_neg_g": 0.058279529213905334, + "block0_o_v_norm": 0.173315167427063, + "block0_o_cos_v_neg_g": 0.07332087308168411, + "block0_mlp_win_v_norm": 0.1727529615163803, + "block0_mlp_win_cos_v_neg_g": 0.11098863184452057, + "block0_mlp_wout_v_norm": 0.19146643579006195, + "block0_mlp_wout_cos_v_neg_g": 0.09715379029512405, + "block3_q_v_norm": 0.13766008615493774, + "block3_q_cos_v_neg_g": 0.07247291505336761, + "block3_k_v_norm": 0.1211576759815216, + "block3_k_cos_v_neg_g": 0.07989950478076935, + "block3_v_v_norm": 0.13315749168395996, + "block3_v_cos_v_neg_g": 0.06692159175872803, + "block3_o_v_norm": 0.14441898465156555, + "block3_o_cos_v_neg_g": 0.06650136411190033, + "block3_mlp_win_v_norm": 0.1905510276556015, + "block3_mlp_win_cos_v_neg_g": 0.076374351978302, + "block3_mlp_wout_v_norm": 0.18925362825393677, + 
"block3_mlp_wout_cos_v_neg_g": 0.09371190518140793, + "block7_q_v_norm": 0.148058220744133, + "block7_q_cos_v_neg_g": 0.08228248357772827, + "block7_k_v_norm": 0.144677072763443, + "block7_k_cos_v_neg_g": 0.08565381914377213, + "block7_v_v_norm": 0.1625939905643463, + "block7_v_cos_v_neg_g": 0.07427842170000076, + "block7_o_v_norm": 0.16932828724384308, + "block7_o_cos_v_neg_g": 0.06796811521053314, + "block7_mlp_win_v_norm": 0.18494556844234467, + "block7_mlp_win_cos_v_neg_g": 0.10593974590301514, + "block7_mlp_wout_v_norm": 0.18158099055290222, + "block7_mlp_wout_cos_v_neg_g": 0.11083604395389557, + "block11_q_v_norm": 0.1674526333808899, + "block11_q_cos_v_neg_g": 0.07195010781288147, + "block11_k_v_norm": 0.16816827654838562, + "block11_k_cos_v_neg_g": 0.08077285438776016, + "block11_v_v_norm": 0.16775937378406525, + "block11_v_cos_v_neg_g": 0.06943648308515549, + "block11_o_v_norm": 0.1735207587480545, + "block11_o_cos_v_neg_g": 0.08309151232242584, + "block11_mlp_win_v_norm": 0.1857527494430542, + "block11_mlp_win_cos_v_neg_g": 0.09038231521844864, + "block11_mlp_wout_v_norm": 0.177994504570961, + "block11_mlp_wout_cos_v_neg_g": 0.08826135098934174, + "embed_lm_head_sharpness": 0.0009018682758323848, + "layer_1_sharpness": 0.030135076493024826, + "layer_2_sharpness": 0.012832725420594215, + "layer_3_sharpness": 0.013658403418958187, + "layer_4_sharpness": 0.013074172660708427, + "layer_5_sharpness": 0.009321502409875393, + "layer_6_sharpness": 0.006679065991193056, + "layer_7_sharpness": 0.004535517189651728, + "layer_8_sharpness": 0.003115002065896988, + "layer_9_sharpness": 0.0024652741849422455, + "layer_10_sharpness": 0.0016274662921205163, + "layer_11_sharpness": 0.001250717556104064, + "layer_12_sharpness": 0.00113786687143147, + "block0_q_sharpness": 0.0004134426126256585, + "block0_k_sharpness": 0.0007034185691736639, + "block0_v_sharpness": 0.013720914721488953, + "block0_o_sharpness": 0.005110191181302071, + "block0_mlp_win_sharpness": 0.007013348396867514, + "block0_mlp_wout_sharpness": 0.02351090870797634, + "block3_q_sharpness": 0.0002821256930474192, + "block3_k_sharpness": 0.006698787212371826, + "block3_v_sharpness": 0.008074118755757809, + "block3_o_sharpness": 0.006073120515793562, + "block3_mlp_win_sharpness": 0.001252901740372181, + "block3_mlp_wout_sharpness": 0.0038535490166395903, + "block7_q_sharpness": 0.00030264686211012304, + "block7_k_sharpness": 0.0005530675989575684, + "block7_v_sharpness": 0.0013050396228209138, + "block7_o_sharpness": 0.0006995528237894177, + "block7_mlp_win_sharpness": 0.0005903025157749653, + "block7_mlp_wout_sharpness": 0.0023604142479598522, + "block11_q_sharpness": 4.194415669189766e-05, + "block11_k_sharpness": 9.36352662392892e-05, + "block11_v_sharpness": 0.0003164680674672127, + "block11_o_sharpness": 0.00014119312982074916, + "block11_mlp_win_sharpness": 0.00026596421957947314, + "block11_mlp_wout_sharpness": 0.0017317681340500712, + "sum_layer_numerators": 0.016781296349830496, + "block_diag_sharpness": 0.008296444676983709, + "cross_layer_sharpness": 0.049093142486941466 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5000.json new file mode 100644 index 0000000000000000000000000000000000000000..f0492628a2448b520a56a91e6c431a03cdf2b8f7 --- /dev/null +++ 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3486227989196777, + "total_l1_linf_norm": 20018.71484375, + "total_spectral_norm": 2.348623037338257, + "embed_lm_head_update_fnorm": 1.335641622543335, + "embed_lm_head_max_l1_linf_norm": 0.35147497057914734, + "embed_lm_head_max_spectral_norm": 0.1942535638809204, + "layer_1_update_fnorm": 0.5496317744255066, + "layer_1_max_l1_linf_norm": 0.43638771772384644, + "layer_1_max_spectral_norm": 0.012047197669744492, + "layer_2_update_fnorm": 0.40305396914482117, + "layer_2_max_l1_linf_norm": 0.3806273639202118, + "layer_2_max_spectral_norm": 0.012931516394019127, + "layer_3_update_fnorm": 0.4207814931869507, + "layer_3_max_l1_linf_norm": 0.4535190463066101, + "layer_3_max_spectral_norm": 0.017856402322649956, + "layer_4_update_fnorm": 0.5237335562705994, + "layer_4_max_l1_linf_norm": 0.39696991443634033, + "layer_4_max_spectral_norm": 0.014365802519023418, + "layer_5_update_fnorm": 0.5760722160339355, + "layer_5_max_l1_linf_norm": 0.40276724100112915, + "layer_5_max_spectral_norm": 0.01204413827508688, + "layer_6_update_fnorm": 0.5897384285926819, + "layer_6_max_l1_linf_norm": 0.4052271246910095, + "layer_6_max_spectral_norm": 0.012045517563819885, + "layer_7_update_fnorm": 0.5998094081878662, + "layer_7_max_l1_linf_norm": 0.40749311447143555, + "layer_7_max_spectral_norm": 0.012047900818288326, + "layer_8_update_fnorm": 0.5989722013473511, + "layer_8_max_l1_linf_norm": 0.40651851892471313, + "layer_8_max_spectral_norm": 0.012054563499987125, + "layer_9_update_fnorm": 0.5978046655654907, + "layer_9_max_l1_linf_norm": 0.40948569774627686, + "layer_9_max_spectral_norm": 0.012046092189848423, + "layer_10_update_fnorm": 0.597928524017334, + "layer_10_max_l1_linf_norm": 0.4141569137573242, + "layer_10_max_spectral_norm": 0.012063226662576199, + "layer_11_update_fnorm": 0.5851102471351624, + "layer_11_max_l1_linf_norm": 0.40425702929496765, + "layer_11_max_spectral_norm": 0.012043768540024757, + "layer_12_update_fnorm": 0.6006309390068054, + "layer_12_max_l1_linf_norm": 0.40038901567459106, + "layer_12_max_spectral_norm": 0.012045880779623985, + "block0_q_update_fnorm": 0.2360856682062149, + "block0_q_max_l1_linf_norm": 0.2056228518486023, + "block0_q_max_spectral_norm": 0.012039502151310444, + "block0_k_update_fnorm": 0.20351170003414154, + "block0_k_max_l1_linf_norm": 0.21299554407596588, + "block0_k_max_spectral_norm": 0.012032789178192616, + "block0_v_update_fnorm": 0.1384708285331726, + "block0_v_max_l1_linf_norm": 0.1543479561805725, + "block0_v_max_spectral_norm": 0.012026222422719002, + "block0_o_update_fnorm": 0.2136867493391037, + "block0_o_max_l1_linf_norm": 0.18664580583572388, + "block0_o_max_spectral_norm": 0.012038442306220531, + "block0_mlp_win_update_fnorm": 0.2644968032836914, + "block0_mlp_win_max_l1_linf_norm": 0.1782326102256775, + "block0_mlp_win_max_spectral_norm": 0.012047197669744492, + "block0_mlp_wout_update_fnorm": 0.2645910084247589, + "block0_mlp_wout_max_l1_linf_norm": 0.43638771772384644, + "block0_mlp_wout_max_spectral_norm": 0.012042089365422726, + "block3_q_update_fnorm": 0.18890711665153503, + "block3_q_max_l1_linf_norm": 0.2063615620136261, + "block3_q_max_spectral_norm": 0.012032915838062763, + "block3_k_update_fnorm": 0.195642352104187, + "block3_k_max_l1_linf_norm": 0.20995649695396423, + "block3_k_max_spectral_norm": 0.012036887928843498, + "block3_v_update_fnorm": 0.16389916837215424, + 
"block3_v_max_l1_linf_norm": 0.18599915504455566, + "block3_v_max_spectral_norm": 0.012027254328131676, + "block3_o_update_fnorm": 0.2165994644165039, + "block3_o_max_l1_linf_norm": 0.1817580759525299, + "block3_o_max_spectral_norm": 0.012035230174660683, + "block3_mlp_win_update_fnorm": 0.26316896080970764, + "block3_mlp_win_max_l1_linf_norm": 0.17508471012115479, + "block3_mlp_win_max_spectral_norm": 0.01204135362058878, + "block3_mlp_wout_update_fnorm": 0.2385873794555664, + "block3_mlp_wout_max_l1_linf_norm": 0.39501240849494934, + "block3_mlp_wout_max_spectral_norm": 0.011404735036194324, + "block7_q_update_fnorm": 0.24059973657131195, + "block7_q_max_l1_linf_norm": 0.20721784234046936, + "block7_q_max_spectral_norm": 0.012039963155984879, + "block7_k_update_fnorm": 0.24604235589504242, + "block7_k_max_l1_linf_norm": 0.20839878916740417, + "block7_k_max_spectral_norm": 0.01203902903944254, + "block7_v_update_fnorm": 0.2075999230146408, + "block7_v_max_l1_linf_norm": 0.2120172083377838, + "block7_v_max_spectral_norm": 0.012036116793751717, + "block7_o_update_fnorm": 0.24798324704170227, + "block7_o_max_l1_linf_norm": 0.20643854141235352, + "block7_o_max_spectral_norm": 0.01203931961208582, + "block7_mlp_win_update_fnorm": 0.27433228492736816, + "block7_mlp_win_max_l1_linf_norm": 0.1531572937965393, + "block7_mlp_win_max_spectral_norm": 0.012054563499987125, + "block7_mlp_wout_update_fnorm": 0.24569086730480194, + "block7_mlp_wout_max_l1_linf_norm": 0.40651851892471313, + "block7_mlp_wout_max_spectral_norm": 0.011375464498996735, + "block11_q_update_fnorm": 0.24824579060077667, + "block11_q_max_l1_linf_norm": 0.21174398064613342, + "block11_q_max_spectral_norm": 0.012045880779623985, + "block11_k_update_fnorm": 0.2503015398979187, + "block11_k_max_l1_linf_norm": 0.21273112297058105, + "block11_k_max_spectral_norm": 0.012036185711622238, + "block11_v_update_fnorm": 0.24614515900611877, + "block11_v_max_l1_linf_norm": 0.207351952791214, + "block11_v_max_spectral_norm": 0.012043612077832222, + "block11_o_update_fnorm": 0.24900712072849274, + "block11_o_max_l1_linf_norm": 0.20813629031181335, + "block11_o_max_spectral_norm": 0.012038512155413628, + "block11_mlp_win_update_fnorm": 0.239908829331398, + "block11_mlp_win_max_l1_linf_norm": 0.15118388831615448, + "block11_mlp_win_max_spectral_norm": 0.011401191353797913, + "block11_mlp_wout_update_fnorm": 0.2370145171880722, + "block11_mlp_wout_max_l1_linf_norm": 0.39214563369750977, + "block11_mlp_wout_max_spectral_norm": 0.011358262971043587, + "total_sharpness": 0.005217598285526037, + "block_total_sharpness": 0.006878597661852837, + "v_norm_block": 1.9318623542785645, + "v_T_H_v_block": 0.02567156031727791, + "v_norm": 2.3486227989196777, + "ip_v_neg_g_hvp": 0.044075850397348404, + "cos_v_neg_g_hvp": 0.027591299265623093, + "g_hvp_norm": 0.6801665425300598, + "ip_v_neg_g_t": 0.046626027673482895, + "cos_v_neg_g_t": 0.03135710209608078, + "g_t_norm": 0.6331100463867188, + "g_norm": 0.6801665425300598, + "hv_norm": 1.0670403242111206, + "cos_v_hv": 0.011484261602163315, + "hg_norm": 238.7969512939453, + "cos_g_hg": 0.023649223148822784, + "v_parallel_norm": 0.00463150255382061, + "v_perp_norm": 2.348618268966675, + "embed_lm_head_v_norm": 1.335641622543335, + "embed_lm_head_cos_v_neg_g": 0.05240095406770706, + "layer_1_v_norm": 0.5496317744255066, + "layer_1_cos_v_neg_g": 0.017904987558722496, + "layer_2_v_norm": 0.40305396914482117, + "layer_2_cos_v_neg_g": 0.02135714516043663, + "layer_3_v_norm": 0.4207814633846283, + "layer_3_cos_v_neg_g": 
0.027175823226571083, + "layer_4_v_norm": 0.5237335562705994, + "layer_4_cos_v_neg_g": 0.024045687168836594, + "layer_5_v_norm": 0.5760722160339355, + "layer_5_cos_v_neg_g": 0.028958410024642944, + "layer_6_v_norm": 0.5897384285926819, + "layer_6_cos_v_neg_g": 0.029173484072089195, + "layer_7_v_norm": 0.5998094081878662, + "layer_7_cos_v_neg_g": 0.029612792655825615, + "layer_8_v_norm": 0.5989722013473511, + "layer_8_cos_v_neg_g": 0.029100289568305016, + "layer_9_v_norm": 0.5978046655654907, + "layer_9_cos_v_neg_g": 0.030799182131886482, + "layer_10_v_norm": 0.597928524017334, + "layer_10_cos_v_neg_g": 0.03181708976626396, + "layer_11_v_norm": 0.5851102471351624, + "layer_11_cos_v_neg_g": 0.04015708342194557, + "layer_12_v_norm": 0.6006309390068054, + "layer_12_cos_v_neg_g": 0.0724390298128128, + "block0_q_v_norm": 0.2360856682062149, + "block0_q_cos_v_neg_g": 0.04445492848753929, + "block0_k_v_norm": 0.20351170003414154, + "block0_k_cos_v_neg_g": 0.030431227758526802, + "block0_v_v_norm": 0.1384708285331726, + "block0_v_cos_v_neg_g": 0.02735775336623192, + "block0_o_v_norm": 0.2136867493391037, + "block0_o_cos_v_neg_g": 0.037274397909641266, + "block0_mlp_win_v_norm": 0.2644968032836914, + "block0_mlp_win_cos_v_neg_g": 0.025932740420103073, + "block0_mlp_wout_v_norm": 0.2645910084247589, + "block0_mlp_wout_cos_v_neg_g": 0.043862320482730865, + "block3_q_v_norm": 0.18890711665153503, + "block3_q_cos_v_neg_g": 0.01839177869260311, + "block3_k_v_norm": 0.195642352104187, + "block3_k_cos_v_neg_g": 0.03291640803217888, + "block3_v_v_norm": 0.16389916837215424, + "block3_v_cos_v_neg_g": 0.02867596410214901, + "block3_o_v_norm": 0.2165994644165039, + "block3_o_cos_v_neg_g": 0.03193789720535278, + "block3_mlp_win_v_norm": 0.26316896080970764, + "block3_mlp_win_cos_v_neg_g": 0.02863931655883789, + "block3_mlp_wout_v_norm": 0.2385873794555664, + "block3_mlp_wout_cos_v_neg_g": 0.09928850829601288, + "block7_q_v_norm": 0.24059973657131195, + "block7_q_cos_v_neg_g": 0.03372516483068466, + "block7_k_v_norm": 0.24604235589504242, + "block7_k_cos_v_neg_g": 0.07870688289403915, + "block7_v_v_norm": 0.2075999230146408, + "block7_v_cos_v_neg_g": 0.030803969129920006, + "block7_o_v_norm": 0.24798324704170227, + "block7_o_cos_v_neg_g": 0.07498970627784729, + "block7_mlp_win_v_norm": 0.27433228492736816, + "block7_mlp_win_cos_v_neg_g": 0.037600502371788025, + "block7_mlp_wout_v_norm": 0.24569086730480194, + "block7_mlp_wout_cos_v_neg_g": 0.1208207979798317, + "block11_q_v_norm": 0.24824579060077667, + "block11_q_cos_v_neg_g": 0.07564085721969604, + "block11_k_v_norm": 0.2503015398979187, + "block11_k_cos_v_neg_g": 0.09650107473134995, + "block11_v_v_norm": 0.24614515900611877, + "block11_v_cos_v_neg_g": 0.04995233938097954, + "block11_o_v_norm": 0.24900712072849274, + "block11_o_cos_v_neg_g": 0.08440619707107544, + "block11_mlp_win_v_norm": 0.239908829331398, + "block11_mlp_win_cos_v_neg_g": 0.09947456419467926, + "block11_mlp_wout_v_norm": 0.2370145171880722, + "block11_mlp_wout_cos_v_neg_g": 0.08399808406829834, + "embed_lm_head_sharpness": 0.00039236267912201583, + "layer_1_sharpness": 0.004471370484679937, + "layer_2_sharpness": 0.0027320338413119316, + "layer_3_sharpness": 0.002988499589264393, + "layer_4_sharpness": 0.001570075168274343, + "layer_5_sharpness": 0.0010736758122220635, + "layer_6_sharpness": 0.0015180849004536867, + "layer_7_sharpness": 0.0015339836245402694, + "layer_8_sharpness": 0.001408225973136723, + "layer_9_sharpness": 0.0007907985709607601, + "layer_10_sharpness": 
0.0004436550661921501, + "layer_11_sharpness": 0.0004433617286849767, + "layer_12_sharpness": 0.00034634594339877367, + "block0_q_sharpness": 0.00012453197268769145, + "block0_k_sharpness": 0.00016513328591827303, + "block0_v_sharpness": 0.010934650897979736, + "block0_o_sharpness": 0.0010238379472866654, + "block0_mlp_win_sharpness": 0.0005625325138680637, + "block0_mlp_wout_sharpness": 0.0009723909897729754, + "block3_q_sharpness": 3.0112447348074056e-05, + "block3_k_sharpness": 0.0008732916321605444, + "block3_v_sharpness": 0.005588240921497345, + "block3_o_sharpness": 0.0003675954940263182, + "block3_mlp_win_sharpness": 0.00015181316121015698, + "block3_mlp_wout_sharpness": 6.280509114731103e-05, + "block7_q_sharpness": 0.000102258323749993, + "block7_k_sharpness": 0.0001025852034217678, + "block7_v_sharpness": 0.004096043296158314, + "block7_o_sharpness": 9.674524335423484e-05, + "block7_mlp_win_sharpness": 0.00044534888002090156, + "block7_mlp_wout_sharpness": 8.508296741638333e-05, + "block11_q_sharpness": 7.125834963517264e-05, + "block11_k_sharpness": 5.243528357823379e-05, + "block11_v_sharpness": 0.00019711536879185587, + "block11_o_sharpness": 3.2710777304600924e-05, + "block11_mlp_win_sharpness": 0.0001637964160181582, + "block11_mlp_wout_sharpness": 0.0004611052863765508, + "sum_layer_numerators": 0.005413758223565197, + "block_diag_sharpness": 0.0014505960725105763, + "cross_layer_sharpness": 0.005428001589342261 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5500.json new file mode 100644 index 0000000000000000000000000000000000000000..b061a9ab89d6eeb806aef8f74d21ee1b45a21b27 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_5500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.3351807594299316, + "total_l1_linf_norm": 19888.76171875, + "total_spectral_norm": 2.3351809978485107, + "embed_lm_head_update_fnorm": 1.3362165689468384, + "embed_lm_head_max_l1_linf_norm": 0.3559502065181732, + "embed_lm_head_max_spectral_norm": 0.19004251062870026, + "layer_1_update_fnorm": 0.5132110118865967, + "layer_1_max_l1_linf_norm": 0.42565464973449707, + "layer_1_max_spectral_norm": 0.012037552893161774, + "layer_2_update_fnorm": 0.3965786397457123, + "layer_2_max_l1_linf_norm": 0.40506690740585327, + "layer_2_max_spectral_norm": 0.012980463914573193, + "layer_3_update_fnorm": 0.4257800579071045, + "layer_3_max_l1_linf_norm": 0.4495704770088196, + "layer_3_max_spectral_norm": 0.017947522923350334, + "layer_4_update_fnorm": 0.5164316296577454, + "layer_4_max_l1_linf_norm": 0.39352163672447205, + "layer_4_max_spectral_norm": 0.01574278622865677, + "layer_5_update_fnorm": 0.5766428112983704, + "layer_5_max_l1_linf_norm": 0.40394824743270874, + "layer_5_max_spectral_norm": 0.012041431851685047, + "layer_6_update_fnorm": 0.5879071354866028, + "layer_6_max_l1_linf_norm": 0.4057358205318451, + "layer_6_max_spectral_norm": 0.012044481933116913, + "layer_7_update_fnorm": 0.5976738929748535, + "layer_7_max_l1_linf_norm": 0.4024779796600342, + "layer_7_max_spectral_norm": 0.012046580202877522, + "layer_8_update_fnorm": 0.5983331203460693, + "layer_8_max_l1_linf_norm": 0.40475770831108093, + "layer_8_max_spectral_norm": 0.012051143683493137, + "layer_9_update_fnorm": 0.5965010523796082, 
+ "layer_9_max_l1_linf_norm": 0.40756410360336304, + "layer_9_max_spectral_norm": 0.012047743424773216, + "layer_10_update_fnorm": 0.595177412033081, + "layer_10_max_l1_linf_norm": 0.4129799008369446, + "layer_10_max_spectral_norm": 0.012049161829054356, + "layer_11_update_fnorm": 0.5801153779029846, + "layer_11_max_l1_linf_norm": 0.40514981746673584, + "layer_11_max_spectral_norm": 0.01204568799585104, + "layer_12_update_fnorm": 0.5991606116294861, + "layer_12_max_l1_linf_norm": 0.40496766567230225, + "layer_12_max_spectral_norm": 0.012045927345752716, + "block0_q_update_fnorm": 0.22867995500564575, + "block0_q_max_l1_linf_norm": 0.21260672807693481, + "block0_q_max_spectral_norm": 0.012037552893161774, + "block0_k_update_fnorm": 0.17943565547466278, + "block0_k_max_l1_linf_norm": 0.20875656604766846, + "block0_k_max_spectral_norm": 0.012029451318085194, + "block0_v_update_fnorm": 0.12079408019781113, + "block0_v_max_l1_linf_norm": 0.1454632580280304, + "block0_v_max_spectral_norm": 0.01202609483152628, + "block0_o_update_fnorm": 0.1950288861989975, + "block0_o_max_l1_linf_norm": 0.17762000858783722, + "block0_o_max_spectral_norm": 0.012035987339913845, + "block0_mlp_win_update_fnorm": 0.24396367371082306, + "block0_mlp_win_max_l1_linf_norm": 0.1802373230457306, + "block0_mlp_win_max_spectral_norm": 0.012034215964376926, + "block0_mlp_wout_update_fnorm": 0.25809937715530396, + "block0_mlp_wout_max_l1_linf_norm": 0.42565464973449707, + "block0_mlp_wout_max_spectral_norm": 0.012036245316267014, + "block3_q_update_fnorm": 0.17698997259140015, + "block3_q_max_l1_linf_norm": 0.20526939630508423, + "block3_q_max_spectral_norm": 0.012031158432364464, + "block3_k_update_fnorm": 0.18698133528232574, + "block3_k_max_l1_linf_norm": 0.21129494905471802, + "block3_k_max_spectral_norm": 0.012035432271659374, + "block3_v_update_fnorm": 0.16820959746837616, + "block3_v_max_l1_linf_norm": 0.18535438179969788, + "block3_v_max_spectral_norm": 0.012027882970869541, + "block3_o_update_fnorm": 0.21347494423389435, + "block3_o_max_l1_linf_norm": 0.17988920211791992, + "block3_o_max_spectral_norm": 0.012034802697598934, + "block3_mlp_win_update_fnorm": 0.26284345984458923, + "block3_mlp_win_max_l1_linf_norm": 0.17992758750915527, + "block3_mlp_win_max_spectral_norm": 0.012045010924339294, + "block3_mlp_wout_update_fnorm": 0.23878693580627441, + "block3_mlp_wout_max_l1_linf_norm": 0.39352163672447205, + "block3_mlp_wout_max_spectral_norm": 0.01140272431075573, + "block7_q_update_fnorm": 0.24144957959651947, + "block7_q_max_l1_linf_norm": 0.20991995930671692, + "block7_q_max_spectral_norm": 0.012044046074151993, + "block7_k_update_fnorm": 0.24640142917633057, + "block7_k_max_l1_linf_norm": 0.2076563835144043, + "block7_k_max_spectral_norm": 0.01204405166208744, + "block7_v_update_fnorm": 0.20109377801418304, + "block7_v_max_l1_linf_norm": 0.21055248379707336, + "block7_v_max_spectral_norm": 0.012036006897687912, + "block7_o_update_fnorm": 0.2475859671831131, + "block7_o_max_l1_linf_norm": 0.20624250173568726, + "block7_o_max_spectral_norm": 0.012048705480992794, + "block7_mlp_win_update_fnorm": 0.27775371074676514, + "block7_mlp_win_max_l1_linf_norm": 0.15979225933551788, + "block7_mlp_win_max_spectral_norm": 0.012051143683493137, + "block7_mlp_wout_update_fnorm": 0.2449059635400772, + "block7_mlp_wout_max_l1_linf_norm": 0.40475770831108093, + "block7_mlp_wout_max_spectral_norm": 0.01137456763535738, + "block11_q_update_fnorm": 0.24811729788780212, + "block11_q_max_l1_linf_norm": 0.2124408483505249, + 
"block11_q_max_spectral_norm": 0.012038584798574448, + "block11_k_update_fnorm": 0.24977876245975494, + "block11_k_max_l1_linf_norm": 0.2141263633966446, + "block11_k_max_spectral_norm": 0.012039829045534134, + "block11_v_update_fnorm": 0.2454468011856079, + "block11_v_max_l1_linf_norm": 0.20668768882751465, + "block11_v_max_spectral_norm": 0.012045927345752716, + "block11_o_update_fnorm": 0.2485882043838501, + "block11_o_max_l1_linf_norm": 0.20701268315315247, + "block11_o_max_spectral_norm": 0.01204050611704588, + "block11_mlp_win_update_fnorm": 0.238532155752182, + "block11_mlp_win_max_l1_linf_norm": 0.1532495766878128, + "block11_mlp_win_max_spectral_norm": 0.011397798545658588, + "block11_mlp_wout_update_fnorm": 0.23652155697345734, + "block11_mlp_wout_max_l1_linf_norm": 0.3931800127029419, + "block11_mlp_wout_max_spectral_norm": 0.011353391222655773, + "total_sharpness": 0.009384134784340858, + "block_total_sharpness": 0.012069223448634148, + "v_norm_block": 1.9150968790054321, + "v_T_H_v_block": 0.044265035539865494, + "v_norm": 2.3351807594299316, + "ip_v_neg_g_hvp": 0.04805706813931465, + "cos_v_neg_g_hvp": 0.02584056369960308, + "g_hvp_norm": 0.7964065074920654, + "ip_v_neg_g_t": 0.053426094353199005, + "cos_v_neg_g_t": 0.027578823268413544, + "g_t_norm": 0.8295779824256897, + "g_norm": 0.7964065074920654, + "hv_norm": 2.321904420852661, + "cos_v_hv": 0.009437792003154755, + "hg_norm": 1467.8585205078125, + "cos_g_hg": 0.21067537367343903, + "v_parallel_norm": 0.0060760146006941795, + "v_perp_norm": 2.3351728916168213, + "embed_lm_head_v_norm": 1.3362165689468384, + "embed_lm_head_cos_v_neg_g": 0.03278665989637375, + "layer_1_v_norm": 0.5132110118865967, + "layer_1_cos_v_neg_g": 0.021188827231526375, + "layer_2_v_norm": 0.3965786397457123, + "layer_2_cos_v_neg_g": 0.029529094696044922, + "layer_3_v_norm": 0.4257800579071045, + "layer_3_cos_v_neg_g": 0.02921210415661335, + "layer_4_v_norm": 0.5164316296577454, + "layer_4_cos_v_neg_g": 0.024820474907755852, + "layer_5_v_norm": 0.5766428112983704, + "layer_5_cos_v_neg_g": 0.02889646776020527, + "layer_6_v_norm": 0.5879071950912476, + "layer_6_cos_v_neg_g": 0.02977428399026394, + "layer_7_v_norm": 0.5976738929748535, + "layer_7_cos_v_neg_g": 0.03026149608194828, + "layer_8_v_norm": 0.5983331203460693, + "layer_8_cos_v_neg_g": 0.028099913150072098, + "layer_9_v_norm": 0.5965010523796082, + "layer_9_cos_v_neg_g": 0.03078627400100231, + "layer_10_v_norm": 0.595177412033081, + "layer_10_cos_v_neg_g": 0.029736148193478584, + "layer_11_v_norm": 0.5801154375076294, + "layer_11_cos_v_neg_g": 0.03907572478055954, + "layer_12_v_norm": 0.5991606116294861, + "layer_12_cos_v_neg_g": 0.07144013047218323, + "block0_q_v_norm": 0.22867995500564575, + "block0_q_cos_v_neg_g": 0.033299464732408524, + "block0_k_v_norm": 0.17943565547466278, + "block0_k_cos_v_neg_g": 0.027406852692365646, + "block0_v_v_norm": 0.12079408019781113, + "block0_v_cos_v_neg_g": 0.04547916352748871, + "block0_o_v_norm": 0.1950288861989975, + "block0_o_cos_v_neg_g": 0.03507505729794502, + "block0_mlp_win_v_norm": 0.24396367371082306, + "block0_mlp_win_cos_v_neg_g": 0.03529714420437813, + "block0_mlp_wout_v_norm": 0.25809937715530396, + "block0_mlp_wout_cos_v_neg_g": 0.049088235944509506, + "block3_q_v_norm": 0.17698997259140015, + "block3_q_cos_v_neg_g": 0.02690231427550316, + "block3_k_v_norm": 0.18698133528232574, + "block3_k_cos_v_neg_g": 0.038965512067079544, + "block3_v_v_norm": 0.16820959746837616, + "block3_v_cos_v_neg_g": 0.02720423974096775, + "block3_o_v_norm": 
0.21347494423389435, + "block3_o_cos_v_neg_g": 0.03134511038661003, + "block3_mlp_win_v_norm": 0.26284345984458923, + "block3_mlp_win_cos_v_neg_g": 0.029156843200325966, + "block3_mlp_wout_v_norm": 0.23878693580627441, + "block3_mlp_wout_cos_v_neg_g": 0.1000799611210823, + "block7_q_v_norm": 0.24144957959651947, + "block7_q_cos_v_neg_g": 0.03543860837817192, + "block7_k_v_norm": 0.24640142917633057, + "block7_k_cos_v_neg_g": 0.08116812258958817, + "block7_v_v_norm": 0.20109377801418304, + "block7_v_cos_v_neg_g": 0.03052946925163269, + "block7_o_v_norm": 0.2475859671831131, + "block7_o_cos_v_neg_g": 0.07702505588531494, + "block7_mlp_win_v_norm": 0.27775371074676514, + "block7_mlp_win_cos_v_neg_g": 0.03594208508729935, + "block7_mlp_wout_v_norm": 0.2449059635400772, + "block7_mlp_wout_cos_v_neg_g": 0.12364490330219269, + "block11_q_v_norm": 0.24811729788780212, + "block11_q_cos_v_neg_g": 0.080311119556427, + "block11_k_v_norm": 0.24977876245975494, + "block11_k_cos_v_neg_g": 0.10203384608030319, + "block11_v_v_norm": 0.2454468011856079, + "block11_v_cos_v_neg_g": 0.04815273731946945, + "block11_o_v_norm": 0.2485882043838501, + "block11_o_cos_v_neg_g": 0.08026225119829178, + "block11_mlp_win_v_norm": 0.238532155752182, + "block11_mlp_win_cos_v_neg_g": 0.09570233523845673, + "block11_mlp_wout_v_norm": 0.23652155697345734, + "block11_mlp_wout_cos_v_neg_g": 0.08298739045858383, + "embed_lm_head_sharpness": 0.0004903437802568078, + "layer_1_sharpness": 0.024112531915307045, + "layer_2_sharpness": 0.010616995394229889, + "layer_3_sharpness": 0.003282625460997224, + "layer_4_sharpness": 0.002121481578797102, + "layer_5_sharpness": 0.0015674976166337729, + "layer_6_sharpness": 0.0015487833879888058, + "layer_7_sharpness": 0.0016965988324955106, + "layer_8_sharpness": 0.0013822067994624376, + "layer_9_sharpness": 0.0007769673247821629, + "layer_10_sharpness": 0.00048169822548516095, + "layer_11_sharpness": 0.0005885654827579856, + "layer_12_sharpness": 0.00037801379221491516, + "block0_q_sharpness": 8.888053707778454e-05, + "block0_k_sharpness": 0.00029004921088926494, + "block0_v_sharpness": 0.2970811426639557, + "block0_o_sharpness": 0.0005435817292891443, + "block0_mlp_win_sharpness": 0.0016222151461988688, + "block0_mlp_wout_sharpness": 0.0015390270855277777, + "block3_q_sharpness": 8.674903074279428e-05, + "block3_k_sharpness": 0.0016611048486083746, + "block3_v_sharpness": 0.005026162602007389, + "block3_o_sharpness": 0.0003968894015997648, + "block3_mlp_win_sharpness": 0.0003010071231983602, + "block3_mlp_wout_sharpness": 0.00012852679356001318, + "block7_q_sharpness": 6.94500922691077e-05, + "block7_k_sharpness": 6.574038707185537e-05, + "block7_v_sharpness": 0.003989726305007935, + "block7_o_sharpness": 9.695054905023426e-05, + "block7_mlp_win_sharpness": 0.0004558962245937437, + "block7_mlp_wout_sharpness": 9.363581193611026e-05, + "block11_q_sharpness": 4.707486732513644e-05, + "block11_k_sharpness": 5.447743387776427e-05, + "block11_v_sharpness": 0.00016681475972291082, + "block11_o_sharpness": 3.1231786124408245e-05, + "block11_mlp_win_sharpness": 0.00022799379075877368, + "block11_mlp_wout_sharpness": 0.0005504617001861334, + "sum_layer_numerators": 0.012119864222675124, + "block_diag_sharpness": 0.0033045799040289097, + "cross_layer_sharpness": 0.008764643544605238 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6000.json 
b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6000.json new file mode 100644 index 0000000000000000000000000000000000000000..784305e3017a7bdf8520dfe380095d7d97c236cf --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.300644874572754, + "total_l1_linf_norm": 19474.068359375, + "total_spectral_norm": 2.300645112991333, + "embed_lm_head_update_fnorm": 1.3424556255340576, + "embed_lm_head_max_l1_linf_norm": 0.42519620060920715, + "embed_lm_head_max_spectral_norm": 0.26218801736831665, + "layer_1_update_fnorm": 0.5072323679924011, + "layer_1_max_l1_linf_norm": 0.454161137342453, + "layer_1_max_spectral_norm": 0.01204146072268486, + "layer_2_update_fnorm": 0.3107762932777405, + "layer_2_max_l1_linf_norm": 0.5095103979110718, + "layer_2_max_spectral_norm": 0.013907914981245995, + "layer_3_update_fnorm": 0.37186917662620544, + "layer_3_max_l1_linf_norm": 0.46523311734199524, + "layer_3_max_spectral_norm": 0.014595864340662956, + "layer_4_update_fnorm": 0.48790785670280457, + "layer_4_max_l1_linf_norm": 0.465299129486084, + "layer_4_max_spectral_norm": 0.015804357826709747, + "layer_5_update_fnorm": 0.5682775974273682, + "layer_5_max_l1_linf_norm": 0.4325416088104248, + "layer_5_max_spectral_norm": 0.01204854529350996, + "layer_6_update_fnorm": 0.5695706605911255, + "layer_6_max_l1_linf_norm": 0.40304380655288696, + "layer_6_max_spectral_norm": 0.012044528499245644, + "layer_7_update_fnorm": 0.596054196357727, + "layer_7_max_l1_linf_norm": 0.40076515078544617, + "layer_7_max_spectral_norm": 0.012048983946442604, + "layer_8_update_fnorm": 0.5960016250610352, + "layer_8_max_l1_linf_norm": 0.40216064453125, + "layer_8_max_spectral_norm": 0.012049430049955845, + "layer_9_update_fnorm": 0.5973056554794312, + "layer_9_max_l1_linf_norm": 0.4032534956932068, + "layer_9_max_spectral_norm": 0.012059731408953667, + "layer_10_update_fnorm": 0.595331072807312, + "layer_10_max_l1_linf_norm": 0.4100072979927063, + "layer_10_max_spectral_norm": 0.01205094251781702, + "layer_11_update_fnorm": 0.5770519971847534, + "layer_11_max_l1_linf_norm": 0.4273272752761841, + "layer_11_max_spectral_norm": 0.01204569824039936, + "layer_12_update_fnorm": 0.5987837910652161, + "layer_12_max_l1_linf_norm": 0.42038479447364807, + "layer_12_max_spectral_norm": 0.012043372727930546, + "block0_q_update_fnorm": 0.23954664170742035, + "block0_q_max_l1_linf_norm": 0.21832308173179626, + "block0_q_max_spectral_norm": 0.01204146072268486, + "block0_k_update_fnorm": 0.2249569594860077, + "block0_k_max_l1_linf_norm": 0.2082923948764801, + "block0_k_max_spectral_norm": 0.012035364285111427, + "block0_v_update_fnorm": 0.10726530104875565, + "block0_v_max_l1_linf_norm": 0.14519137144088745, + "block0_v_max_spectral_norm": 0.012023544870316982, + "block0_o_update_fnorm": 0.1863425076007843, + "block0_o_max_l1_linf_norm": 0.1841219961643219, + "block0_o_max_spectral_norm": 0.01203465461730957, + "block0_mlp_win_update_fnorm": 0.21860511600971222, + "block0_mlp_win_max_l1_linf_norm": 0.19178912043571472, + "block0_mlp_win_max_spectral_norm": 0.012029617093503475, + "block0_mlp_wout_update_fnorm": 0.23456323146820068, + "block0_mlp_wout_max_l1_linf_norm": 0.38955190777778625, + "block0_mlp_wout_max_spectral_norm": 0.01203725952655077, + "block3_q_update_fnorm": 0.1661364585161209, + "block3_q_max_l1_linf_norm": 0.20205555856227875, + 
"block3_q_max_spectral_norm": 0.01203048974275589, + "block3_k_update_fnorm": 0.16975833475589752, + "block3_k_max_l1_linf_norm": 0.21190330386161804, + "block3_k_max_spectral_norm": 0.012032059021294117, + "block3_v_update_fnorm": 0.13950370252132416, + "block3_v_max_l1_linf_norm": 0.15649068355560303, + "block3_v_max_spectral_norm": 0.012026878073811531, + "block3_o_update_fnorm": 0.19899572432041168, + "block3_o_max_l1_linf_norm": 0.16958074271678925, + "block3_o_max_spectral_norm": 0.012035781517624855, + "block3_mlp_win_update_fnorm": 0.25645023584365845, + "block3_mlp_win_max_l1_linf_norm": 0.17223641276359558, + "block3_mlp_win_max_spectral_norm": 0.012036509811878204, + "block3_mlp_wout_update_fnorm": 0.23722869157791138, + "block3_mlp_wout_max_l1_linf_norm": 0.38975274562835693, + "block3_mlp_wout_max_spectral_norm": 0.011359605006873608, + "block7_q_update_fnorm": 0.23989057540893555, + "block7_q_max_l1_linf_norm": 0.20654475688934326, + "block7_q_max_spectral_norm": 0.012043154798448086, + "block7_k_update_fnorm": 0.24602825939655304, + "block7_k_max_l1_linf_norm": 0.2092796266078949, + "block7_k_max_spectral_norm": 0.012041982263326645, + "block7_v_update_fnorm": 0.198020339012146, + "block7_v_max_l1_linf_norm": 0.21201947331428528, + "block7_v_max_spectral_norm": 0.012033253908157349, + "block7_o_update_fnorm": 0.24771350622177124, + "block7_o_max_l1_linf_norm": 0.20684656500816345, + "block7_o_max_spectral_norm": 0.012048463337123394, + "block7_mlp_win_update_fnorm": 0.2785285711288452, + "block7_mlp_win_max_l1_linf_norm": 0.15475517511367798, + "block7_mlp_win_max_spectral_norm": 0.012049430049955845, + "block7_mlp_wout_update_fnorm": 0.2425776571035385, + "block7_mlp_wout_max_l1_linf_norm": 0.40216064453125, + "block7_mlp_wout_max_spectral_norm": 0.011376502923667431, + "block11_q_update_fnorm": 0.2478477507829666, + "block11_q_max_l1_linf_norm": 0.2107728123664856, + "block11_q_max_spectral_norm": 0.012043372727930546, + "block11_k_update_fnorm": 0.2498684674501419, + "block11_k_max_l1_linf_norm": 0.21300239861011505, + "block11_k_max_spectral_norm": 0.012041904032230377, + "block11_v_update_fnorm": 0.2451862394809723, + "block11_v_max_l1_linf_norm": 0.2063581645488739, + "block11_v_max_spectral_norm": 0.01204274594783783, + "block11_o_update_fnorm": 0.24770157039165497, + "block11_o_max_l1_linf_norm": 0.2094663679599762, + "block11_o_max_spectral_norm": 0.012040936388075352, + "block11_mlp_win_update_fnorm": 0.235589861869812, + "block11_mlp_win_max_l1_linf_norm": 0.1554722785949707, + "block11_mlp_win_max_spectral_norm": 0.01136857271194458, + "block11_mlp_wout_update_fnorm": 0.2398407757282257, + "block11_mlp_wout_max_l1_linf_norm": 0.4055783748626709, + "block11_mlp_wout_max_spectral_norm": 0.01195498276501894, + "total_sharpness": 0.005837759003043175, + "block_total_sharpness": 0.007768197450786829, + "v_norm_block": 1.8683631420135498, + "v_T_H_v_block": 0.02711707539856434, + "v_norm": 2.300644874572754, + "ip_v_neg_g_hvp": 0.043863944709300995, + "cos_v_neg_g_hvp": 0.019519176334142685, + "g_hvp_norm": 0.9767796397209167, + "ip_v_neg_g_t": 0.06175605580210686, + "cos_v_neg_g_t": 0.024188775569200516, + "g_t_norm": 1.1097267866134644, + "g_norm": 0.9767796397209167, + "hv_norm": 1.3651810884475708, + "cos_v_hv": 0.009837971068918705, + "hg_norm": 878.859130859375, + "cos_g_hg": 0.1683347225189209, + "v_parallel_norm": 0.005741667468100786, + "v_perp_norm": 2.300637722015381, + "embed_lm_head_v_norm": 1.3424556255340576, + "embed_lm_head_cos_v_neg_g": 
0.02054324932396412, + "layer_1_v_norm": 0.5072323679924011, + "layer_1_cos_v_neg_g": 0.005837759468704462, + "layer_2_v_norm": 0.3107762932777405, + "layer_2_cos_v_neg_g": 0.005536928307265043, + "layer_3_v_norm": 0.37186920642852783, + "layer_3_cos_v_neg_g": 0.039262302219867706, + "layer_4_v_norm": 0.48790785670280457, + "layer_4_cos_v_neg_g": 0.02573750726878643, + "layer_5_v_norm": 0.5682775974273682, + "layer_5_cos_v_neg_g": 0.029353074729442596, + "layer_6_v_norm": 0.5695706605911255, + "layer_6_cos_v_neg_g": 0.028967412188649178, + "layer_7_v_norm": 0.596054196357727, + "layer_7_cos_v_neg_g": 0.02973373606801033, + "layer_8_v_norm": 0.5960016250610352, + "layer_8_cos_v_neg_g": 0.028276408091187477, + "layer_9_v_norm": 0.5973056554794312, + "layer_9_cos_v_neg_g": 0.02998720295727253, + "layer_10_v_norm": 0.595331072807312, + "layer_10_cos_v_neg_g": 0.030816495418548584, + "layer_11_v_norm": 0.5770519971847534, + "layer_11_cos_v_neg_g": 0.039698462933301926, + "layer_12_v_norm": 0.5987837910652161, + "layer_12_cos_v_neg_g": 0.0563531257212162, + "block0_q_v_norm": 0.23954664170742035, + "block0_q_cos_v_neg_g": 0.029309343546628952, + "block0_k_v_norm": 0.2249569594860077, + "block0_k_cos_v_neg_g": 0.028501879423856735, + "block0_v_v_norm": 0.10726530104875565, + "block0_v_cos_v_neg_g": 0.01591183803975582, + "block0_o_v_norm": 0.1863425076007843, + "block0_o_cos_v_neg_g": 0.004073556046932936, + "block0_mlp_win_v_norm": 0.21860511600971222, + "block0_mlp_win_cos_v_neg_g": 0.008134429343044758, + "block0_mlp_wout_v_norm": 0.23456323146820068, + "block0_mlp_wout_cos_v_neg_g": 0.013139384798705578, + "block3_q_v_norm": 0.1661364585161209, + "block3_q_cos_v_neg_g": 0.026705078780651093, + "block3_k_v_norm": 0.16975833475589752, + "block3_k_cos_v_neg_g": 0.043329499661922455, + "block3_v_v_norm": 0.13950370252132416, + "block3_v_cos_v_neg_g": 0.03014499507844448, + "block3_o_v_norm": 0.19899572432041168, + "block3_o_cos_v_neg_g": 0.03497583419084549, + "block3_mlp_win_v_norm": 0.25645023584365845, + "block3_mlp_win_cos_v_neg_g": 0.030120233073830605, + "block3_mlp_wout_v_norm": 0.23722869157791138, + "block3_mlp_wout_cos_v_neg_g": 0.09179547429084778, + "block7_q_v_norm": 0.23989057540893555, + "block7_q_cos_v_neg_g": 0.03632916882634163, + "block7_k_v_norm": 0.24602825939655304, + "block7_k_cos_v_neg_g": 0.08161086589097977, + "block7_v_v_norm": 0.198020339012146, + "block7_v_cos_v_neg_g": 0.03203044459223747, + "block7_o_v_norm": 0.24771350622177124, + "block7_o_cos_v_neg_g": 0.07785291224718094, + "block7_mlp_win_v_norm": 0.2785285711288452, + "block7_mlp_win_cos_v_neg_g": 0.034871771931648254, + "block7_mlp_wout_v_norm": 0.2425776571035385, + "block7_mlp_wout_cos_v_neg_g": 0.12323283404111862, + "block11_q_v_norm": 0.2478477507829666, + "block11_q_cos_v_neg_g": 0.074977345764637, + "block11_k_v_norm": 0.2498684674501419, + "block11_k_cos_v_neg_g": 0.09716793149709702, + "block11_v_v_norm": 0.2451862394809723, + "block11_v_cos_v_neg_g": 0.05040937662124634, + "block11_o_v_norm": 0.24770157039165497, + "block11_o_cos_v_neg_g": 0.06815405189990997, + "block11_mlp_win_v_norm": 0.235589861869812, + "block11_mlp_win_cos_v_neg_g": 0.07806482911109924, + "block11_mlp_wout_v_norm": 0.2398407757282257, + "block11_mlp_wout_cos_v_neg_g": 0.061578087508678436, + "embed_lm_head_sharpness": 0.0004637862148229033, + "layer_1_sharpness": 0.011673888191580772, + "layer_2_sharpness": 0.013278079219162464, + "layer_3_sharpness": 0.006886125076562166, + "layer_4_sharpness": 0.00200224039144814, + 
"layer_5_sharpness": 0.0013036784948781133, + "layer_6_sharpness": 0.001425828435458243, + "layer_7_sharpness": 0.001391480676829815, + "layer_8_sharpness": 0.0014163802843540907, + "layer_9_sharpness": 0.0008788821869529784, + "layer_10_sharpness": 0.0005205111228860915, + "layer_11_sharpness": 0.0005704916547983885, + "layer_12_sharpness": 0.0009280705126002431, + "block0_q_sharpness": 5.9232625062577426e-05, + "block0_k_sharpness": 0.00020915652567055076, + "block0_v_sharpness": 0.10423066467046738, + "block0_o_sharpness": 0.0011653908295556903, + "block0_mlp_win_sharpness": 0.0014149066992104053, + "block0_mlp_wout_sharpness": 0.0015322343679144979, + "block3_q_sharpness": 8.436411735601723e-05, + "block3_k_sharpness": 0.0015660938806831837, + "block3_v_sharpness": 0.006984050385653973, + "block3_o_sharpness": 0.0005295194569043815, + "block3_mlp_win_sharpness": 0.0002428193693049252, + "block3_mlp_wout_sharpness": 7.427444506902248e-05, + "block7_q_sharpness": 0.00011411882587708533, + "block7_k_sharpness": 7.876160088926554e-05, + "block7_v_sharpness": 0.0044021764770150185, + "block7_o_sharpness": 8.411466842517257e-05, + "block7_mlp_win_sharpness": 0.0004414740251377225, + "block7_mlp_wout_sharpness": 8.942732529249042e-05, + "block11_q_sharpness": 6.527714140247554e-05, + "block11_k_sharpness": 4.655212251236662e-05, + "block11_v_sharpness": 0.00017634160758461803, + "block11_o_sharpness": 3.246620690333657e-05, + "block11_mlp_win_sharpness": 0.0010223645949736238, + "block11_mlp_wout_sharpness": 0.0013091647997498512, + "sum_layer_numerators": 0.008616649940967452, + "block_diag_sharpness": 0.0024684018732549096, + "cross_layer_sharpness": 0.005299795577531919 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6500.json new file mode 100644 index 0000000000000000000000000000000000000000..184e56b7861bb81daaacd815bd7b6fbbc35cd9fe --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_6500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2651777267456055, + "total_l1_linf_norm": 19133.61328125, + "total_spectral_norm": 2.2651784420013428, + "embed_lm_head_update_fnorm": 1.324539303779602, + "embed_lm_head_max_l1_linf_norm": 0.3582468628883362, + "embed_lm_head_max_spectral_norm": 0.1902610808610916, + "layer_1_update_fnorm": 0.41145479679107666, + "layer_1_max_l1_linf_norm": 0.3571416735649109, + "layer_1_max_spectral_norm": 0.012033993378281593, + "layer_2_update_fnorm": 0.33002856373786926, + "layer_2_max_l1_linf_norm": 0.3965156674385071, + "layer_2_max_spectral_norm": 0.01293160393834114, + "layer_3_update_fnorm": 0.37094399333000183, + "layer_3_max_l1_linf_norm": 0.38257211446762085, + "layer_3_max_spectral_norm": 0.01258168090134859, + "layer_4_update_fnorm": 0.4599657654762268, + "layer_4_max_l1_linf_norm": 0.4070455729961395, + "layer_4_max_spectral_norm": 0.0145080192014575, + "layer_5_update_fnorm": 0.5577831864356995, + "layer_5_max_l1_linf_norm": 0.3979942798614502, + "layer_5_max_spectral_norm": 0.0120430588722229, + "layer_6_update_fnorm": 0.5729294419288635, + "layer_6_max_l1_linf_norm": 0.3976164162158966, + "layer_6_max_spectral_norm": 0.01204662211239338, + "layer_7_update_fnorm": 0.5949208736419678, + "layer_7_max_l1_linf_norm": 0.4001063108444214, + 
"layer_7_max_spectral_norm": 0.012042159214615822, + "layer_8_update_fnorm": 0.5970618724822998, + "layer_8_max_l1_linf_norm": 0.40150323510169983, + "layer_8_max_spectral_norm": 0.012045411393046379, + "layer_9_update_fnorm": 0.5965790152549744, + "layer_9_max_l1_linf_norm": 0.40165287256240845, + "layer_9_max_spectral_norm": 0.012054837308824062, + "layer_10_update_fnorm": 0.5943006873130798, + "layer_10_max_l1_linf_norm": 0.40832334756851196, + "layer_10_max_spectral_norm": 0.012049393728375435, + "layer_11_update_fnorm": 0.5764122605323792, + "layer_11_max_l1_linf_norm": 0.40430566668510437, + "layer_11_max_spectral_norm": 0.012049281969666481, + "layer_12_update_fnorm": 0.5984247326850891, + "layer_12_max_l1_linf_norm": 0.4152108132839203, + "layer_12_max_spectral_norm": 0.012044639326632023, + "block0_q_update_fnorm": 0.1038433313369751, + "block0_q_max_l1_linf_norm": 0.202338308095932, + "block0_q_max_spectral_norm": 0.012020695023238659, + "block0_k_update_fnorm": 0.15903931856155396, + "block0_k_max_l1_linf_norm": 0.21208080649375916, + "block0_k_max_spectral_norm": 0.012030720710754395, + "block0_v_update_fnorm": 0.11674988269805908, + "block0_v_max_l1_linf_norm": 0.13889925181865692, + "block0_v_max_spectral_norm": 0.012021304108202457, + "block0_o_update_fnorm": 0.17730724811553955, + "block0_o_max_l1_linf_norm": 0.17604096233844757, + "block0_o_max_spectral_norm": 0.012030393816530704, + "block0_mlp_win_update_fnorm": 0.2043914794921875, + "block0_mlp_win_max_l1_linf_norm": 0.19777613878250122, + "block0_mlp_win_max_spectral_norm": 0.012030394747853279, + "block0_mlp_wout_update_fnorm": 0.21505002677440643, + "block0_mlp_wout_max_l1_linf_norm": 0.3571416735649109, + "block0_mlp_wout_max_spectral_norm": 0.012033993378281593, + "block3_q_update_fnorm": 0.12728257477283478, + "block3_q_max_l1_linf_norm": 0.17598000168800354, + "block3_q_max_spectral_norm": 0.01202476117759943, + "block3_k_update_fnorm": 0.1377400904893875, + "block3_k_max_l1_linf_norm": 0.1933974176645279, + "block3_k_max_spectral_norm": 0.012027321383357048, + "block3_v_update_fnorm": 0.13931405544281006, + "block3_v_max_l1_linf_norm": 0.1587211787700653, + "block3_v_max_spectral_norm": 0.012029137462377548, + "block3_o_update_fnorm": 0.19357483088970184, + "block3_o_max_l1_linf_norm": 0.16341756284236908, + "block3_o_max_spectral_norm": 0.012032890692353249, + "block3_mlp_win_update_fnorm": 0.24665315449237823, + "block3_mlp_win_max_l1_linf_norm": 0.17796021699905396, + "block3_mlp_win_max_spectral_norm": 0.012033962644636631, + "block3_mlp_wout_update_fnorm": 0.24137094616889954, + "block3_mlp_wout_max_l1_linf_norm": 0.39665859937667847, + "block3_mlp_wout_max_spectral_norm": 0.01171096321195364, + "block7_q_update_fnorm": 0.23928417265415192, + "block7_q_max_l1_linf_norm": 0.20773620903491974, + "block7_q_max_spectral_norm": 0.012043951079249382, + "block7_k_update_fnorm": 0.2457677274942398, + "block7_k_max_l1_linf_norm": 0.20836663246154785, + "block7_k_max_spectral_norm": 0.01203878317028284, + "block7_v_update_fnorm": 0.2014041543006897, + "block7_v_max_l1_linf_norm": 0.211580291390419, + "block7_v_max_spectral_norm": 0.012033521197736263, + "block7_o_update_fnorm": 0.24739904701709747, + "block7_o_max_l1_linf_norm": 0.20635788142681122, + "block7_o_max_spectral_norm": 0.012045411393046379, + "block7_mlp_win_update_fnorm": 0.27951961755752563, + "block7_mlp_win_max_l1_linf_norm": 0.15479563176631927, + "block7_mlp_win_max_spectral_norm": 0.01204367820173502, + "block7_mlp_wout_update_fnorm": 
0.24247360229492188, + "block7_mlp_wout_max_l1_linf_norm": 0.40150323510169983, + "block7_mlp_wout_max_spectral_norm": 0.01138212624937296, + "block11_q_update_fnorm": 0.24806611239910126, + "block11_q_max_l1_linf_norm": 0.2094513177871704, + "block11_q_max_spectral_norm": 0.012044639326632023, + "block11_k_update_fnorm": 0.2502593696117401, + "block11_k_max_l1_linf_norm": 0.21565939486026764, + "block11_k_max_spectral_norm": 0.012039759196341038, + "block11_v_update_fnorm": 0.24497303366661072, + "block11_v_max_l1_linf_norm": 0.20705699920654297, + "block11_v_max_spectral_norm": 0.012042142450809479, + "block11_o_update_fnorm": 0.24744579195976257, + "block11_o_max_l1_linf_norm": 0.20876815915107727, + "block11_o_max_spectral_norm": 0.012041237205266953, + "block11_mlp_win_update_fnorm": 0.2359435260295868, + "block11_mlp_win_max_l1_linf_norm": 0.15814784169197083, + "block11_mlp_win_max_spectral_norm": 0.011366044171154499, + "block11_mlp_wout_update_fnorm": 0.23845958709716797, + "block11_mlp_wout_max_l1_linf_norm": 0.40339165925979614, + "block11_mlp_wout_max_spectral_norm": 0.011816171929240227, + "total_sharpness": 0.01981581188738346, + "block_total_sharpness": 0.02863592468202114, + "v_norm_block": 1.8375606536865234, + "v_T_H_v_block": 0.09669290482997894, + "v_norm": 2.2651777267456055, + "ip_v_neg_g_hvp": 0.03779331594705582, + "cos_v_neg_g_hvp": 0.02219369262456894, + "g_hvp_norm": 0.7517668008804321, + "ip_v_neg_g_t": 0.04963395744562149, + "cos_v_neg_g_t": 0.022702140733599663, + "g_t_norm": 0.9651832580566406, + "g_norm": 0.7517668008804321, + "hv_norm": 12.360174179077148, + "cos_v_hv": 0.0036315296310931444, + "hg_norm": 7947.9248046875, + "cos_g_hg": 0.11311141401529312, + "v_parallel_norm": 0.005423048976808786, + "v_perp_norm": 2.2651712894439697, + "embed_lm_head_v_norm": 1.324539303779602, + "embed_lm_head_cos_v_neg_g": 0.03149667754769325, + "layer_1_v_norm": 0.41145479679107666, + "layer_1_cos_v_neg_g": 0.0181049183011055, + "layer_2_v_norm": 0.33002856373786926, + "layer_2_cos_v_neg_g": 0.026322409510612488, + "layer_3_v_norm": 0.37094399333000183, + "layer_3_cos_v_neg_g": 0.030336186289787292, + "layer_4_v_norm": 0.4599657654762268, + "layer_4_cos_v_neg_g": 0.020115556195378304, + "layer_5_v_norm": 0.5577831864356995, + "layer_5_cos_v_neg_g": 0.0243784599006176, + "layer_6_v_norm": 0.5729293823242188, + "layer_6_cos_v_neg_g": 0.023343827575445175, + "layer_7_v_norm": 0.5949208736419678, + "layer_7_cos_v_neg_g": 0.025801021605730057, + "layer_8_v_norm": 0.5970618724822998, + "layer_8_cos_v_neg_g": 0.023886265233159065, + "layer_9_v_norm": 0.5965790152549744, + "layer_9_cos_v_neg_g": 0.0270583163946867, + "layer_10_v_norm": 0.5943006873130798, + "layer_10_cos_v_neg_g": 0.026100466027855873, + "layer_11_v_norm": 0.5764122009277344, + "layer_11_cos_v_neg_g": 0.036636002361774445, + "layer_12_v_norm": 0.5984247326850891, + "layer_12_cos_v_neg_g": 0.06698901951313019, + "block0_q_v_norm": 0.1038433313369751, + "block0_q_cos_v_neg_g": 0.0030009220354259014, + "block0_k_v_norm": 0.15903931856155396, + "block0_k_cos_v_neg_g": 0.01001705788075924, + "block0_v_v_norm": 0.11674988269805908, + "block0_v_cos_v_neg_g": 0.020844805985689163, + "block0_o_v_norm": 0.17730724811553955, + "block0_o_cos_v_neg_g": 0.036200787872076035, + "block0_mlp_win_v_norm": 0.2043914794921875, + "block0_mlp_win_cos_v_neg_g": 0.030732933431863785, + "block0_mlp_wout_v_norm": 0.21505002677440643, + "block0_mlp_wout_cos_v_neg_g": 0.041301362216472626, + "block3_q_v_norm": 0.12728257477283478, + 
"block3_q_cos_v_neg_g": 0.022432351484894753, + "block3_k_v_norm": 0.1377400904893875, + "block3_k_cos_v_neg_g": 0.03812827169895172, + "block3_v_v_norm": 0.13931405544281006, + "block3_v_cos_v_neg_g": 0.0178332831710577, + "block3_o_v_norm": 0.19357483088970184, + "block3_o_cos_v_neg_g": 0.02581528015434742, + "block3_mlp_win_v_norm": 0.24665315449237823, + "block3_mlp_win_cos_v_neg_g": 0.023068998008966446, + "block3_mlp_wout_v_norm": 0.24137094616889954, + "block3_mlp_wout_cos_v_neg_g": 0.07576563209295273, + "block7_q_v_norm": 0.23928417265415192, + "block7_q_cos_v_neg_g": 0.028129206970334053, + "block7_k_v_norm": 0.2457677274942398, + "block7_k_cos_v_neg_g": 0.07454590499401093, + "block7_v_v_norm": 0.2014041543006897, + "block7_v_cos_v_neg_g": 0.022613024339079857, + "block7_o_v_norm": 0.24739904701709747, + "block7_o_cos_v_neg_g": 0.06739148497581482, + "block7_mlp_win_v_norm": 0.27951961755752563, + "block7_mlp_win_cos_v_neg_g": 0.029612155631184578, + "block7_mlp_wout_v_norm": 0.24247360229492188, + "block7_mlp_wout_cos_v_neg_g": 0.1089896708726883, + "block11_q_v_norm": 0.24806611239910126, + "block11_q_cos_v_neg_g": 0.07857901602983475, + "block11_k_v_norm": 0.2502593696117401, + "block11_k_cos_v_neg_g": 0.09898363798856735, + "block11_v_v_norm": 0.24497303366661072, + "block11_v_cos_v_neg_g": 0.04330668970942497, + "block11_o_v_norm": 0.24744579195976257, + "block11_o_cos_v_neg_g": 0.07678205519914627, + "block11_mlp_win_v_norm": 0.2359435260295868, + "block11_mlp_win_cos_v_neg_g": 0.08851952850818634, + "block11_mlp_wout_v_norm": 0.23845958709716797, + "block11_mlp_wout_cos_v_neg_g": 0.07372725009918213, + "embed_lm_head_sharpness": 0.0004902004729956388, + "layer_1_sharpness": 0.19875991344451904, + "layer_2_sharpness": 0.06914477050304413, + "layer_3_sharpness": 0.008558902889490128, + "layer_4_sharpness": 0.0016824466874822974, + "layer_5_sharpness": 0.0011559887789189816, + "layer_6_sharpness": 0.0011876766802743077, + "layer_7_sharpness": 0.0012547210790216923, + "layer_8_sharpness": 0.001104125869460404, + "layer_9_sharpness": 0.0006219706265255809, + "layer_10_sharpness": 0.00038377969758585095, + "layer_11_sharpness": 0.00044770381646230817, + "layer_12_sharpness": 0.0003135000297334045, + "block0_q_sharpness": 0.016682114452123642, + "block0_k_sharpness": 0.0005049047758802772, + "block0_v_sharpness": 0.38373255729675293, + "block0_o_sharpness": 0.08115888386964798, + "block0_mlp_win_sharpness": 0.029037419706583023, + "block0_mlp_wout_sharpness": 0.034790895879268646, + "block3_q_sharpness": 3.585156082408503e-05, + "block3_k_sharpness": 0.0021727096755057573, + "block3_v_sharpness": 0.006254100240767002, + "block3_o_sharpness": 0.0005279146134853363, + "block3_mlp_win_sharpness": 0.00016985677939374, + "block3_mlp_wout_sharpness": 8.393335883738473e-05, + "block7_q_sharpness": 0.00010281903087161481, + "block7_k_sharpness": 0.00010556969937169924, + "block7_v_sharpness": 0.0036417183000594378, + "block7_o_sharpness": 4.862807327299379e-05, + "block7_mlp_win_sharpness": 0.00041907510603778064, + "block7_mlp_wout_sharpness": 7.907619146862999e-05, + "block11_q_sharpness": 0.00010668524191714823, + "block11_k_sharpness": 6.29062342341058e-05, + "block11_v_sharpness": 0.0001549034204799682, + "block11_o_sharpness": 2.7945956389885396e-05, + "block11_mlp_win_sharpness": 0.00016023233183659613, + "block11_mlp_wout_sharpness": 0.0003299301606602967, + "sum_layer_numerators": 0.04491901429345382, + "block_diag_sharpness": 0.013302916079470668, + "cross_layer_sharpness": 
0.015333008602550473 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7000.json new file mode 100644 index 0000000000000000000000000000000000000000..8a4c783f5d795122fcf52c8fb49753ad87aa1a53 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.2748894691467285, + "total_l1_linf_norm": 19266.00390625, + "total_spectral_norm": 2.2748897075653076, + "embed_lm_head_update_fnorm": 1.3356552124023438, + "embed_lm_head_max_l1_linf_norm": 0.35331985354423523, + "embed_lm_head_max_spectral_norm": 0.21045684814453125, + "layer_1_update_fnorm": 0.47390806674957275, + "layer_1_max_l1_linf_norm": 0.36385583877563477, + "layer_1_max_spectral_norm": 0.012034453451633453, + "layer_2_update_fnorm": 0.35243475437164307, + "layer_2_max_l1_linf_norm": 0.4243425130844116, + "layer_2_max_spectral_norm": 0.015893690288066864, + "layer_3_update_fnorm": 0.3722132444381714, + "layer_3_max_l1_linf_norm": 0.429849237203598, + "layer_3_max_spectral_norm": 0.014251476153731346, + "layer_4_update_fnorm": 0.4375423491001129, + "layer_4_max_l1_linf_norm": 0.465774267911911, + "layer_4_max_spectral_norm": 0.01774941384792328, + "layer_5_update_fnorm": 0.5605344772338867, + "layer_5_max_l1_linf_norm": 0.41552478075027466, + "layer_5_max_spectral_norm": 0.012042801827192307, + "layer_6_update_fnorm": 0.5422550439834595, + "layer_6_max_l1_linf_norm": 0.40167444944381714, + "layer_6_max_spectral_norm": 0.012043779715895653, + "layer_7_update_fnorm": 0.5945630073547363, + "layer_7_max_l1_linf_norm": 0.40085652470588684, + "layer_7_max_spectral_norm": 0.012046528048813343, + "layer_8_update_fnorm": 0.5956375598907471, + "layer_8_max_l1_linf_norm": 0.40033647418022156, + "layer_8_max_spectral_norm": 0.012046984396874905, + "layer_9_update_fnorm": 0.5961735844612122, + "layer_9_max_l1_linf_norm": 0.4040600657463074, + "layer_9_max_spectral_norm": 0.012046551331877708, + "layer_10_update_fnorm": 0.5938850045204163, + "layer_10_max_l1_linf_norm": 0.4097400903701782, + "layer_10_max_spectral_norm": 0.012049887329339981, + "layer_11_update_fnorm": 0.5740955471992493, + "layer_11_max_l1_linf_norm": 0.4235268831253052, + "layer_11_max_spectral_norm": 0.012045563198626041, + "layer_12_update_fnorm": 0.5984366536140442, + "layer_12_max_l1_linf_norm": 0.4290839731693268, + "layer_12_max_spectral_norm": 0.012044024653732777, + "block0_q_update_fnorm": 0.18422506749629974, + "block0_q_max_l1_linf_norm": 0.19150285422801971, + "block0_q_max_spectral_norm": 0.012030484154820442, + "block0_k_update_fnorm": 0.19473162293434143, + "block0_k_max_l1_linf_norm": 0.19918140769004822, + "block0_k_max_spectral_norm": 0.012032272294163704, + "block0_v_update_fnorm": 0.1360589861869812, + "block0_v_max_l1_linf_norm": 0.15016014873981476, + "block0_v_max_spectral_norm": 0.012024958617985249, + "block0_o_update_fnorm": 0.18558740615844727, + "block0_o_max_l1_linf_norm": 0.17236824333667755, + "block0_o_max_spectral_norm": 0.012034204788506031, + "block0_mlp_win_update_fnorm": 0.2258552461862564, + "block0_mlp_win_max_l1_linf_norm": 0.19428735971450806, + "block0_mlp_win_max_spectral_norm": 0.012034453451633453, + "block0_mlp_wout_update_fnorm": 0.22044964134693146, + 
"block0_mlp_wout_max_l1_linf_norm": 0.36385583877563477, + "block0_mlp_wout_max_spectral_norm": 0.01203245297074318, + "block3_q_update_fnorm": 0.08198662847280502, + "block3_q_max_l1_linf_norm": 0.138578861951828, + "block3_q_max_spectral_norm": 0.012026498094201088, + "block3_k_update_fnorm": 0.08719763904809952, + "block3_k_max_l1_linf_norm": 0.15752048790454865, + "block3_k_max_spectral_norm": 0.012026138603687286, + "block3_v_update_fnorm": 0.1422485113143921, + "block3_v_max_l1_linf_norm": 0.16217775642871857, + "block3_v_max_spectral_norm": 0.012028483673930168, + "block3_o_update_fnorm": 0.19033709168434143, + "block3_o_max_l1_linf_norm": 0.160474956035614, + "block3_o_max_spectral_norm": 0.012034270912408829, + "block3_mlp_win_update_fnorm": 0.25066235661506653, + "block3_mlp_win_max_l1_linf_norm": 0.1760648488998413, + "block3_mlp_win_max_spectral_norm": 0.012036070227622986, + "block3_mlp_wout_update_fnorm": 0.23912790417671204, + "block3_mlp_wout_max_l1_linf_norm": 0.3918722867965698, + "block3_mlp_wout_max_spectral_norm": 0.011402454227209091, + "block7_q_update_fnorm": 0.2391837239265442, + "block7_q_max_l1_linf_norm": 0.207859605550766, + "block7_q_max_spectral_norm": 0.012042073532938957, + "block7_k_update_fnorm": 0.24617286026477814, + "block7_k_max_l1_linf_norm": 0.21013516187667847, + "block7_k_max_spectral_norm": 0.012045794166624546, + "block7_v_update_fnorm": 0.19507145881652832, + "block7_v_max_l1_linf_norm": 0.21077203750610352, + "block7_v_max_spectral_norm": 0.012031341902911663, + "block7_o_update_fnorm": 0.24761830270290375, + "block7_o_max_l1_linf_norm": 0.20586436986923218, + "block7_o_max_spectral_norm": 0.012042747810482979, + "block7_mlp_win_update_fnorm": 0.2805657982826233, + "block7_mlp_win_max_l1_linf_norm": 0.15766890347003937, + "block7_mlp_win_max_spectral_norm": 0.012046984396874905, + "block7_mlp_wout_update_fnorm": 0.24235787987709045, + "block7_mlp_wout_max_l1_linf_norm": 0.40033647418022156, + "block7_mlp_wout_max_spectral_norm": 0.011382265016436577, + "block11_q_update_fnorm": 0.24849526584148407, + "block11_q_max_l1_linf_norm": 0.2104579508304596, + "block11_q_max_spectral_norm": 0.012039736844599247, + "block11_k_update_fnorm": 0.25006407499313354, + "block11_k_max_l1_linf_norm": 0.2135307490825653, + "block11_k_max_spectral_norm": 0.01203954964876175, + "block11_v_update_fnorm": 0.24480639398097992, + "block11_v_max_l1_linf_norm": 0.20650720596313477, + "block11_v_max_spectral_norm": 0.012044024653732777, + "block11_o_update_fnorm": 0.24826295673847198, + "block11_o_max_l1_linf_norm": 0.20952725410461426, + "block11_o_max_spectral_norm": 0.012042932212352753, + "block11_mlp_win_update_fnorm": 0.2360743135213852, + "block11_mlp_win_max_l1_linf_norm": 0.14921991527080536, + "block11_mlp_win_max_spectral_norm": 0.011378822848200798, + "block11_mlp_wout_update_fnorm": 0.23742526769638062, + "block11_mlp_wout_max_l1_linf_norm": 0.4015195071697235, + "block11_mlp_wout_max_spectral_norm": 0.011710979044437408, + "total_sharpness": 0.0033606986980885267, + "block_total_sharpness": 0.005317062139511108, + "v_norm_block": 1.8415071964263916, + "v_T_H_v_block": 0.01803094893693924, + "v_norm": 2.2748894691467285, + "ip_v_neg_g_hvp": 0.035785309970378876, + "cos_v_neg_g_hvp": 0.018040930852293968, + "g_hvp_norm": 0.8719378709793091, + "ip_v_neg_g_t": 0.043808989226818085, + "cos_v_neg_g_t": 0.02743944525718689, + "g_t_norm": 0.7018229961395264, + "g_norm": 0.8719378709793091, + "hv_norm": 4.504334926605225, + "cos_v_hv": 0.0016973023302853107, + 
"hg_norm": 7024.10888671875, + "cos_g_hg": -0.030326539650559425, + "v_parallel_norm": 0.003855757415294647, + "v_perp_norm": 2.2748863697052, + "embed_lm_head_v_norm": 1.3356552124023438, + "embed_lm_head_cos_v_neg_g": 0.031125284731388092, + "layer_1_v_norm": 0.47390806674957275, + "layer_1_cos_v_neg_g": 0.005235967226326466, + "layer_2_v_norm": 0.35243475437164307, + "layer_2_cos_v_neg_g": 0.00030145185883156955, + "layer_3_v_norm": 0.3722132444381714, + "layer_3_cos_v_neg_g": 0.02172313816845417, + "layer_4_v_norm": 0.4375423491001129, + "layer_4_cos_v_neg_g": 0.024511948227882385, + "layer_5_v_norm": 0.5605344772338867, + "layer_5_cos_v_neg_g": 0.026509424671530724, + "layer_6_v_norm": 0.5422550439834595, + "layer_6_cos_v_neg_g": 0.02621655911207199, + "layer_7_v_norm": 0.5945630073547363, + "layer_7_cos_v_neg_g": 0.025109825655817986, + "layer_8_v_norm": 0.5956375598907471, + "layer_8_cos_v_neg_g": 0.024660073220729828, + "layer_9_v_norm": 0.5961735844612122, + "layer_9_cos_v_neg_g": 0.027677785605192184, + "layer_10_v_norm": 0.5938850045204163, + "layer_10_cos_v_neg_g": 0.0281539186835289, + "layer_11_v_norm": 0.5740955471992493, + "layer_11_cos_v_neg_g": 0.03774537891149521, + "layer_12_v_norm": 0.5984366536140442, + "layer_12_cos_v_neg_g": 0.0682603046298027, + "block0_q_v_norm": 0.18422506749629974, + "block0_q_cos_v_neg_g": 0.03675255551934242, + "block0_k_v_norm": 0.19473162293434143, + "block0_k_cos_v_neg_g": 0.023340247571468353, + "block0_v_v_norm": 0.1360589861869812, + "block0_v_cos_v_neg_g": 0.01090195681899786, + "block0_o_v_norm": 0.18558740615844727, + "block0_o_cos_v_neg_g": 0.008212883025407791, + "block0_mlp_win_v_norm": 0.2258552461862564, + "block0_mlp_win_cos_v_neg_g": 0.009729976765811443, + "block0_mlp_wout_v_norm": 0.22044964134693146, + "block0_mlp_wout_cos_v_neg_g": -0.00010506131366128102, + "block3_q_v_norm": 0.08198662847280502, + "block3_q_cos_v_neg_g": 0.020776022225618362, + "block3_k_v_norm": 0.08719763904809952, + "block3_k_cos_v_neg_g": 0.057107165455818176, + "block3_v_v_norm": 0.1422485113143921, + "block3_v_cos_v_neg_g": 0.02419957146048546, + "block3_o_v_norm": 0.19033709168434143, + "block3_o_cos_v_neg_g": 0.0319972038269043, + "block3_mlp_win_v_norm": 0.25066235661506653, + "block3_mlp_win_cos_v_neg_g": 0.02515127696096897, + "block3_mlp_wout_v_norm": 0.23912790417671204, + "block3_mlp_wout_cos_v_neg_g": 0.09617386758327484, + "block7_q_v_norm": 0.2391837239265442, + "block7_q_cos_v_neg_g": 0.030284222215414047, + "block7_k_v_norm": 0.24617286026477814, + "block7_k_cos_v_neg_g": 0.07718811929225922, + "block7_v_v_norm": 0.19507145881652832, + "block7_v_cos_v_neg_g": 0.025691546499729156, + "block7_o_v_norm": 0.24761830270290375, + "block7_o_cos_v_neg_g": 0.07291420549154282, + "block7_mlp_win_v_norm": 0.2805657982826233, + "block7_mlp_win_cos_v_neg_g": 0.031235523521900177, + "block7_mlp_wout_v_norm": 0.24235787987709045, + "block7_mlp_wout_cos_v_neg_g": 0.11678840965032578, + "block11_q_v_norm": 0.24849526584148407, + "block11_q_cos_v_neg_g": 0.07661623507738113, + "block11_k_v_norm": 0.25006407499313354, + "block11_k_cos_v_neg_g": 0.09963866323232651, + "block11_v_v_norm": 0.24480639398097992, + "block11_v_cos_v_neg_g": 0.04172639548778534, + "block11_o_v_norm": 0.24826295673847198, + "block11_o_cos_v_neg_g": 0.07507038861513138, + "block11_mlp_win_v_norm": 0.2360743135213852, + "block11_mlp_win_cos_v_neg_g": 0.09237714111804962, + "block11_mlp_wout_v_norm": 0.23742526769638062, + "block11_mlp_wout_cos_v_neg_g": 0.07890413701534271, + 
"embed_lm_head_sharpness": 0.00021564902272075415, + "layer_1_sharpness": 0.00703550735488534, + "layer_2_sharpness": 0.006865303497761488, + "layer_3_sharpness": 0.002772929612547159, + "layer_4_sharpness": 0.0017530795885249972, + "layer_5_sharpness": 0.0011749109253287315, + "layer_6_sharpness": 0.0015449430793523788, + "layer_7_sharpness": 0.0011865361593663692, + "layer_8_sharpness": 0.0014400541549548507, + "layer_9_sharpness": 0.0008032245677895844, + "layer_10_sharpness": 0.0004613623023033142, + "layer_11_sharpness": 0.0005286081577651203, + "layer_12_sharpness": 0.0003316715301480144, + "block0_q_sharpness": 0.00041146998410113156, + "block0_k_sharpness": 0.00024217170721385628, + "block0_v_sharpness": -0.003471414791420102, + "block0_o_sharpness": 0.009353190660476685, + "block0_mlp_win_sharpness": 0.0020563595462590456, + "block0_mlp_wout_sharpness": 0.0070788064040243626, + "block3_q_sharpness": 5.668745870934799e-05, + "block3_k_sharpness": 0.003832245245575905, + "block3_v_sharpness": 0.005983272101730108, + "block3_o_sharpness": 0.0006207741098478436, + "block3_mlp_win_sharpness": 0.00018895043467637151, + "block3_mlp_wout_sharpness": 6.923363253008574e-05, + "block7_q_sharpness": 0.00010393039701739326, + "block7_k_sharpness": 8.270532271126285e-05, + "block7_v_sharpness": 0.0042059882543981075, + "block7_o_sharpness": 6.600671622436494e-05, + "block7_mlp_win_sharpness": 0.0006375730154104531, + "block7_mlp_wout_sharpness": 9.109569509746507e-05, + "block11_q_sharpness": 4.526041084318422e-05, + "block11_k_sharpness": 4.535359039437026e-05, + "block11_v_sharpness": 0.00010410964023321867, + "block11_o_sharpness": 3.0395791327464394e-05, + "block11_mlp_win_sharpness": 0.00023871018493082374, + "block11_mlp_wout_sharpness": 0.000421264732722193, + "sum_layer_numerators": 0.005647617429037751, + "block_diag_sharpness": 0.0016653994967221043, + "cross_layer_sharpness": 0.0036516626427890043 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7500.json new file mode 100644 index 0000000000000000000000000000000000000000..2719ab73ed9b7f254d995371bc68e6476ea3c981 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_7500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.0488452911376953, + "total_l1_linf_norm": 16573.966796875, + "total_spectral_norm": 2.048844814300537, + "embed_lm_head_update_fnorm": 1.2109344005584717, + "embed_lm_head_max_l1_linf_norm": 0.4403546452522278, + "embed_lm_head_max_spectral_norm": 0.4304978847503662, + "layer_1_update_fnorm": 0.3511791527271271, + "layer_1_max_l1_linf_norm": 0.6923683285713196, + "layer_1_max_spectral_norm": 0.016176603734493256, + "layer_2_update_fnorm": 0.12178304046392441, + "layer_2_max_l1_linf_norm": 1.4725451469421387, + "layer_2_max_spectral_norm": 0.030973047018051147, + "layer_3_update_fnorm": 0.23841705918312073, + "layer_3_max_l1_linf_norm": 0.8734281659126282, + "layer_3_max_spectral_norm": 0.019647281616926193, + "layer_4_update_fnorm": 0.35350310802459717, + "layer_4_max_l1_linf_norm": 0.9122288227081299, + "layer_4_max_spectral_norm": 0.020260727033019066, + "layer_5_update_fnorm": 0.4296741783618927, + "layer_5_max_l1_linf_norm": 0.881483256816864, + "layer_5_max_spectral_norm": 0.019496463239192963, + 
"layer_6_update_fnorm": 0.46919915080070496, + "layer_6_max_l1_linf_norm": 0.7738747596740723, + "layer_6_max_spectral_norm": 0.01706176996231079, + "layer_7_update_fnorm": 0.5543575882911682, + "layer_7_max_l1_linf_norm": 0.648318886756897, + "layer_7_max_spectral_norm": 0.01442339364439249, + "layer_8_update_fnorm": 0.5688062310218811, + "layer_8_max_l1_linf_norm": 0.5795177221298218, + "layer_8_max_spectral_norm": 0.01301109790802002, + "layer_9_update_fnorm": 0.5826325416564941, + "layer_9_max_l1_linf_norm": 0.6001846790313721, + "layer_9_max_spectral_norm": 0.013302152045071125, + "layer_10_update_fnorm": 0.5874130129814148, + "layer_10_max_l1_linf_norm": 0.6146917939186096, + "layer_10_max_spectral_norm": 0.013599724508821964, + "layer_11_update_fnorm": 0.5692759156227112, + "layer_11_max_l1_linf_norm": 0.6342308521270752, + "layer_11_max_spectral_norm": 0.014051439240574837, + "layer_12_update_fnorm": 0.6059932708740234, + "layer_12_max_l1_linf_norm": 0.6313232183456421, + "layer_12_max_spectral_norm": 0.01387942023575306, + "block0_q_update_fnorm": 0.20690184831619263, + "block0_q_max_l1_linf_norm": 0.20902389287948608, + "block0_q_max_spectral_norm": 0.012035589665174484, + "block0_k_update_fnorm": 0.207858607172966, + "block0_k_max_l1_linf_norm": 0.2098880410194397, + "block0_k_max_spectral_norm": 0.012033488601446152, + "block0_v_update_fnorm": 0.10723487287759781, + "block0_v_max_l1_linf_norm": 0.12403708696365356, + "block0_v_max_spectral_norm": 0.01202546525746584, + "block0_o_update_fnorm": 0.11940387636423111, + "block0_o_max_l1_linf_norm": 0.1484026312828064, + "block0_o_max_spectral_norm": 0.012020645663142204, + "block0_mlp_win_update_fnorm": 0.07910820841789246, + "block0_mlp_win_max_l1_linf_norm": 0.16483086347579956, + "block0_mlp_win_max_spectral_norm": 0.012025751173496246, + "block0_mlp_wout_update_fnorm": 0.06947258114814758, + "block0_mlp_wout_max_l1_linf_norm": 0.12339137494564056, + "block0_mlp_wout_max_spectral_norm": 0.012023438699543476, + "block3_q_update_fnorm": 0.06339998543262482, + "block3_q_max_l1_linf_norm": 0.11292439699172974, + "block3_q_max_spectral_norm": 0.011998581700026989, + "block3_k_update_fnorm": 0.07152948528528214, + "block3_k_max_l1_linf_norm": 0.11677028238773346, + "block3_k_max_spectral_norm": 0.01202136930078268, + "block3_v_update_fnorm": 0.10679984092712402, + "block3_v_max_l1_linf_norm": 0.13073685765266418, + "block3_v_max_spectral_norm": 0.012022831477224827, + "block3_o_update_fnorm": 0.1188327819108963, + "block3_o_max_l1_linf_norm": 0.11183962970972061, + "block3_o_max_spectral_norm": 0.012022985145449638, + "block3_mlp_win_update_fnorm": 0.16216151416301727, + "block3_mlp_win_max_l1_linf_norm": 0.1829753965139389, + "block3_mlp_win_max_spectral_norm": 0.012027726508677006, + "block3_mlp_wout_update_fnorm": 0.2508586049079895, + "block3_mlp_wout_max_l1_linf_norm": 0.39171141386032104, + "block3_mlp_wout_max_spectral_norm": 0.012037009000778198, + "block7_q_update_fnorm": 0.21703635156154633, + "block7_q_max_l1_linf_norm": 0.20886608958244324, + "block7_q_max_spectral_norm": 0.012034664861857891, + "block7_k_update_fnorm": 0.2390846163034439, + "block7_k_max_l1_linf_norm": 0.2083948850631714, + "block7_k_max_spectral_norm": 0.012042136862874031, + "block7_v_update_fnorm": 0.1782497614622116, + "block7_v_max_l1_linf_norm": 0.20101647078990936, + "block7_v_max_spectral_norm": 0.01203248929232359, + "block7_o_update_fnorm": 0.24465206265449524, + "block7_o_max_l1_linf_norm": 0.20528292655944824, + "block7_o_max_spectral_norm": 
0.012043718248605728, + "block7_mlp_win_update_fnorm": 0.2646970748901367, + "block7_mlp_win_max_l1_linf_norm": 0.17121970653533936, + "block7_mlp_win_max_spectral_norm": 0.01203984022140503, + "block7_mlp_wout_update_fnorm": 0.23922504484653473, + "block7_mlp_wout_max_l1_linf_norm": 0.3979591131210327, + "block7_mlp_wout_max_spectral_norm": 0.011364110745489597, + "block11_q_update_fnorm": 0.2466413676738739, + "block11_q_max_l1_linf_norm": 0.20600932836532593, + "block11_q_max_spectral_norm": 0.012037391774356365, + "block11_k_update_fnorm": 0.2492845505475998, + "block11_k_max_l1_linf_norm": 0.2119217962026596, + "block11_k_max_spectral_norm": 0.01203816756606102, + "block11_v_update_fnorm": 0.2390933632850647, + "block11_v_max_l1_linf_norm": 0.20689797401428223, + "block11_v_max_spectral_norm": 0.012041675858199596, + "block11_o_update_fnorm": 0.24714988470077515, + "block11_o_max_l1_linf_norm": 0.20693345367908478, + "block11_o_max_spectral_norm": 0.012046570889651775, + "block11_mlp_win_update_fnorm": 0.24009203910827637, + "block11_mlp_win_max_l1_linf_norm": 0.15827667713165283, + "block11_mlp_win_max_spectral_norm": 0.01185343787074089, + "block11_mlp_wout_update_fnorm": 0.26074010133743286, + "block11_mlp_wout_max_l1_linf_norm": 0.4499317407608032, + "block11_mlp_wout_max_spectral_norm": 0.012052027508616447, + "total_sharpness": 0.3618265390396118, + "block_total_sharpness": 0.09225858747959137, + "v_norm_block": 1.652695894241333, + "v_T_H_v_block": 0.2519954442977905, + "v_norm": 2.0488452911376953, + "ip_v_neg_g_hvp": 0.048105135560035706, + "cos_v_neg_g_hvp": 0.00252923765219748, + "g_hvp_norm": 9.28309154510498, + "ip_v_neg_g_t": 0.2219284176826477, + "cos_v_neg_g_t": 0.013169029727578163, + "g_t_norm": 8.225266456604004, + "g_norm": 9.28309154510498, + "hv_norm": 440.7418518066406, + "cos_v_hv": 0.0016819972079247236, + "hg_norm": 13081286.0, + "cos_g_hg": 0.08291441202163696, + "v_parallel_norm": 0.0007344563491642475, + "v_perp_norm": 2.048845052719116, + "embed_lm_head_v_norm": 1.2109344005584717, + "embed_lm_head_cos_v_neg_g": 0.0023459678050130606, + "layer_1_v_norm": 0.3511791527271271, + "layer_1_cos_v_neg_g": -0.00045949360355734825, + "layer_2_v_norm": 0.12178304046392441, + "layer_2_cos_v_neg_g": 0.012409759685397148, + "layer_3_v_norm": 0.23841705918312073, + "layer_3_cos_v_neg_g": 0.030362967401742935, + "layer_4_v_norm": 0.35350310802459717, + "layer_4_cos_v_neg_g": 0.02226868085563183, + "layer_5_v_norm": 0.4296741783618927, + "layer_5_cos_v_neg_g": 0.02690262906253338, + "layer_6_v_norm": 0.46919918060302734, + "layer_6_cos_v_neg_g": 0.02860274165868759, + "layer_7_v_norm": 0.5543575882911682, + "layer_7_cos_v_neg_g": 0.02699979394674301, + "layer_8_v_norm": 0.5688062310218811, + "layer_8_cos_v_neg_g": 0.024490628391504288, + "layer_9_v_norm": 0.5826325416564941, + "layer_9_cos_v_neg_g": 0.027198191732168198, + "layer_10_v_norm": 0.5874130129814148, + "layer_10_cos_v_neg_g": 0.026968689635396004, + "layer_11_v_norm": 0.5692759156227112, + "layer_11_cos_v_neg_g": 0.036192744970321655, + "layer_12_v_norm": 0.6059932708740234, + "layer_12_cos_v_neg_g": 0.06306172162294388, + "block0_q_v_norm": 0.20690184831619263, + "block0_q_cos_v_neg_g": -0.003452225122600794, + "block0_k_v_norm": 0.207858607172966, + "block0_k_cos_v_neg_g": -0.0010523281525820494, + "block0_v_v_norm": 0.10723487287759781, + "block0_v_cos_v_neg_g": -0.0019378747092559934, + "block0_o_v_norm": 0.11940387636423111, + "block0_o_cos_v_neg_g": 0.004293302074074745, + "block0_mlp_win_v_norm": 
0.07910820841789246, + "block0_mlp_win_cos_v_neg_g": -0.003418520325794816, + "block0_mlp_wout_v_norm": 0.06947258114814758, + "block0_mlp_wout_cos_v_neg_g": -0.0015115690184757113, + "block3_q_v_norm": 0.06339998543262482, + "block3_q_cos_v_neg_g": 0.02671545185148716, + "block3_k_v_norm": 0.07152948528528214, + "block3_k_cos_v_neg_g": 0.021248219534754753, + "block3_v_v_norm": 0.10679984092712402, + "block3_v_cos_v_neg_g": 0.021087072789669037, + "block3_o_v_norm": 0.1188327819108963, + "block3_o_cos_v_neg_g": 0.025305921211838722, + "block3_mlp_win_v_norm": 0.16216151416301727, + "block3_mlp_win_cos_v_neg_g": 0.02743266336619854, + "block3_mlp_wout_v_norm": 0.2508586049079895, + "block3_mlp_wout_cos_v_neg_g": 0.06021757051348686, + "block7_q_v_norm": 0.21703635156154633, + "block7_q_cos_v_neg_g": 0.031537532806396484, + "block7_k_v_norm": 0.2390846163034439, + "block7_k_cos_v_neg_g": 0.06816476583480835, + "block7_v_v_norm": 0.1782497614622116, + "block7_v_cos_v_neg_g": 0.024606985971331596, + "block7_o_v_norm": 0.24465206265449524, + "block7_o_cos_v_neg_g": 0.07253123074769974, + "block7_mlp_win_v_norm": 0.2646970748901367, + "block7_mlp_win_cos_v_neg_g": 0.029271245002746582, + "block7_mlp_wout_v_norm": 0.23922504484653473, + "block7_mlp_wout_cos_v_neg_g": 0.11215874552726746, + "block11_q_v_norm": 0.2466413676738739, + "block11_q_cos_v_neg_g": 0.06887795031070709, + "block11_k_v_norm": 0.2492845505475998, + "block11_k_cos_v_neg_g": 0.08982900530099869, + "block11_v_v_norm": 0.2390933632850647, + "block11_v_cos_v_neg_g": 0.04495057836174965, + "block11_o_v_norm": 0.24714988470077515, + "block11_o_cos_v_neg_g": 0.08365514874458313, + "block11_mlp_win_v_norm": 0.24009203910827637, + "block11_mlp_win_cos_v_neg_g": 0.08432677388191223, + "block11_mlp_wout_v_norm": 0.26074010133743286, + "block11_mlp_wout_cos_v_neg_g": 0.06815702468156815, + "embed_lm_head_sharpness": 0.21346724033355713, + "layer_1_sharpness": 3.6465883255004883, + "layer_2_sharpness": 1.8553051948547363, + "layer_3_sharpness": -0.008251700550317764, + "layer_4_sharpness": 0.004631415940821171, + "layer_5_sharpness": 0.004220117349177599, + "layer_6_sharpness": 0.0029110147152096033, + "layer_7_sharpness": 0.0017824227688834071, + "layer_8_sharpness": 0.0013806521892547607, + "layer_9_sharpness": 0.0007986227865330875, + "layer_10_sharpness": 0.0005082337884232402, + "layer_11_sharpness": 0.0005889104213565588, + "layer_12_sharpness": 0.001023592660203576, + "block0_q_sharpness": -0.06099550798535347, + "block0_k_sharpness": -0.050324831157922745, + "block0_v_sharpness": 15.439591407775879, + "block0_o_sharpness": 7.274275302886963, + "block0_mlp_win_sharpness": -1.7929948568344116, + "block0_mlp_wout_sharpness": 0.19028933346271515, + "block3_q_sharpness": 0.0007680428680032492, + "block3_k_sharpness": 0.0035171692725270987, + "block3_v_sharpness": 0.011338353157043457, + "block3_o_sharpness": 0.003876590868458152, + "block3_mlp_win_sharpness": 0.0021179206669330597, + "block3_mlp_wout_sharpness": 0.00043705751886591315, + "block7_q_sharpness": 0.00017103762365877628, + "block7_k_sharpness": 0.000182643678272143, + "block7_v_sharpness": 0.00450478820130229, + "block7_o_sharpness": 8.356923353858292e-05, + "block7_mlp_win_sharpness": 0.0005156054394319654, + "block7_mlp_wout_sharpness": 0.00010659154213499278, + "block11_q_sharpness": 8.509197505190969e-05, + "block11_k_sharpness": 5.3894356824457645e-05, + "block11_v_sharpness": 0.00012430532660800964, + "block11_o_sharpness": 7.209831528598443e-05, + 
"block11_mlp_win_sharpness": 0.0007416453445330262, + "block11_mlp_wout_sharpness": 0.0014654549304395914, + "sum_layer_numerators": 0.48077564705290343, + "block_diag_sharpness": 0.17601779031651316, + "cross_layer_sharpness": -0.08375920283692179 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8000.json new file mode 100644 index 0000000000000000000000000000000000000000..adc9ca1a8620eaf9573671ded3de8f7e730bce98 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 2.1362016201019287, + "total_l1_linf_norm": 17604.2421875, + "total_spectral_norm": 2.136201858520508, + "embed_lm_head_update_fnorm": 1.2645865678787231, + "embed_lm_head_max_l1_linf_norm": 0.47111034393310547, + "embed_lm_head_max_spectral_norm": 0.30782854557037354, + "layer_1_update_fnorm": 0.37408870458602905, + "layer_1_max_l1_linf_norm": 0.4468030333518982, + "layer_1_max_spectral_norm": 0.012033874168992043, + "layer_2_update_fnorm": 0.22249546647071838, + "layer_2_max_l1_linf_norm": 0.4510030746459961, + "layer_2_max_spectral_norm": 0.012101773172616959, + "layer_3_update_fnorm": 0.2619306743144989, + "layer_3_max_l1_linf_norm": 0.42058712244033813, + "layer_3_max_spectral_norm": 0.013980644755065441, + "layer_4_update_fnorm": 0.3848476707935333, + "layer_4_max_l1_linf_norm": 0.48865270614624023, + "layer_4_max_spectral_norm": 0.01941383257508278, + "layer_5_update_fnorm": 0.4714718759059906, + "layer_5_max_l1_linf_norm": 0.44571852684020996, + "layer_5_max_spectral_norm": 0.013707896694540977, + "layer_6_update_fnorm": 0.5157232880592346, + "layer_6_max_l1_linf_norm": 0.4282790720462799, + "layer_6_max_spectral_norm": 0.01203946303576231, + "layer_7_update_fnorm": 0.5814160704612732, + "layer_7_max_l1_linf_norm": 0.39673614501953125, + "layer_7_max_spectral_norm": 0.012048332951962948, + "layer_8_update_fnorm": 0.5874386429786682, + "layer_8_max_l1_linf_norm": 0.3959919810295105, + "layer_8_max_spectral_norm": 0.012046324089169502, + "layer_9_update_fnorm": 0.5905677080154419, + "layer_9_max_l1_linf_norm": 0.40837186574935913, + "layer_9_max_spectral_norm": 0.012046373449265957, + "layer_10_update_fnorm": 0.5921769142150879, + "layer_10_max_l1_linf_norm": 0.42372721433639526, + "layer_10_max_spectral_norm": 0.012053331360220909, + "layer_11_update_fnorm": 0.5700770616531372, + "layer_11_max_l1_linf_norm": 0.46290433406829834, + "layer_11_max_spectral_norm": 0.012044324539601803, + "layer_12_update_fnorm": 0.6018306612968445, + "layer_12_max_l1_linf_norm": 0.46222639083862305, + "layer_12_max_spectral_norm": 0.01204740907996893, + "block0_q_update_fnorm": 0.17853468656539917, + "block0_q_max_l1_linf_norm": 0.20002663135528564, + "block0_q_max_spectral_norm": 0.012033874168992043, + "block0_k_update_fnorm": 0.18267713487148285, + "block0_k_max_l1_linf_norm": 0.19720561802387238, + "block0_k_max_spectral_norm": 0.012029066681861877, + "block0_v_update_fnorm": 0.11328474432229996, + "block0_v_max_l1_linf_norm": 0.14347805082798004, + "block0_v_max_spectral_norm": 0.012022881768643856, + "block0_o_update_fnorm": 0.13941513001918793, + "block0_o_max_l1_linf_norm": 0.156908318400383, + "block0_o_max_spectral_norm": 0.012028752826154232, + 
"block0_mlp_win_update_fnorm": 0.12789680063724518, + "block0_mlp_win_max_l1_linf_norm": 0.2091861367225647, + "block0_mlp_win_max_spectral_norm": 0.012017663568258286, + "block0_mlp_wout_update_fnorm": 0.1607624888420105, + "block0_mlp_wout_max_l1_linf_norm": 0.26161038875579834, + "block0_mlp_wout_max_spectral_norm": 0.012026791460812092, + "block3_q_update_fnorm": 0.033668775111436844, + "block3_q_max_l1_linf_norm": 0.11479946970939636, + "block3_q_max_spectral_norm": 0.011764186434447765, + "block3_k_update_fnorm": 0.035130277276039124, + "block3_k_max_l1_linf_norm": 0.11036020517349243, + "block3_k_max_spectral_norm": 0.011745152063667774, + "block3_v_update_fnorm": 0.10486409068107605, + "block3_v_max_l1_linf_norm": 0.12633562088012695, + "block3_v_max_spectral_norm": 0.012023008428514004, + "block3_o_update_fnorm": 0.13680611550807953, + "block3_o_max_l1_linf_norm": 0.1197553500533104, + "block3_o_max_spectral_norm": 0.012027873657643795, + "block3_mlp_win_update_fnorm": 0.20308923721313477, + "block3_mlp_win_max_l1_linf_norm": 0.1804158240556717, + "block3_mlp_win_max_spectral_norm": 0.012031570076942444, + "block3_mlp_wout_update_fnorm": 0.27193683385849, + "block3_mlp_wout_max_l1_linf_norm": 0.43740910291671753, + "block3_mlp_wout_max_spectral_norm": 0.01204163208603859, + "block7_q_update_fnorm": 0.23683831095695496, + "block7_q_max_l1_linf_norm": 0.20864230394363403, + "block7_q_max_spectral_norm": 0.012042325921356678, + "block7_k_update_fnorm": 0.24572788178920746, + "block7_k_max_l1_linf_norm": 0.21005620062351227, + "block7_k_max_spectral_norm": 0.012040154077112675, + "block7_v_update_fnorm": 0.18241719901561737, + "block7_v_max_l1_linf_norm": 0.20373351871967316, + "block7_v_max_spectral_norm": 0.012030835263431072, + "block7_o_update_fnorm": 0.24727541208267212, + "block7_o_max_l1_linf_norm": 0.20813241600990295, + "block7_o_max_spectral_norm": 0.012043677270412445, + "block7_mlp_win_update_fnorm": 0.27733755111694336, + "block7_mlp_win_max_l1_linf_norm": 0.15878868103027344, + "block7_mlp_win_max_spectral_norm": 0.012046324089169502, + "block7_mlp_wout_update_fnorm": 0.23892517387866974, + "block7_mlp_wout_max_l1_linf_norm": 0.3959919810295105, + "block7_mlp_wout_max_spectral_norm": 0.011402050033211708, + "block11_q_update_fnorm": 0.24792471528053284, + "block11_q_max_l1_linf_norm": 0.20998847484588623, + "block11_q_max_spectral_norm": 0.012042286805808544, + "block11_k_update_fnorm": 0.25027576088905334, + "block11_k_max_l1_linf_norm": 0.214077427983284, + "block11_k_max_spectral_norm": 0.012040859088301659, + "block11_v_update_fnorm": 0.24439860880374908, + "block11_v_max_l1_linf_norm": 0.20781978964805603, + "block11_v_max_spectral_norm": 0.01204740907996893, + "block11_o_update_fnorm": 0.24839553236961365, + "block11_o_max_l1_linf_norm": 0.2081812024116516, + "block11_o_max_spectral_norm": 0.012046526186168194, + "block11_mlp_win_update_fnorm": 0.23428115248680115, + "block11_mlp_win_max_l1_linf_norm": 0.15821106731891632, + "block11_mlp_win_max_spectral_norm": 0.01134986337274313, + "block11_mlp_wout_update_fnorm": 0.24812199175357819, + "block11_mlp_wout_max_l1_linf_norm": 0.43103599548339844, + "block11_mlp_wout_max_spectral_norm": 0.012046967633068562, + "total_sharpness": 0.021163351833820343, + "block_total_sharpness": 0.022854730486869812, + "v_norm_block": 1.7216793298721313, + "v_T_H_v_block": 0.06774552166461945, + "v_norm": 2.1362016201019287, + "ip_v_neg_g_hvp": 0.0446353480219841, + "cos_v_neg_g_hvp": 0.01779208332300186, + "g_hvp_norm": 
1.1743834018707275, + "ip_v_neg_g_t": 0.10611476004123688, + "cos_v_neg_g_t": 0.02022227644920349, + "g_t_norm": 2.456425189971924, + "g_norm": 1.1743834018707275, + "hv_norm": 29.622007369995117, + "cos_v_hv": 0.0015262027736753225, + "hg_norm": 30410.76171875, + "cos_g_hg": 0.04541511833667755, + "v_parallel_norm": 0.0030391556210815907, + "v_perp_norm": 2.136199712753296, + "embed_lm_head_v_norm": 1.2645865678787231, + "embed_lm_head_cos_v_neg_g": 0.019406558945775032, + "layer_1_v_norm": 0.37408870458602905, + "layer_1_cos_v_neg_g": 0.024493547156453133, + "layer_2_v_norm": 0.22249546647071838, + "layer_2_cos_v_neg_g": 0.030696306377649307, + "layer_3_v_norm": 0.2619306743144989, + "layer_3_cos_v_neg_g": 0.019967157393693924, + "layer_4_v_norm": 0.3848476707935333, + "layer_4_cos_v_neg_g": 0.017642393708229065, + "layer_5_v_norm": 0.4714718759059906, + "layer_5_cos_v_neg_g": 0.022033097222447395, + "layer_6_v_norm": 0.5157232880592346, + "layer_6_cos_v_neg_g": 0.024310477077960968, + "layer_7_v_norm": 0.5814160704612732, + "layer_7_cos_v_neg_g": 0.023942213505506516, + "layer_8_v_norm": 0.5874386429786682, + "layer_8_cos_v_neg_g": 0.023862339556217194, + "layer_9_v_norm": 0.5905677080154419, + "layer_9_cos_v_neg_g": 0.02554117888212204, + "layer_10_v_norm": 0.5921769142150879, + "layer_10_cos_v_neg_g": 0.026532625779509544, + "layer_11_v_norm": 0.5700770616531372, + "layer_11_cos_v_neg_g": 0.03487486019730568, + "layer_12_v_norm": 0.6018306612968445, + "layer_12_cos_v_neg_g": 0.06774717569351196, + "block0_q_v_norm": 0.17853468656539917, + "block0_q_cos_v_neg_g": 0.01656544953584671, + "block0_k_v_norm": 0.18267713487148285, + "block0_k_cos_v_neg_g": 0.011933302506804466, + "block0_v_v_norm": 0.11328474432229996, + "block0_v_cos_v_neg_g": 0.02491174265742302, + "block0_o_v_norm": 0.13941513001918793, + "block0_o_cos_v_neg_g": 0.05416037142276764, + "block0_mlp_win_v_norm": 0.12789680063724518, + "block0_mlp_win_cos_v_neg_g": 0.04701361060142517, + "block0_mlp_wout_v_norm": 0.1607624888420105, + "block0_mlp_wout_cos_v_neg_g": 0.039721135050058365, + "block3_q_v_norm": 0.033668775111436844, + "block3_q_cos_v_neg_g": 0.01771828532218933, + "block3_k_v_norm": 0.035130277276039124, + "block3_k_cos_v_neg_g": 0.05236994847655296, + "block3_v_v_norm": 0.10486409068107605, + "block3_v_cos_v_neg_g": 0.008812476880848408, + "block3_o_v_norm": 0.13680611550807953, + "block3_o_cos_v_neg_g": 0.021706178784370422, + "block3_mlp_win_v_norm": 0.20308923721313477, + "block3_mlp_win_cos_v_neg_g": 0.0230962373316288, + "block3_mlp_wout_v_norm": 0.27193683385849, + "block3_mlp_wout_cos_v_neg_g": 0.039676785469055176, + "block7_q_v_norm": 0.23683831095695496, + "block7_q_cos_v_neg_g": 0.027322718873620033, + "block7_k_v_norm": 0.24572788178920746, + "block7_k_cos_v_neg_g": 0.07963437587022781, + "block7_v_v_norm": 0.18241719901561737, + "block7_v_cos_v_neg_g": 0.027698082849383354, + "block7_o_v_norm": 0.24727541208267212, + "block7_o_cos_v_neg_g": 0.07484930008649826, + "block7_mlp_win_v_norm": 0.27733755111694336, + "block7_mlp_win_cos_v_neg_g": 0.029167134314775467, + "block7_mlp_wout_v_norm": 0.23892517387866974, + "block7_mlp_wout_cos_v_neg_g": 0.11136548221111298, + "block11_q_v_norm": 0.24792471528053284, + "block11_q_cos_v_neg_g": 0.07303495705127716, + "block11_k_v_norm": 0.25027576088905334, + "block11_k_cos_v_neg_g": 0.0950329378247261, + "block11_v_v_norm": 0.24439860880374908, + "block11_v_cos_v_neg_g": 0.042204126715660095, + "block11_o_v_norm": 0.24839553236961365, + "block11_o_cos_v_neg_g": 
0.08070725202560425, + "block11_mlp_win_v_norm": 0.23428115248680115, + "block11_mlp_win_cos_v_neg_g": 0.09191933274269104, + "block11_mlp_wout_v_norm": 0.24812199175357819, + "block11_mlp_wout_cos_v_neg_g": 0.07823718339204788, + "embed_lm_head_sharpness": 0.001567192142829299, + "layer_1_sharpness": 0.224776953458786, + "layer_2_sharpness": 0.03780420497059822, + "layer_3_sharpness": 0.012062380090355873, + "layer_4_sharpness": 0.0041148182936012745, + "layer_5_sharpness": 0.002188856713473797, + "layer_6_sharpness": 0.002085981657728553, + "layer_7_sharpness": 0.001403347821906209, + "layer_8_sharpness": 0.001511782524175942, + "layer_9_sharpness": 0.0009479260770604014, + "layer_10_sharpness": 0.000543207221198827, + "layer_11_sharpness": 0.0005696333828382194, + "layer_12_sharpness": 0.0011763620423153043, + "block0_q_sharpness": -0.010102382861077785, + "block0_k_sharpness": -0.0059914058074355125, + "block0_v_sharpness": 0.35921546816825867, + "block0_o_sharpness": 0.24920840561389923, + "block0_mlp_win_sharpness": 0.1046401634812355, + "block0_mlp_wout_sharpness": 0.01375490240752697, + "block3_q_sharpness": -0.00021792907500639558, + "block3_k_sharpness": 0.012377799488604069, + "block3_v_sharpness": 0.013230369426310062, + "block3_o_sharpness": 0.0030056792311370373, + "block3_mlp_win_sharpness": 0.0006178360781632364, + "block3_mlp_wout_sharpness": 0.000265921582467854, + "block7_q_sharpness": 8.529729529982433e-05, + "block7_k_sharpness": 0.00010373101395089179, + "block7_v_sharpness": 0.0047241030260920525, + "block7_o_sharpness": 5.6520275393268093e-05, + "block7_mlp_win_sharpness": 0.0007056882604956627, + "block7_mlp_wout_sharpness": 0.00010499530617380515, + "block11_q_sharpness": 6.572648999281228e-05, + "block11_k_sharpness": 4.5624990889336914e-05, + "block11_v_sharpness": 9.353984933113679e-05, + "block11_o_sharpness": 4.029657065984793e-05, + "block11_mlp_win_sharpness": 0.001061157905496657, + "block11_mlp_wout_sharpness": 0.0018847067840397358, + "sum_layer_numerators": 0.037934040367818715, + "block_diag_sharpness": 0.012797483289228943, + "cross_layer_sharpness": 0.010057247197640869 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8500.json new file mode 100644 index 0000000000000000000000000000000000000000..13d4fcc03528ab241447fedf46991785d8f04f34 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_8500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.628250241279602, + "total_l1_linf_norm": 13493.9384765625, + "total_spectral_norm": 1.6282503604888916, + "embed_lm_head_update_fnorm": 0.9743742942810059, + "embed_lm_head_max_l1_linf_norm": 0.3845418095588684, + "embed_lm_head_max_spectral_norm": 0.18245287239551544, + "layer_1_update_fnorm": 0.2801077365875244, + "layer_1_max_l1_linf_norm": 0.2845177948474884, + "layer_1_max_spectral_norm": 0.009031573310494423, + "layer_2_update_fnorm": 0.1914007067680359, + "layer_2_max_l1_linf_norm": 0.34010353684425354, + "layer_2_max_spectral_norm": 0.009026458486914635, + "layer_3_update_fnorm": 0.21351855993270874, + "layer_3_max_l1_linf_norm": 0.3245617151260376, + "layer_3_max_spectral_norm": 0.009029257111251354, + "layer_4_update_fnorm": 0.2949253022670746, + "layer_4_max_l1_linf_norm": 0.4184126853942871, 
+ "layer_4_max_spectral_norm": 0.015646081417798996, + "layer_5_update_fnorm": 0.36626139283180237, + "layer_5_max_l1_linf_norm": 0.3406481146812439, + "layer_5_max_spectral_norm": 0.011363410390913486, + "layer_6_update_fnorm": 0.3860454261302948, + "layer_6_max_l1_linf_norm": 0.34915709495544434, + "layer_6_max_spectral_norm": 0.009034225717186928, + "layer_7_update_fnorm": 0.4392205476760864, + "layer_7_max_l1_linf_norm": 0.2984611690044403, + "layer_7_max_spectral_norm": 0.009039247408509254, + "layer_8_update_fnorm": 0.44197383522987366, + "layer_8_max_l1_linf_norm": 0.29894664883613586, + "layer_8_max_spectral_norm": 0.009043702855706215, + "layer_9_update_fnorm": 0.4439653754234314, + "layer_9_max_l1_linf_norm": 0.3084909915924072, + "layer_9_max_spectral_norm": 0.009039051830768585, + "layer_10_update_fnorm": 0.44601377844810486, + "layer_10_max_l1_linf_norm": 0.3330651521682739, + "layer_10_max_spectral_norm": 0.009043716825544834, + "layer_11_update_fnorm": 0.4289110004901886, + "layer_11_max_l1_linf_norm": 0.3653389811515808, + "layer_11_max_spectral_norm": 0.009041170589625835, + "layer_12_update_fnorm": 0.4502177834510803, + "layer_12_max_l1_linf_norm": 0.36529776453971863, + "layer_12_max_spectral_norm": 0.009038576856255531, + "block0_q_update_fnorm": 0.06568312644958496, + "block0_q_max_l1_linf_norm": 0.16873112320899963, + "block0_q_max_spectral_norm": 0.009027021005749702, + "block0_k_update_fnorm": 0.07347755879163742, + "block0_k_max_l1_linf_norm": 0.16551917791366577, + "block0_k_max_spectral_norm": 0.009031573310494423, + "block0_v_update_fnorm": 0.10047701001167297, + "block0_v_max_l1_linf_norm": 0.13220059871673584, + "block0_v_max_spectral_norm": 0.009025856852531433, + "block0_o_update_fnorm": 0.13094116747379303, + "block0_o_max_l1_linf_norm": 0.12934774160385132, + "block0_o_max_spectral_norm": 0.009028506465256214, + "block0_mlp_win_update_fnorm": 0.14127004146575928, + "block0_mlp_win_max_l1_linf_norm": 0.15891329944133759, + "block0_mlp_win_max_spectral_norm": 0.009027287364006042, + "block0_mlp_wout_update_fnorm": 0.14649516344070435, + "block0_mlp_wout_max_l1_linf_norm": 0.2363889515399933, + "block0_mlp_wout_max_spectral_norm": 0.009027265012264252, + "block3_q_update_fnorm": 0.025156134739518166, + "block3_q_max_l1_linf_norm": 0.07304129749536514, + "block3_q_max_spectral_norm": 0.008926132693886757, + "block3_k_update_fnorm": 0.028138000518083572, + "block3_k_max_l1_linf_norm": 0.07630693167448044, + "block3_k_max_spectral_norm": 0.008325587958097458, + "block3_v_update_fnorm": 0.08208832889795303, + "block3_v_max_l1_linf_norm": 0.10015685856342316, + "block3_v_max_spectral_norm": 0.009024526923894882, + "block3_o_update_fnorm": 0.10160008817911148, + "block3_o_max_l1_linf_norm": 0.0870688185095787, + "block3_o_max_spectral_norm": 0.00902643147855997, + "block3_mlp_win_update_fnorm": 0.16042672097682953, + "block3_mlp_win_max_l1_linf_norm": 0.13481701910495758, + "block3_mlp_win_max_spectral_norm": 0.009029698558151722, + "block3_mlp_wout_update_fnorm": 0.20542936027050018, + "block3_mlp_wout_max_l1_linf_norm": 0.3306117653846741, + "block3_mlp_wout_max_spectral_norm": 0.00903879851102829, + "block7_q_update_fnorm": 0.17764510214328766, + "block7_q_max_l1_linf_norm": 0.1548442542552948, + "block7_q_max_spectral_norm": 0.009037643671035767, + "block7_k_update_fnorm": 0.18478801846504211, + "block7_k_max_l1_linf_norm": 0.1547597348690033, + "block7_k_max_spectral_norm": 0.0090410765260458, + "block7_v_update_fnorm": 0.1375466138124466, + 
"block7_v_max_l1_linf_norm": 0.1535796821117401, + "block7_v_max_spectral_norm": 0.009028877131640911, + "block7_o_update_fnorm": 0.1858711987733841, + "block7_o_max_l1_linf_norm": 0.15496692061424255, + "block7_o_max_spectral_norm": 0.009042911231517792, + "block7_mlp_win_update_fnorm": 0.20918314158916473, + "block7_mlp_win_max_l1_linf_norm": 0.12019699066877365, + "block7_mlp_win_max_spectral_norm": 0.009043702855706215, + "block7_mlp_wout_update_fnorm": 0.17976118624210358, + "block7_mlp_wout_max_l1_linf_norm": 0.29894664883613586, + "block7_mlp_wout_max_spectral_norm": 0.008554385043680668, + "block11_q_update_fnorm": 0.18571515381336212, + "block11_q_max_l1_linf_norm": 0.15790614485740662, + "block11_q_max_spectral_norm": 0.009037629701197147, + "block11_k_update_fnorm": 0.1870725154876709, + "block11_k_max_l1_linf_norm": 0.1589246243238449, + "block11_k_max_spectral_norm": 0.009036163799464703, + "block11_v_update_fnorm": 0.1835646778345108, + "block11_v_max_l1_linf_norm": 0.15533724427223206, + "block11_v_max_spectral_norm": 0.009037022478878498, + "block11_o_update_fnorm": 0.18648651242256165, + "block11_o_max_l1_linf_norm": 0.15613996982574463, + "block11_o_max_spectral_norm": 0.009038576856255531, + "block11_mlp_win_update_fnorm": 0.1765311062335968, + "block11_mlp_win_max_l1_linf_norm": 0.11565112322568893, + "block11_mlp_win_max_spectral_norm": 0.008518358692526817, + "block11_mlp_wout_update_fnorm": 0.18287009000778198, + "block11_mlp_wout_max_l1_linf_norm": 0.3271602392196655, + "block11_mlp_wout_max_spectral_norm": 0.009035714901983738, + "total_sharpness": -0.006659386213868856, + "block_total_sharpness": 0.008779285475611687, + "v_norm_block": 1.3045281171798706, + "v_T_H_v_block": 0.014940531924366951, + "v_norm": 1.628250241279602, + "ip_v_neg_g_hvp": 0.02965349145233631, + "cos_v_neg_g_hvp": 0.02183842472732067, + "g_hvp_norm": 0.8339372277259827, + "ip_v_neg_g_t": 0.0533781535923481, + "cos_v_neg_g_t": 0.02743324637413025, + "g_t_norm": 1.1949925422668457, + "g_norm": 0.8339372277259827, + "hv_norm": 22.227914810180664, + "cos_v_hv": -0.00048781666555441916, + "hg_norm": 80799.7265625, + "cos_g_hg": -0.2114637792110443, + "v_parallel_norm": 0.0028622690588235855, + "v_perp_norm": 1.6282477378845215, + "embed_lm_head_v_norm": 0.9743742942810059, + "embed_lm_head_cos_v_neg_g": 0.029755013063549995, + "layer_1_v_norm": 0.2801077365875244, + "layer_1_cos_v_neg_g": 0.024586694315075874, + "layer_2_v_norm": 0.1914007067680359, + "layer_2_cos_v_neg_g": 0.032339293509721756, + "layer_3_v_norm": 0.21351857483386993, + "layer_3_cos_v_neg_g": 0.022008832544088364, + "layer_4_v_norm": 0.2949253022670746, + "layer_4_cos_v_neg_g": 0.020156947895884514, + "layer_5_v_norm": 0.36626139283180237, + "layer_5_cos_v_neg_g": 0.021264785900712013, + "layer_6_v_norm": 0.3860454261302948, + "layer_6_cos_v_neg_g": 0.025427840650081635, + "layer_7_v_norm": 0.4392205476760864, + "layer_7_cos_v_neg_g": 0.02420531027019024, + "layer_8_v_norm": 0.44197383522987366, + "layer_8_cos_v_neg_g": 0.023809410631656647, + "layer_9_v_norm": 0.4439653754234314, + "layer_9_cos_v_neg_g": 0.025402694940567017, + "layer_10_v_norm": 0.44601377844810486, + "layer_10_cos_v_neg_g": 0.025596095249056816, + "layer_11_v_norm": 0.4289110004901886, + "layer_11_cos_v_neg_g": 0.03636191040277481, + "layer_12_v_norm": 0.4502177834510803, + "layer_12_cos_v_neg_g": 0.06081216409802437, + "block0_q_v_norm": 0.06568312644958496, + "block0_q_cos_v_neg_g": 0.00285229180008173, + "block0_k_v_norm": 0.07347755879163742, + 
"block0_k_cos_v_neg_g": -0.002343136351555586, + "block0_v_v_norm": 0.10047701001167297, + "block0_v_cos_v_neg_g": 0.029763540253043175, + "block0_o_v_norm": 0.13094116747379303, + "block0_o_cos_v_neg_g": 0.03590651974081993, + "block0_mlp_win_v_norm": 0.14127004146575928, + "block0_mlp_win_cos_v_neg_g": 0.029722878709435463, + "block0_mlp_wout_v_norm": 0.14649516344070435, + "block0_mlp_wout_cos_v_neg_g": 0.03276507928967476, + "block3_q_v_norm": 0.025156134739518166, + "block3_q_cos_v_neg_g": 0.002635381883010268, + "block3_k_v_norm": 0.028138000518083572, + "block3_k_cos_v_neg_g": 0.015460838563740253, + "block3_v_v_norm": 0.08208832889795303, + "block3_v_cos_v_neg_g": 0.027823826298117638, + "block3_o_v_norm": 0.10160008817911148, + "block3_o_cos_v_neg_g": 0.023523952811956406, + "block3_mlp_win_v_norm": 0.16042672097682953, + "block3_mlp_win_cos_v_neg_g": 0.021927224472165108, + "block3_mlp_wout_v_norm": 0.20542936027050018, + "block3_mlp_wout_cos_v_neg_g": 0.03722576051950455, + "block7_q_v_norm": 0.17764510214328766, + "block7_q_cos_v_neg_g": 0.028990233317017555, + "block7_k_v_norm": 0.18478801846504211, + "block7_k_cos_v_neg_g": 0.07906585186719894, + "block7_v_v_norm": 0.1375466138124466, + "block7_v_cos_v_neg_g": 0.025618519634008408, + "block7_o_v_norm": 0.1858711987733841, + "block7_o_cos_v_neg_g": 0.0727609172463417, + "block7_mlp_win_v_norm": 0.20918314158916473, + "block7_mlp_win_cos_v_neg_g": 0.028464803472161293, + "block7_mlp_wout_v_norm": 0.17976118624210358, + "block7_mlp_wout_cos_v_neg_g": 0.109418123960495, + "block11_q_v_norm": 0.18571515381336212, + "block11_q_cos_v_neg_g": 0.07414636760950089, + "block11_k_v_norm": 0.1870725154876709, + "block11_k_cos_v_neg_g": 0.0956835225224495, + "block11_v_v_norm": 0.1835646778345108, + "block11_v_cos_v_neg_g": 0.042095012962818146, + "block11_o_v_norm": 0.18648651242256165, + "block11_o_cos_v_neg_g": 0.07917629182338715, + "block11_mlp_win_v_norm": 0.1765311062335968, + "block11_mlp_win_cos_v_neg_g": 0.077267125248909, + "block11_mlp_wout_v_norm": 0.18287009000778198, + "block11_mlp_wout_cos_v_neg_g": 0.07011336833238602, + "embed_lm_head_sharpness": -0.030286069959402084, + "layer_1_sharpness": -0.004932269919663668, + "layer_2_sharpness": 0.015236364677548409, + "layer_3_sharpness": 0.011149921454489231, + "layer_4_sharpness": 0.002661687321960926, + "layer_5_sharpness": 0.0022441267501562834, + "layer_6_sharpness": 0.002258226741105318, + "layer_7_sharpness": 0.001465670415200293, + "layer_8_sharpness": 0.001517510856501758, + "layer_9_sharpness": 0.0009357274393551052, + "layer_10_sharpness": 0.0005102147115394473, + "layer_11_sharpness": 0.000625771819613874, + "layer_12_sharpness": 0.0007365287165157497, + "block0_q_sharpness": -0.11802031099796295, + "block0_k_sharpness": 0.0023436909541487694, + "block0_v_sharpness": 0.014837159775197506, + "block0_o_sharpness": -0.0030649262480437756, + "block0_mlp_win_sharpness": 0.0015322064282372594, + "block0_mlp_wout_sharpness": 0.001605978817678988, + "block3_q_sharpness": 6.898245374031831e-06, + "block3_k_sharpness": 0.006370619870722294, + "block3_v_sharpness": 0.005839121527969837, + "block3_o_sharpness": 0.0025981271173805, + "block3_mlp_win_sharpness": 0.0005032655317336321, + "block3_mlp_wout_sharpness": 0.00032838559127412736, + "block7_q_sharpness": 0.00011755251762224361, + "block7_k_sharpness": 8.659968443680555e-05, + "block7_v_sharpness": 0.0047443220391869545, + "block7_o_sharpness": 5.69006624573376e-05, + "block7_mlp_win_sharpness": 0.0007344739278778434, + 
"block7_mlp_wout_sharpness": 0.0001127864743466489, + "block11_q_sharpness": 7.389650272671133e-05, + "block11_k_sharpness": 4.732681190944277e-05, + "block11_v_sharpness": 9.132897685049102e-05, + "block11_o_sharpness": 5.121441790834069e-05, + "block11_mlp_win_sharpness": 0.0005410220474004745, + "block11_mlp_wout_sharpness": 0.0011413565371185541, + "sum_layer_numerators": 0.002678143168859177, + "block_diag_sharpness": 0.0015737179617212917, + "cross_layer_sharpness": 0.007205567513890395 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9000.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9000.json new file mode 100644 index 0000000000000000000000000000000000000000..87fd8b505dcf861202a1415f38f1fb2ef34f9842 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9000.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 1.0718005895614624, + "total_l1_linf_norm": 8838.271484375, + "total_spectral_norm": 1.071800708770752, + "embed_lm_head_update_fnorm": 0.6584128737449646, + "embed_lm_head_max_l1_linf_norm": 0.19561226665973663, + "embed_lm_head_max_spectral_norm": 0.12762139737606049, + "layer_1_update_fnorm": 0.16410177946090698, + "layer_1_max_l1_linf_norm": 0.22041571140289307, + "layer_1_max_spectral_norm": 0.006021612323820591, + "layer_2_update_fnorm": 0.10441904515028, + "layer_2_max_l1_linf_norm": 0.2874220013618469, + "layer_2_max_spectral_norm": 0.006348103750497103, + "layer_3_update_fnorm": 0.13438677787780762, + "layer_3_max_l1_linf_norm": 0.2514808177947998, + "layer_3_max_spectral_norm": 0.0060196202248334885, + "layer_4_update_fnorm": 0.18552860617637634, + "layer_4_max_l1_linf_norm": 0.26811960339546204, + "layer_4_max_spectral_norm": 0.009869200177490711, + "layer_5_update_fnorm": 0.2180766463279724, + "layer_5_max_l1_linf_norm": 0.2731751501560211, + "layer_5_max_spectral_norm": 0.010274576023221016, + "layer_6_update_fnorm": 0.2500404715538025, + "layer_6_max_l1_linf_norm": 0.25315961241722107, + "layer_6_max_spectral_norm": 0.006434362381696701, + "layer_7_update_fnorm": 0.2888685464859009, + "layer_7_max_l1_linf_norm": 0.23625029623508453, + "layer_7_max_spectral_norm": 0.00602769386023283, + "layer_8_update_fnorm": 0.2924811542034149, + "layer_8_max_l1_linf_norm": 0.21941134333610535, + "layer_8_max_spectral_norm": 0.006029780954122543, + "layer_9_update_fnorm": 0.2952619194984436, + "layer_9_max_l1_linf_norm": 0.2456505298614502, + "layer_9_max_spectral_norm": 0.006029149983078241, + "layer_10_update_fnorm": 0.29591667652130127, + "layer_10_max_l1_linf_norm": 0.2559751868247986, + "layer_10_max_spectral_norm": 0.006030088756233454, + "layer_11_update_fnorm": 0.2846170961856842, + "layer_11_max_l1_linf_norm": 0.26908981800079346, + "layer_11_max_spectral_norm": 0.006030185613781214, + "layer_12_update_fnorm": 0.30019110441207886, + "layer_12_max_l1_linf_norm": 0.2836725115776062, + "layer_12_max_spectral_norm": 0.006256353575736284, + "block0_q_update_fnorm": 0.04502516984939575, + "block0_q_max_l1_linf_norm": 0.11202731728553772, + "block0_q_max_spectral_norm": 0.006020267028361559, + "block0_k_update_fnorm": 0.05116015300154686, + "block0_k_max_l1_linf_norm": 0.10602354258298874, + "block0_k_max_spectral_norm": 0.006018288433551788, + "block0_v_update_fnorm": 0.053196147084236145, + "block0_v_max_l1_linf_norm": 
0.08986921608448029, + "block0_v_max_spectral_norm": 0.006017433013767004, + "block0_o_update_fnorm": 0.08250656723976135, + "block0_o_max_l1_linf_norm": 0.08134585618972778, + "block0_o_max_spectral_norm": 0.006021612323820591, + "block0_mlp_win_update_fnorm": 0.07807519286870956, + "block0_mlp_win_max_l1_linf_norm": 0.10357925295829773, + "block0_mlp_win_max_spectral_norm": 0.00601908378303051, + "block0_mlp_wout_update_fnorm": 0.0805884599685669, + "block0_mlp_wout_max_l1_linf_norm": 0.13261760771274567, + "block0_mlp_wout_max_spectral_norm": 0.006019663065671921, + "block3_q_update_fnorm": 0.011686531826853752, + "block3_q_max_l1_linf_norm": 0.03702183812856674, + "block3_q_max_spectral_norm": 0.004424683749675751, + "block3_k_update_fnorm": 0.016277000308036804, + "block3_k_max_l1_linf_norm": 0.04445093125104904, + "block3_k_max_spectral_norm": 0.00519889360293746, + "block3_v_update_fnorm": 0.05001160129904747, + "block3_v_max_l1_linf_norm": 0.06379688531160355, + "block3_v_max_spectral_norm": 0.006018342450261116, + "block3_o_update_fnorm": 0.062015894800424576, + "block3_o_max_l1_linf_norm": 0.05257910490036011, + "block3_o_max_spectral_norm": 0.0060194144025444984, + "block3_mlp_win_update_fnorm": 0.09952162951231003, + "block3_mlp_win_max_l1_linf_norm": 0.09498081356287003, + "block3_mlp_win_max_spectral_norm": 0.006021133158355951, + "block3_mlp_wout_update_fnorm": 0.1323828548192978, + "block3_mlp_wout_max_l1_linf_norm": 0.21049976348876953, + "block3_mlp_wout_max_spectral_norm": 0.0060262009501457214, + "block7_q_update_fnorm": 0.11801917105913162, + "block7_q_max_l1_linf_norm": 0.1048392653465271, + "block7_q_max_spectral_norm": 0.006026561371982098, + "block7_k_update_fnorm": 0.12289157509803772, + "block7_k_max_l1_linf_norm": 0.10447119921445847, + "block7_k_max_spectral_norm": 0.006027488503605127, + "block7_v_update_fnorm": 0.08752197027206421, + "block7_v_max_l1_linf_norm": 0.10159189254045486, + "block7_v_max_spectral_norm": 0.0060209548100829124, + "block7_o_update_fnorm": 0.12377900630235672, + "block7_o_max_l1_linf_norm": 0.10487401485443115, + "block7_o_max_spectral_norm": 0.006029780954122543, + "block7_mlp_win_update_fnorm": 0.13823573291301727, + "block7_mlp_win_max_l1_linf_norm": 0.08021664619445801, + "block7_mlp_win_max_spectral_norm": 0.00602596765384078, + "block7_mlp_wout_update_fnorm": 0.11985554546117783, + "block7_mlp_wout_max_l1_linf_norm": 0.19893746078014374, + "block7_mlp_wout_max_spectral_norm": 0.005708208307623863, + "block11_q_update_fnorm": 0.1239149272441864, + "block11_q_max_l1_linf_norm": 0.10533846169710159, + "block11_q_max_spectral_norm": 0.006028722506016493, + "block11_k_update_fnorm": 0.12494466453790665, + "block11_k_max_l1_linf_norm": 0.1068546324968338, + "block11_k_max_spectral_norm": 0.0060280379839241505, + "block11_v_update_fnorm": 0.12150997668504715, + "block11_v_max_l1_linf_norm": 0.10431816428899765, + "block11_v_max_spectral_norm": 0.006025440059602261, + "block11_o_update_fnorm": 0.12413901835680008, + "block11_o_max_l1_linf_norm": 0.10349956154823303, + "block11_o_max_spectral_norm": 0.006027743685990572, + "block11_mlp_win_update_fnorm": 0.11732621490955353, + "block11_mlp_win_max_l1_linf_norm": 0.08414952456951141, + "block11_mlp_win_max_spectral_norm": 0.005680595524609089, + "block11_mlp_wout_update_fnorm": 0.12301266193389893, + "block11_mlp_wout_max_l1_linf_norm": 0.21712027490139008, + "block11_mlp_wout_max_spectral_norm": 0.006016165018081665, + "total_sharpness": 0.016189223155379295, + "block_total_sharpness": 
0.021557219326496124, + "v_norm_block": 0.8457240462303162, + "v_T_H_v_block": 0.015418783761560917, + "v_norm": 1.0718005895614624, + "ip_v_neg_g_hvp": 0.018212493509054184, + "cos_v_neg_g_hvp": 0.022560348734259605, + "g_hvp_norm": 0.7531988024711609, + "ip_v_neg_g_t": 0.03686511144042015, + "cos_v_neg_g_t": 0.02047288604080677, + "g_t_norm": 1.6800510883331299, + "g_norm": 0.7531988024711609, + "hv_norm": 9.698585510253906, + "cos_v_hv": 0.0017890877788886428, + "hg_norm": 5548.98046875, + "cos_g_hg": -0.008386821486055851, + "v_parallel_norm": 0.0016258592950180173, + "v_perp_norm": 1.071799397468567, + "embed_lm_head_v_norm": 0.6584128737449646, + "embed_lm_head_cos_v_neg_g": 0.04847899079322815, + "layer_1_v_norm": 0.16410177946090698, + "layer_1_cos_v_neg_g": 0.012786291539669037, + "layer_2_v_norm": 0.10441904515028, + "layer_2_cos_v_neg_g": -0.007179418578743935, + "layer_3_v_norm": 0.1343867927789688, + "layer_3_cos_v_neg_g": 0.02230132929980755, + "layer_4_v_norm": 0.18552860617637634, + "layer_4_cos_v_neg_g": 0.017981065437197685, + "layer_5_v_norm": 0.2180766463279724, + "layer_5_cos_v_neg_g": 0.027729930356144905, + "layer_6_v_norm": 0.2500405013561249, + "layer_6_cos_v_neg_g": 0.026923775672912598, + "layer_7_v_norm": 0.2888685464859009, + "layer_7_cos_v_neg_g": 0.028022339567542076, + "layer_8_v_norm": 0.2924811542034149, + "layer_8_cos_v_neg_g": 0.0267450213432312, + "layer_9_v_norm": 0.2952619194984436, + "layer_9_cos_v_neg_g": 0.029377708211541176, + "layer_10_v_norm": 0.29591667652130127, + "layer_10_cos_v_neg_g": 0.03310598433017731, + "layer_11_v_norm": 0.2846170961856842, + "layer_11_cos_v_neg_g": 0.04568124935030937, + "layer_12_v_norm": 0.30019110441207886, + "layer_12_cos_v_neg_g": 0.07793385535478592, + "block0_q_v_norm": 0.04502516984939575, + "block0_q_cos_v_neg_g": 0.011886391788721085, + "block0_k_v_norm": 0.05116015300154686, + "block0_k_cos_v_neg_g": 0.014505366794764996, + "block0_v_v_norm": 0.053196147084236145, + "block0_v_cos_v_neg_g": 0.012723037041723728, + "block0_o_v_norm": 0.08250656723976135, + "block0_o_cos_v_neg_g": 0.022492414340376854, + "block0_mlp_win_v_norm": 0.07807519286870956, + "block0_mlp_win_cos_v_neg_g": 0.0096547557041049, + "block0_mlp_wout_v_norm": 0.0805884599685669, + "block0_mlp_wout_cos_v_neg_g": 0.01272050105035305, + "block3_q_v_norm": 0.011686531826853752, + "block3_q_cos_v_neg_g": -0.009675828740000725, + "block3_k_v_norm": 0.016277000308036804, + "block3_k_cos_v_neg_g": 0.02479284442961216, + "block3_v_v_norm": 0.05001160129904747, + "block3_v_cos_v_neg_g": 0.0033582928590476513, + "block3_o_v_norm": 0.062015894800424576, + "block3_o_cos_v_neg_g": 0.024164743721485138, + "block3_mlp_win_v_norm": 0.09952162951231003, + "block3_mlp_win_cos_v_neg_g": 0.025703346356749535, + "block3_mlp_wout_v_norm": 0.1323828548192978, + "block3_mlp_wout_cos_v_neg_g": 0.045005377382040024, + "block7_q_v_norm": 0.11801917105913162, + "block7_q_cos_v_neg_g": 0.03222300484776497, + "block7_k_v_norm": 0.12289157509803772, + "block7_k_cos_v_neg_g": 0.08271114528179169, + "block7_v_v_norm": 0.08752197027206421, + "block7_v_cos_v_neg_g": 0.026501420885324478, + "block7_o_v_norm": 0.12377900630235672, + "block7_o_cos_v_neg_g": 0.09019298106431961, + "block7_mlp_win_v_norm": 0.13823573291301727, + "block7_mlp_win_cos_v_neg_g": 0.03166421875357628, + "block7_mlp_wout_v_norm": 0.11985554546117783, + "block7_mlp_wout_cos_v_neg_g": 0.12962980568408966, + "block11_q_v_norm": 0.1239149272441864, + "block11_q_cos_v_neg_g": 0.08053243160247803, + 
"block11_k_v_norm": 0.12494466453790665, + "block11_k_cos_v_neg_g": 0.10960783064365387, + "block11_v_v_norm": 0.12150997668504715, + "block11_v_cos_v_neg_g": 0.048164065927267075, + "block11_o_v_norm": 0.12413901835680008, + "block11_o_cos_v_neg_g": 0.09042773395776749, + "block11_mlp_win_v_norm": 0.11732621490955353, + "block11_mlp_win_cos_v_neg_g": 0.10445000231266022, + "block11_mlp_wout_v_norm": 0.12301266193389893, + "block11_mlp_wout_cos_v_neg_g": 0.0900922566652298, + "embed_lm_head_sharpness": -0.0023908691946417093, + "layer_1_sharpness": 0.2247011363506317, + "layer_2_sharpness": 0.13994969427585602, + "layer_3_sharpness": 0.01751812733709812, + "layer_4_sharpness": 0.003362491726875305, + "layer_5_sharpness": 0.004523777402937412, + "layer_6_sharpness": 0.0029226269107311964, + "layer_7_sharpness": 0.0021100027952343225, + "layer_8_sharpness": 0.0018145806388929486, + "layer_9_sharpness": 0.0011300994083285332, + "layer_10_sharpness": 0.0006713619804941118, + "layer_11_sharpness": 0.000758516020141542, + "layer_12_sharpness": 0.0010388742666691542, + "block0_q_sharpness": 0.06874721497297287, + "block0_k_sharpness": -0.02380337007343769, + "block0_v_sharpness": 0.39877453446388245, + "block0_o_sharpness": 0.18427802622318268, + "block0_mlp_win_sharpness": -0.002126818522810936, + "block0_mlp_wout_sharpness": 0.006010756362229586, + "block3_q_sharpness": -2.2144276954350062e-05, + "block3_k_sharpness": 0.0167290847748518, + "block3_v_sharpness": 0.008374580182135105, + "block3_o_sharpness": 0.0038006403483450413, + "block3_mlp_win_sharpness": 0.0010416426230221987, + "block3_mlp_wout_sharpness": 0.0004715278628282249, + "block7_q_sharpness": 0.00015469231584575027, + "block7_k_sharpness": 9.453501843381673e-05, + "block7_v_sharpness": 0.005843010731041431, + "block7_o_sharpness": 8.327160321641713e-05, + "block7_mlp_win_sharpness": 0.0008302732021547854, + "block7_mlp_wout_sharpness": 0.00012043778406223282, + "block11_q_sharpness": 0.00016209317254833877, + "block11_k_sharpness": 9.124436473939568e-05, + "block11_v_sharpness": 0.00011974151129834354, + "block11_o_sharpness": 4.924349195789546e-05, + "block11_mlp_win_sharpness": 0.0008483564597554505, + "block11_mlp_wout_sharpness": 0.0012718311045318842, + "sum_layer_numerators": 0.009050632002405122, + "block_diag_sharpness": 0.012653816989296452, + "cross_layer_sharpness": 0.008903402337199672 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9500.json b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9500.json new file mode 100644 index 0000000000000000000000000000000000000000..a85668ad8f44961cf2a6e2a96528c6133343a48f --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/sharpness_step_9500.json @@ -0,0 +1,248 @@ +{ + "total_update_fnorm": 0.5245451331138611, + "total_l1_linf_norm": 4258.34619140625, + "total_spectral_norm": 0.5245450735092163, + "embed_lm_head_update_fnorm": 0.3217172622680664, + "embed_lm_head_max_l1_linf_norm": 0.11167322099208832, + "embed_lm_head_max_spectral_norm": 0.08476372808218002, + "layer_1_update_fnorm": 0.056799083948135376, + "layer_1_max_l1_linf_norm": 0.15473006665706635, + "layer_1_max_spectral_norm": 0.003347964957356453, + "layer_2_update_fnorm": 0.0454864539206028, + "layer_2_max_l1_linf_norm": 0.1609925776720047, + "layer_2_max_spectral_norm": 
0.0035769061651080847, + "layer_3_update_fnorm": 0.05756290256977081, + "layer_3_max_l1_linf_norm": 0.16674035787582397, + "layer_3_max_spectral_norm": 0.003716718405485153, + "layer_4_update_fnorm": 0.081295907497406, + "layer_4_max_l1_linf_norm": 0.17139476537704468, + "layer_4_max_spectral_norm": 0.004927706439048052, + "layer_5_update_fnorm": 0.10458546876907349, + "layer_5_max_l1_linf_norm": 0.146102637052536, + "layer_5_max_spectral_norm": 0.0052140397019684315, + "layer_6_update_fnorm": 0.12609438598155975, + "layer_6_max_l1_linf_norm": 0.13585984706878662, + "layer_6_max_spectral_norm": 0.0033418170642107725, + "layer_7_update_fnorm": 0.14522218704223633, + "layer_7_max_l1_linf_norm": 0.1235981285572052, + "layer_7_max_spectral_norm": 0.003017901675775647, + "layer_8_update_fnorm": 0.1469893902540207, + "layer_8_max_l1_linf_norm": 0.12612256407737732, + "layer_8_max_spectral_norm": 0.0030166374053806067, + "layer_9_update_fnorm": 0.14806479215621948, + "layer_9_max_l1_linf_norm": 0.1330423653125763, + "layer_9_max_spectral_norm": 0.00301821599714458, + "layer_10_update_fnorm": 0.1488383263349533, + "layer_10_max_l1_linf_norm": 0.1385517716407776, + "layer_10_max_spectral_norm": 0.0030693085864186287, + "layer_11_update_fnorm": 0.1427977979183197, + "layer_11_max_l1_linf_norm": 0.14281781017780304, + "layer_11_max_spectral_norm": 0.0031591090373694897, + "layer_12_update_fnorm": 0.1497577428817749, + "layer_12_max_l1_linf_norm": 0.13793030381202698, + "layer_12_max_spectral_norm": 0.0030768578872084618, + "block0_q_update_fnorm": 0.01904013566672802, + "block0_q_max_l1_linf_norm": 0.05172570422291756, + "block0_q_max_spectral_norm": 0.0030130574014037848, + "block0_k_update_fnorm": 0.021832115948200226, + "block0_k_max_l1_linf_norm": 0.05380196124315262, + "block0_k_max_spectral_norm": 0.0030118345748633146, + "block0_v_update_fnorm": 0.017926599830389023, + "block0_v_max_l1_linf_norm": 0.03684811294078827, + "block0_v_max_spectral_norm": 0.00301294750533998, + "block0_o_update_fnorm": 0.02891155146062374, + "block0_o_max_l1_linf_norm": 0.036686114966869354, + "block0_o_max_spectral_norm": 0.0030127745121717453, + "block0_mlp_win_update_fnorm": 0.024882374331355095, + "block0_mlp_win_max_l1_linf_norm": 0.04328750818967819, + "block0_mlp_win_max_spectral_norm": 0.0029942570254206657, + "block0_mlp_wout_update_fnorm": 0.02427060902118683, + "block0_mlp_wout_max_l1_linf_norm": 0.039837684482336044, + "block0_mlp_wout_max_spectral_norm": 0.003010963089764118, + "block3_q_update_fnorm": 0.006608208175748587, + "block3_q_max_l1_linf_norm": 0.018854599446058273, + "block3_q_max_spectral_norm": 0.002685679355636239, + "block3_k_update_fnorm": 0.009190358221530914, + "block3_k_max_l1_linf_norm": 0.024680623784661293, + "block3_k_max_spectral_norm": 0.0028279332909733057, + "block3_v_update_fnorm": 0.022045183926820755, + "block3_v_max_l1_linf_norm": 0.03033960610628128, + "block3_v_max_spectral_norm": 0.00301206368021667, + "block3_o_update_fnorm": 0.02910931035876274, + "block3_o_max_l1_linf_norm": 0.02516327053308487, + "block3_o_max_spectral_norm": 0.003012832487002015, + "block3_mlp_win_update_fnorm": 0.040775179862976074, + "block3_mlp_win_max_l1_linf_norm": 0.04813140258193016, + "block3_mlp_win_max_spectral_norm": 0.003012773348018527, + "block3_mlp_wout_update_fnorm": 0.058436423540115356, + "block3_mlp_wout_max_l1_linf_norm": 0.09360004216432571, + "block3_mlp_wout_max_spectral_norm": 0.003013609442859888, + "block7_q_update_fnorm": 0.05856752768158913, + 
"block7_q_max_l1_linf_norm": 0.05183789134025574, + "block7_q_max_spectral_norm": 0.0030164101626724005, + "block7_k_update_fnorm": 0.06150934845209122, + "block7_k_max_l1_linf_norm": 0.05259689688682556, + "block7_k_max_spectral_norm": 0.0030166374053806067, + "block7_v_update_fnorm": 0.045980680733919144, + "block7_v_max_l1_linf_norm": 0.051576077938079834, + "block7_v_max_spectral_norm": 0.003013545647263527, + "block7_o_update_fnorm": 0.06201951205730438, + "block7_o_max_l1_linf_norm": 0.05209812521934509, + "block7_o_max_spectral_norm": 0.003016541711986065, + "block7_mlp_win_update_fnorm": 0.06918112933635712, + "block7_mlp_win_max_l1_linf_norm": 0.03987476974725723, + "block7_mlp_win_max_spectral_norm": 0.003015299793332815, + "block7_mlp_wout_update_fnorm": 0.06023475155234337, + "block7_mlp_wout_max_l1_linf_norm": 0.09954158216714859, + "block7_mlp_wout_max_spectral_norm": 0.002855523256585002, + "block11_q_update_fnorm": 0.06199973076581955, + "block11_q_max_l1_linf_norm": 0.05372248589992523, + "block11_q_max_spectral_norm": 0.0030168097000569105, + "block11_k_update_fnorm": 0.062472596764564514, + "block11_k_max_l1_linf_norm": 0.053195610642433167, + "block11_k_max_spectral_norm": 0.003015776164829731, + "block11_v_update_fnorm": 0.06074710935354233, + "block11_v_max_l1_linf_norm": 0.051355134695768356, + "block11_v_max_spectral_norm": 0.003015755442902446, + "block11_o_update_fnorm": 0.06214576214551926, + "block11_o_max_l1_linf_norm": 0.05199788883328438, + "block11_o_max_spectral_norm": 0.0030154709238559008, + "block11_mlp_win_update_fnorm": 0.05876392871141434, + "block11_mlp_win_max_l1_linf_norm": 0.042376380413770676, + "block11_mlp_win_max_spectral_norm": 0.002843452850356698, + "block11_mlp_wout_update_fnorm": 0.06045972928404808, + "block11_mlp_wout_max_l1_linf_norm": 0.10720904916524887, + "block11_mlp_wout_max_spectral_norm": 0.0030135579872876406, + "total_sharpness": -0.03922087699174881, + "block_total_sharpness": -0.03483561426401138, + "v_norm_block": 0.4143012464046478, + "v_T_H_v_block": -0.005979377310723066, + "v_norm": 0.5245451331138611, + "ip_v_neg_g_hvp": 0.008640144020318985, + "cos_v_neg_g_hvp": 0.015339899808168411, + "g_hvp_norm": 1.0737806558609009, + "ip_v_neg_g_t": 0.015456978231668472, + "cos_v_neg_g_t": 0.011220589280128479, + "g_t_norm": 2.6261894702911377, + "g_norm": 1.0737806558609009, + "hv_norm": 10.95451831817627, + "cos_v_hv": -0.0018780489917844534, + "hg_norm": 173454.3125, + "cos_g_hg": -0.12037073820829391, + "v_parallel_norm": 0.000924820953514427, + "v_perp_norm": 0.524544358253479, + "embed_lm_head_v_norm": 0.3217172622680664, + "embed_lm_head_cos_v_neg_g": 0.01682109571993351, + "layer_1_v_norm": 0.056799083948135376, + "layer_1_cos_v_neg_g": -0.02519378624856472, + "layer_2_v_norm": 0.0454864539206028, + "layer_2_cos_v_neg_g": -0.014097793959081173, + "layer_3_v_norm": 0.05756290256977081, + "layer_3_cos_v_neg_g": 0.033794838935136795, + "layer_4_v_norm": 0.081295907497406, + "layer_4_cos_v_neg_g": 0.04226517304778099, + "layer_5_v_norm": 0.10458546876907349, + "layer_5_cos_v_neg_g": 0.03057996556162834, + "layer_6_v_norm": 0.12609438598155975, + "layer_6_cos_v_neg_g": 0.025632480159401894, + "layer_7_v_norm": 0.14522218704223633, + "layer_7_cos_v_neg_g": 0.023446915671229362, + "layer_8_v_norm": 0.1469893902540207, + "layer_8_cos_v_neg_g": 0.022990530356764793, + "layer_9_v_norm": 0.14806479215621948, + "layer_9_cos_v_neg_g": 0.026297500357031822, + "layer_10_v_norm": 0.1488383263349533, + "layer_10_cos_v_neg_g": 
0.028035638853907585, + "layer_11_v_norm": 0.1427977979183197, + "layer_11_cos_v_neg_g": 0.038467105478048325, + "layer_12_v_norm": 0.1497577428817749, + "layer_12_cos_v_neg_g": 0.07153300940990448, + "block0_q_v_norm": 0.01904013566672802, + "block0_q_cos_v_neg_g": -0.01705254055559635, + "block0_k_v_norm": 0.021832115948200226, + "block0_k_cos_v_neg_g": 0.0004676705866586417, + "block0_v_v_norm": 0.017926599830389023, + "block0_v_cos_v_neg_g": -0.08104493468999863, + "block0_o_v_norm": 0.02891155146062374, + "block0_o_cos_v_neg_g": 0.004458756186068058, + "block0_mlp_win_v_norm": 0.024882374331355095, + "block0_mlp_win_cos_v_neg_g": -0.006319252774119377, + "block0_mlp_wout_v_norm": 0.02427060902118683, + "block0_mlp_wout_cos_v_neg_g": -0.02497810497879982, + "block3_q_v_norm": 0.006608208175748587, + "block3_q_cos_v_neg_g": -0.004985820036381483, + "block3_k_v_norm": 0.009190358221530914, + "block3_k_cos_v_neg_g": 0.10573392361402512, + "block3_v_v_norm": 0.022045183926820755, + "block3_v_cos_v_neg_g": 0.06030939519405365, + "block3_o_v_norm": 0.02910931035876274, + "block3_o_cos_v_neg_g": 0.04318522289395332, + "block3_mlp_win_v_norm": 0.040775179862976074, + "block3_mlp_win_cos_v_neg_g": 0.043477993458509445, + "block3_mlp_wout_v_norm": 0.058436423540115356, + "block3_mlp_wout_cos_v_neg_g": 0.052013278007507324, + "block7_q_v_norm": 0.05856752768158913, + "block7_q_cos_v_neg_g": 0.02670237421989441, + "block7_k_v_norm": 0.06150934845209122, + "block7_k_cos_v_neg_g": 0.06610368192195892, + "block7_v_v_norm": 0.045980680733919144, + "block7_v_cos_v_neg_g": 0.021685490384697914, + "block7_o_v_norm": 0.06201951205730438, + "block7_o_cos_v_neg_g": 0.07744912803173065, + "block7_mlp_win_v_norm": 0.06918112933635712, + "block7_mlp_win_cos_v_neg_g": 0.02820383943617344, + "block7_mlp_wout_v_norm": 0.06023475155234337, + "block7_mlp_wout_cos_v_neg_g": 0.11445057392120361, + "block11_q_v_norm": 0.06199973076581955, + "block11_q_cos_v_neg_g": 0.0716070681810379, + "block11_k_v_norm": 0.062472596764564514, + "block11_k_cos_v_neg_g": 0.10100886225700378, + "block11_v_v_norm": 0.06074710935354233, + "block11_v_cos_v_neg_g": 0.04316055029630661, + "block11_o_v_norm": 0.06214576214551926, + "block11_o_cos_v_neg_g": 0.08596464991569519, + "block11_mlp_win_v_norm": 0.05876392871141434, + "block11_mlp_win_cos_v_neg_g": 0.09251543879508972, + "block11_mlp_wout_v_norm": 0.06045972928404808, + "block11_mlp_wout_cos_v_neg_g": 0.0860455185174942, + "embed_lm_head_sharpness": -0.006130332127213478, + "layer_1_sharpness": -3.976931571960449, + "layer_2_sharpness": -0.46336063742637634, + "layer_3_sharpness": 0.07547177374362946, + "layer_4_sharpness": 0.024668073281645775, + "layer_5_sharpness": 0.01457066461443901, + "layer_6_sharpness": 0.004990678746253252, + "layer_7_sharpness": 0.002484080148860812, + "layer_8_sharpness": 0.002389239612966776, + "layer_9_sharpness": 0.001321416930295527, + "layer_10_sharpness": 0.0006396071403287351, + "layer_11_sharpness": 0.0006939032464288175, + "layer_12_sharpness": 0.0007562427781522274, + "block0_q_sharpness": -0.02076411247253418, + "block0_k_sharpness": -0.007622422184795141, + "block0_v_sharpness": -20.993606567382812, + "block0_o_sharpness": -0.15640132129192352, + "block0_mlp_win_sharpness": -0.07626879215240479, + "block0_mlp_wout_sharpness": -0.001668210024945438, + "block3_q_sharpness": -0.0005820826627314091, + "block3_k_sharpness": 0.027858225628733635, + "block3_v_sharpness": 0.020648231729865074, + "block3_o_sharpness": 0.010385089553892612, + 
"block3_mlp_win_sharpness": 0.01584775187075138, + "block3_mlp_wout_sharpness": 0.002840842353180051, + "block7_q_sharpness": 0.00016951124416664243, + "block7_k_sharpness": 8.410122973145917e-05, + "block7_v_sharpness": 0.007156344596296549, + "block7_o_sharpness": 8.337210601894185e-05, + "block7_mlp_win_sharpness": 0.0013473894214257598, + "block7_mlp_wout_sharpness": 0.00011723364877980202, + "block11_q_sharpness": 0.00016262302233371884, + "block11_k_sharpness": 0.00011131598148494959, + "block11_v_sharpness": 0.00015542418987024575, + "block11_o_sharpness": 4.3076553993159905e-05, + "block11_mlp_win_sharpness": 0.00035148661118000746, + "block11_mlp_wout_sharpness": 0.0016055937157943845, + "sum_layer_numerators": -0.012958731529366874, + "block_diag_sharpness": -0.07549705532690548, + "cross_layer_sharpness": 0.0406614410628941 +} \ No newline at end of file diff --git a/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/training_log.txt b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/training_log.txt new file mode 100644 index 0000000000000000000000000000000000000000..01e53fc509a2a204bcf85b3e04bc0bb2f3365980 --- /dev/null +++ b/layer_wise_new_code_rand/opt_muon_alr_0.001_mlr_0.01_seed_45_0e58633d-c658-4b9d-ba8a-a8f1876052d5/training_log.txt @@ -0,0 +1,11788 @@ +""" +Reference code for GPT-2 training and inference with Sharpness Analysis. +Will save the model weights into files, to be read from C as initialization. + +References: +1) the official GPT-2 TensorFlow implementation released by OpenAI: +https://github.com/openai/gpt-2/blob/master/src/model.py +2) huggingface/transformers PyTorch implementation: +https://github.com/huggingface/transformers/blob/main/src/transformers/models/gpt2/modeling_gpt2.py + +Example launches to only benchmark the speed of bfloat16 compiled GPU training: +1 GPU: +python train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +you can also turn on flash-attention by appending --flash=1 +4 GPU: +torchrun --standalone --nproc_per_node=4 train_gpt2.py --write_tensors=0 --num_iterations=50 --sequence_length=1024 --compile=1 --tensorcores=1 --dtype=bfloat16 +""" +import sys +with open(sys.argv[0]) as f: + code = f.read() # read the code of this file ASAP, for logging + +import os +import math +import glob +import struct +import inspect +from contextlib import nullcontext +from dataclasses import dataclass +import random + +import numpy as np +import torch +from torch import Tensor +import torch.nn as nn +from torch.nn import functional as F +import torch._inductor.config as config +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.distributed import init_process_group, destroy_process_group +from torch.distributed.optim import ZeroRedundancyOptimizer +import torch.distributed as dist +from torch.amp import autocast +import copy +import gc +import uuid +import json +from pathlib import Path + +# Import Muon optimizer +import sys +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/optimizers") +from MUON_fix import Muon + +# Import GPT model +sys.path.append("/home/aiops/zhangfz/MUON_sharpness/modded-nanogpt/models") +import nano_GPT_qkvonorm_pure +from nano_GPT_qkvonorm_pure import GPT, GPTConfig + +# Import debug utilities +# from debug_utils import setup_debugpy + +# ----------------------------------------------------------------------------- +# Our own 
simple Distributed Data Loader + +def _peek_data_shard(filename): + # only reads the header, returns header data + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + if header[0] != 20240520: + print("ERROR: magic number mismatch in the data .bin file!") + print("---> HINT: Are you passing in a correct file with --input_bin?") + print("---> HINT: Dataset encoding changed recently, re-run data prepro or refer again to README") + print("---> HINT: For example re-run: `python dev/data/tinyshakespeare.py`, then re-try") + exit(1) + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + return ntok # for now just return the number of tokens + +def _load_data_shard(filename): + with open(filename, "rb") as f: + # first read the header, which is 256 int32 integers (4 bytes each) + header = np.frombuffer(f.read(256*4), dtype=np.int32) + assert header[0] == 20240520, "magic number mismatch in the data .bin file" + assert header[1] == 1, "unsupported version" + ntok = header[2] # number of tokens (claimed) + # the rest of it are tokens, stored as uint16 + tokens = np.frombuffer(f.read(), dtype=np.uint16) + assert len(tokens) == ntok, "number of tokens read does not match header?" + return tokens + +class DistributedDataLoader: + def __init__(self, filename_pattern, B, T, process_rank, num_processes, + shuffle_files=False, random_seed=None): + self.process_rank = process_rank + self.num_processes = num_processes + self.B = B + self.T = T + self.shuffle_files = shuffle_files + self.random_seed = random_seed + self._rng = random.Random(random_seed) if shuffle_files and random_seed is not None else None + + # glob files that match the pattern + self.files = sorted(glob.glob(filename_pattern)) + assert len(self.files) > 0, f"did not find any files that match the pattern {filename_pattern}" + if self.shuffle_files: + self._shuffle_files() + + # load and validate all data shards, count number of tokens in total + ntok_total = 0 + for fname in self.files: + shard_ntok = _peek_data_shard(fname) + assert shard_ntok >= num_processes * B * T + 1 + ntok_total += shard_ntok + self.ntok_total = ntok_total + print0(f"DataLoader: total number of tokens: {ntok_total:,} across {len(self.files)} files") + + # kick things off + self.current_shard = None + self.reset() + + def reset(self): + # we're being a bit clever here: if we already had shard 0 loaded, + # then don't do the work to reload it, just reset the pointer + if self.current_shard != 0: + self.current_shard = 0 + self.tokens = _load_data_shard(self.files[self.current_shard]) + self.current_position = self.process_rank * self.B * self.T + + def advance(self): # advance to next data shard + next_shard = (self.current_shard + 1) % len(self.files) + if next_shard == 0 and self.shuffle_files: + self._shuffle_files() + self.current_shard = next_shard + self.current_position = self.process_rank * self.B * self.T + self.tokens = _load_data_shard(self.files[self.current_shard]) + + def next_batch(self): + B = self.B + T = self.T + buf = self.tokens[self.current_position : self.current_position+B*T+1] + buf = torch.tensor(buf.astype(np.int32), dtype=torch.long) + x = (buf[:-1]).view(B, T) # inputs + y = (buf[1:]).view(B, T) # targets + # advance the start pointer in current shard + self.current_position += B * T * self.num_processes + # if loading the next batch would be out of bounds advance the shard + if 
self.current_position + (B * T * self.num_processes + 1) > len(self.tokens): + self.advance() + return x, y + + def _shuffle_files(self): + if self._rng is not None: + self._rng.shuffle(self.files) + else: + random.shuffle(self.files) + +# ----------------------------------------------------------------------------- +# Python -> C bridge utilities for saving params/grads/activations to .bin files + +def write_fp32(tensor, file): + t = tensor.detach().cpu().to(torch.float32) + b = t.numpy().tobytes() + file.write(b) + +def write_bf16(tensor, file): + t = tensor.detach().cpu().to(torch.bfloat16) + # numpy doesn't have bf16 datatype so we have to trick it + t = t.view(torch.int16) # trick: reinterpret as int16 + b = t.numpy().tobytes() + file.write(b) + +def write_tensors(model_tensors, L, file, dtype): + # writes the GPT-2 model's weights to a binary file + assert dtype in {"float32", "bfloat16"} + write_fun = write_fp32 if dtype == "float32" else write_bf16 + write_fun(model_tensors["transformer.wte.weight"], file) # (V, C) + write_fun(model_tensors["transformer.wpe.weight"], file) # (T, C) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_1.bias"], file) + for i in range(L): # (L, 3C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.weight"], file) + for i in range(L): # (L, 3C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_attn.bias"], file) + for i in range(L): # (L, C, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.attn.c_proj.bias"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.ln_2.bias"], file) + for i in range(L): # (L, 4C, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.weight"], file) + for i in range(L): # (L, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_fc.bias"], file) + for i in range(L): # (L, C, 4C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.weight"], file) + for i in range(L): # (L, C) + write_fun(model_tensors[f"transformer.h.{i}.mlp.c_proj.bias"], file) + write_fun(model_tensors["transformer.ln_f.weight"], file) # (C, ) + write_fun(model_tensors["transformer.ln_f.bias"], file) # (C, ) + +@torch.no_grad() +def pad_vocab(tensor, multiple=128, value=0): + """ + The dimension of the vocab size in GPT-2 is 50,257 + which is unfortunately a very unfriendly number for a lot of + matrix operations on the GPU. So we pad it to the nearest + friendlier multiple, e.g. 50,304 if multiple=128 when we + export the weights into C land. This is a NOOP algorithmically + and is only done to make the tensor operations more efficient. 
+ """ + assert tensor.ndim == 2 + V, C = tensor.shape + assert V == 50257, "just being defensive here" + # calculate padded vocab size by rounding up to nearest multiple + Vp = ((V + multiple - 1) // multiple) * multiple + # pad the tensor + pad_rows = Vp - V + padded = tensor if pad_rows == 0 else F.pad(tensor, (0, 0, 0, pad_rows), value=value) + assert padded.shape == (Vp, C) + return padded + +def write_model(model, filename, dtype): + # everything we need to instantiate the model + # 1) header is: version int, GPTConfig ints, padding to 1024 bytes + assert dtype in {"float32", "bfloat16"} # float16 todo maybe later + version = { + "float32": 3, # 3: all tensors are fp32, padded vocab + "bfloat16": 5, # 5: all tensors are bf16, padded vocab + }[dtype] + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240326 # magic + header[1] = version # checkpoint version + header[2] = model.config.block_size + header[3] = model.config.vocab_size + header[4] = model.config.n_layer + header[5] = model.config.n_head + header[6] = model.config.n_embd + # 2) the parameters follow the header + params = {name: param.cpu() for name, param in model.named_parameters()} + # pad the vocab to a multiple of 128 here at export, for efficiency in C + wte = params["transformer.wte.weight"] # (V, C) + wte_padded = pad_vocab(wte) # (Vp, C) + params["transformer.wte.weight"] = wte_padded # (Vp, C) + print(f"padded vocab size from {wte.size(0)} to {wte_padded.size(0)}") + header[7] = wte_padded.size(0) # padded vocab size store in header + # now write to file + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) # header + write_tensors(params, model.config.n_layer, file, dtype) # params + print(f"wrote {filename}") + +def write_state(model, x, y, logits, loss, filename): + # the state is used for debugging. + # it contains information about the input, logits, loss, and the parameter gradients + # this can be used for checking the computation correctness in C + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240327 # magic + header[1] = 2 # run state version = 2 (1 -> 2 for padded vocab changes) + header[2] = x.size(0) # batch size of the batch, B + header[3] = x.size(1) # temporal extent of the batch, T + grads = {name: param.grad.cpu() for name, param in model.named_parameters()} + # pad the vocab grads here as well, to mirror write_model + wte_grad = grads["transformer.wte.weight"] # (V, C) + wte_grad_padded = pad_vocab(wte_grad, value=0) # (Vp, C) # TODO later maybe pad with nan? 
+ grads["transformer.wte.weight"] = wte_grad_padded # (Vp, C) + print(f"padded vocab size in reference grads from {wte_grad.size(0)} to {wte_grad_padded.size(0)}") + with open(filename, "wb") as file: + # header + file.write(header.numpy().tobytes()) + # input x + file.write(x.cpu().numpy().astype("int32").tobytes()) # (B, T) + # targets y + file.write(y.cpu().numpy().astype("int32").tobytes()) # (B, T) + # logits (result of the model forward pass) + write_fp32(logits.cpu(), file) + # loss (single float, result of the cross entropy loss) + write_fp32(loss.cpu(), file) + # gradients + write_tensors(grads, model.config.n_layer, file, "float32") + print(f"wrote {filename}") + +def write_tokenizer(enc, filename): + n = enc.max_token_value + 1 + header = torch.zeros(256, dtype=torch.int32) + header[0] = 20240328 # magic + header[1] = 2 # tokenizer version = 2 (1 -> 2: includes EOT token) + header[2] = n # number of tokens + header[3] = enc.eot_token # EOT token + with open(filename, "wb") as file: + file.write(header.numpy().tobytes()) + for i in range(n): + b = enc.decode_bytes([i]) + length = len(b) + assert length < 256, f"Token length exceeds 255: {length}" + file.write(struct.pack(" 0]) + + if all_updates_flat.numel() > 0: + total_l1_linf_norm = torch.sum(torch.abs(all_updates_flat)) + analysis_results["total_l1_linf_norm"] = total_l1_linf_norm.item() + + total_spectral_norm = torch.norm(all_updates_flat, p=2) + analysis_results["total_spectral_norm"] = total_spectral_norm.item() + else: + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + del all_updates_flat + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating total norms: {e}") + analysis_results["total_l1_linf_norm"] = 0.0 + analysis_results["total_spectral_norm"] = 0.0 + + # --- 3. Setup layer parameter groups (adapt to new model structure) --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up layer parameter groups...") + + all_param_groups = {} + + + all_param_groups["embed_lm_head"] = list(model.lm_head.parameters()) + + blocks = model.transformer.h + + for i, block in enumerate(blocks): + layer_name = f"layer_{i+1}" + all_param_groups[layer_name] = list(block.parameters()) + + # Add fine-grained params for selected layers (0, 3, 7, 11) + selected_layers = [0, 3, 7, 11] + for layer_idx in selected_layers: + block = blocks[layer_idx] + prefix = f"block{layer_idx}" + # Attention: Q, K, V, O + all_param_groups[f"{prefix}_q"] = [block.attn.q_w.weight] + all_param_groups[f"{prefix}_k"] = [block.attn.k_w.weight] + all_param_groups[f"{prefix}_v"] = [block.attn.v_w.weight] + all_param_groups[f"{prefix}_o"] = [block.attn.c_proj.weight] + # MLP: c_fc (win) and c_proj (wout) + all_param_groups[f"{prefix}_mlp_win"] = [block.mlp.c_fc.weight] + all_param_groups[f"{prefix}_mlp_wout"] = [block.mlp.c_proj.weight] + + # --- 4. 
Calculate layer-wise update norms --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise update norms...") + + param_to_idx = {id(p): i for i, p in enumerate(model.parameters())} + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + # Get indices for this group + indices = [param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx] + if not indices: + continue + + # Calculate Frobenius norm for this group + group_update_norm_sq = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + dist.all_reduce(group_update_norm_sq, op=dist.ReduceOp.AVG) + analysis_results[f"{group_name}_update_fnorm"] = torch.sqrt(group_update_norm_sq).item() + + # Calculate Max-of-Max and Spectral norms for this group + group_l1_linf_norms = [] + group_spectral_norms = [] + + for i in indices: + if i < len(update_direction_v) and update_direction_v[i].numel() > 0: + try: + l1_linf_norm = calculate_l1_to_linf_norm(update_direction_v[i]) + group_l1_linf_norms.append(l1_linf_norm.item()) + + spectral_norm = calculate_spectral_norm(update_direction_v[i]) + group_spectral_norms.append(spectral_norm.item()) + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error calculating norms for group {group_name}, param {i}: {e}") + group_l1_linf_norms.append(0.0) + group_spectral_norms.append(0.0) + + if group_l1_linf_norms: + analysis_results[f"{group_name}_max_l1_linf_norm"] = max(group_l1_linf_norms) + else: + analysis_results[f"{group_name}_max_l1_linf_norm"] = 0.0 + + if group_spectral_norms: + analysis_results[f"{group_name}_max_spectral_norm"] = max(group_spectral_norms) + else: + analysis_results[f"{group_name}_max_spectral_norm"] = 0.0 + + # --- 5. Setup for HVP calculation on TRAIN data --- + print0(f"[Enhanced Sharpness @ Step {step}] Setting up HVP calculation in {ptdtype} on TRAIN data...") + + original_flash = nano_GPT_qkvonorm_pure.FLASH + nano_GPT_qkvonorm_pure.FLASH = 0 + print0(f"[Enhanced Sharpness @ Step {step}] Disabled FLASH attention for HVP (was {original_flash})") + + # Get block parameter indices for cross-layer analysis (need this before loop) + block_param_indices = set() + for group_name, param_group in all_param_groups.items(): + if group_name.startswith("layer_"): + for p in param_group: + if id(p) in param_to_idx: + block_param_indices.add(param_to_idx[id(p)]) + + # Initialize accumulators for all quantities we need + grads_hvp = None + hvp_v_total = None + hvp_v_block = None + hvp_g_accum = None + layer_hvp_accum = {} + + + group_names_to_process = [gn for gn, pg in all_param_groups.items() + if pg and any(id(p) in param_to_idx for p in pg)] + + if last_training_batches is not None and len(last_training_batches) > 0: + + batch_iterator = [(x, y) for x, y in last_training_batches] + n_batches = len(batch_iterator) + print0(f"[Enhanced Sharpness @ Step {step}] Using {n_batches} microbatches for HVP (out of {grad_accum_steps} training microbatches)") + restore_loader = False + else: + # Fallback: use new batches from train_loader (should rarely happen) + print0(f"[Enhanced Sharpness @ Step {step}] WARNING: last_training_batches is None/empty, using {grad_accum_steps} new batches (inconsistent)") + saved_current_shard = train_loader.current_shard + saved_current_position = train_loader.current_position + n_batches = grad_accum_steps # Use same number as training for consistency + batch_iterator = [] + shard_was_changed = False + for _ in range(n_batches): + x_hvp, y_hvp = 
train_loader.next_batch() + batch_iterator.append((x_hvp, y_hvp)) + shard_was_changed = shard_was_changed or (train_loader.current_shard != saved_current_shard) + restore_loader = True + + + print0(f"[Enhanced Sharpness @ Step {step}] Computing HVPs for {n_batches} microbatches") + for mb_idx, (x_hvp, y_hvp) in enumerate(batch_iterator): + x_hvp, y_hvp = x_hvp.to(device), y_hvp.to(device) + + + _, loss_mb = model(x_hvp, y_hvp, return_logits=False) + grads_mb = torch.autograd.grad(loss_mb, model.parameters(), create_graph=True, allow_unused=True) + + # Compute H·v (total sharpness) + v_dot_g_total = sum(torch.sum(g * v) for g, v in zip(grads_mb, update_direction_v) if g is not None) + + if not isinstance(v_dot_g_total, torch.Tensor): + v_dot_g_total = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_total_mb = torch.autograd.grad(v_dot_g_total, model.parameters(), retain_graph=True, allow_unused=True) + + # Compute H·v_block (block-only sharpness) + if block_param_indices: + v_dot_g_block = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in block_param_indices if grads_mb[i] is not None) + if not isinstance(v_dot_g_block, torch.Tensor): + v_dot_g_block = torch.tensor(0.0, device=device, requires_grad=True) + hvp_v_block_mb = torch.autograd.grad(v_dot_g_block, model.parameters(), retain_graph=True, allow_unused=True) + else: + + hvp_v_block_mb = [None] * len(list(model.parameters())) + + + g_dot_g = sum(torch.sum(g * g) for g in grads_mb if g is not None) + if not isinstance(g_dot_g, torch.Tensor): + g_dot_g = torch.tensor(0.0, device=device, requires_grad=True) + + + hvp_g_mb_raw = torch.autograd.grad(g_dot_g, model.parameters(), + retain_graph=True, allow_unused=True) + hvp_g_mb = [h / 2.0 if h is not None else None for h in hvp_g_mb_raw] + + # Compute per-layer H_kk·v_k (for layer-wise sharpness) + for group_idx, group_name in enumerate(group_names_to_process): + param_group = all_param_groups[group_name] + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + is_last_layer = (group_idx == len(group_names_to_process) - 1) + is_last_microbatch = (mb_idx == n_batches - 1) + need_retain = not (is_last_layer and is_last_microbatch) + + try: + v_dot_g_layer = sum(torch.sum(grads_mb[i] * update_direction_v[i]) + for i in indices if grads_mb[i] is not None) + + if not isinstance(v_dot_g_layer, torch.Tensor): + v_dot_g_layer = torch.tensor(0.0, device=device, requires_grad=True) + + hvp_layer_mb = torch.autograd.grad(v_dot_g_layer, model.parameters(), + retain_graph=need_retain, + allow_unused=True) + + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_layer_mb] + else: + layer_hvp_accum[group_name] = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + ] + + # Accumulate layer HVP + # if group_name not in layer_hvp_accum: + # layer_hvp_accum[group_name] = [h.detach() / n_batches if h is not None else None for h in hvp_layer_mb] + # else: + # layer_hvp_accum[group_name] = [ + # (h_acc + h.detach() / n_batches) if (h is not None and h_acc is not None) + # else (h.detach() / n_batches if h is not None else h_acc) + # for h_acc, h in zip(layer_hvp_accum[group_name], hvp_layer_mb) + # ] + # del hvp_layer_mb, v_dot_g_layer + # torch.cuda.empty_cache() + except 
Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error computing layer HVP for '{group_name}' in microbatch {mb_idx}: {e}") + if group_name not in layer_hvp_accum: + layer_hvp_accum[group_name] = None + + # 6. Accumulate all quantities + if grads_hvp is None: + grads_hvp = [(g.detach() / n_batches).cpu() if g is not None else None for g in grads_mb] + hvp_v_total = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_total_mb] + hvp_v_block = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_v_block_mb] + hvp_g_accum = [(h.detach() / n_batches).cpu() if h is not None else None for h in hvp_g_mb] + else: + grads_hvp = [ + (g_acc + (g.detach() / n_batches).cpu()) if (g is not None and g_acc is not None) + else ((g.detach() / n_batches).cpu() if g is not None else g_acc) + for g_acc, g in zip(grads_hvp, grads_mb) + ] + hvp_v_total = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_total, hvp_v_total_mb) + ] + hvp_v_block = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_v_block, hvp_v_block_mb) + ] + hvp_g_accum = [ + (h_acc + (h.detach() / n_batches).cpu()) if (h is not None and h_acc is not None) + else ((h.detach() / n_batches).cpu() if h is not None else h_acc) + for h_acc, h in zip(hvp_g_accum, hvp_g_mb) + ] + + + + if mb_idx % max(1, n_batches // 4) == 0: + print0(f"[Enhanced Sharpness @ Step {step}] Processed microbatch {mb_idx + 1}/{n_batches}") + + + if restore_loader: + train_loader.current_shard = saved_current_shard + train_loader.current_position = saved_current_position + if shard_was_changed: + train_loader.tokens = _load_data_shard(train_loader.files[train_loader.current_shard]) + + print0(f"[Enhanced Sharpness @ Step {step}] Finished computing all HVPs for {n_batches} microbatches") + grads_hvp = [g.to(device) if g is not None else None for g in grads_hvp] + hvp_v_total = [h.to(device) if h is not None else None for h in hvp_v_total] + hvp_v_block = [h.to(device) if h is not None else None for h in hvp_v_block] + hvp_g_accum = [h.to(device) if h is not None else None for h in hvp_g_accum] + for group_name in layer_hvp_accum: + if layer_hvp_accum[group_name] is not None: + layer_hvp_accum[group_name] = [h.to(device) if h is not None else None for h in layer_hvp_accum[group_name]] + # --- Calculate TOTAL sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating TOTAL sharpness...") + # hvp_v_total is already computed in the loop above + vhp_dot_v_total = sum(torch.sum(hvp * v) for hvp, v in zip(hvp_v_total, update_direction_v) if hvp is not None) + v_norm_sq_total = sum(torch.sum(v * v) for v in update_direction_v) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_total, torch.Tensor): + vhp_dot_v_total = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_total, torch.Tensor): + v_norm_sq_total = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_total, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_total, op=dist.ReduceOp.AVG) + + if v_norm_sq_total.item() > 1e-12: + analysis_results["total_sharpness"] = (vhp_dot_v_total / v_norm_sq_total).item() + else: + analysis_results["total_sharpness"] = 0.0 + + + print0(f"[Enhanced Sharpness @ Step {step}] Calculating BLOCK-ONLY total sharpness...") + # hvp_v_block is 
already computed in the loop above + if block_param_indices: # Only compute if there are block parameters + # Compute v_block^T H v_block (only sum over block indices) + vhp_dot_v_block = sum(torch.sum(hvp_v_block[i] * update_direction_v[i]) + for i in block_param_indices if hvp_v_block[i] is not None) + + v_norm_sq_block = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in block_param_indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_block, torch.Tensor): + vhp_dot_v_block = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_block, torch.Tensor): + v_norm_sq_block = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_block, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_block, op=dist.ReduceOp.AVG) + + if v_norm_sq_block.item() > 1e-12: + analysis_results["block_total_sharpness"] = (vhp_dot_v_block / v_norm_sq_block).item() + else: + analysis_results["block_total_sharpness"] = 0.0 + + analysis_results["v_norm_block"] = torch.sqrt(v_norm_sq_block).item() + analysis_results["v_T_H_v_block"] = vhp_dot_v_block.item() + else: + # No block parameters + analysis_results["block_total_sharpness"] = 0.0 + analysis_results["v_norm_block"] = 0.0 + analysis_results["v_T_H_v_block"] = 0.0 + + torch.cuda.empty_cache() + + # ---- Alignment metrics between update v and (negative) gradient g ---- + eps = 1e-12 + v_norm = torch.sqrt(v_norm_sq_total + eps) + analysis_results["v_norm"] = v_norm.item() + + # --- Version 1: g_hvp --- + ip_v_neg_g_hvp = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, grads_hvp) if g is not None) + g_hvp_norm_sq = sum(torch.sum(g * g) for g in grads_hvp if g is not None) + + if not isinstance(ip_v_neg_g_hvp, torch.Tensor): + ip_v_neg_g_hvp = torch.tensor(0.0, device=device) + if not isinstance(g_hvp_norm_sq, torch.Tensor): + g_hvp_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_v_neg_g_hvp, op=dist.ReduceOp.AVG) + dist.all_reduce(g_hvp_norm_sq, op=dist.ReduceOp.AVG) + g_hvp_norm = torch.sqrt(g_hvp_norm_sq + eps) + analysis_results["ip_v_neg_g_hvp"] = ip_v_neg_g_hvp.item() + analysis_results["cos_v_neg_g_hvp"] = (ip_v_neg_g_hvp / (v_norm * g_hvp_norm + eps)).item() + analysis_results["g_hvp_norm"] = g_hvp_norm.item() + + # --- Version 2: g_t (original gradient that produced v) --- + # last_training_gradient is the actual gradient from training that led to the update v + if last_training_gradient is not None: + ip_v_neg_g_t = sum(torch.sum(v * (-g)) for v, g in zip(update_direction_v, last_training_gradient) if g is not None) + g_t_norm_sq = sum(torch.sum(g * g) for g in last_training_gradient if g is not None) + dist.all_reduce(ip_v_neg_g_t, op=dist.ReduceOp.AVG) + dist.all_reduce(g_t_norm_sq, op=dist.ReduceOp.AVG) + g_t_norm = torch.sqrt(g_t_norm_sq + eps) + analysis_results["ip_v_neg_g_t"] = ip_v_neg_g_t.item() + analysis_results["cos_v_neg_g_t"] = (ip_v_neg_g_t / (v_norm * g_t_norm + eps)).item() + analysis_results["g_t_norm"] = g_t_norm.item() + else: + print0(f"[Enhanced Sharpness @ Step {step}] Warning: last_training_gradient is None, skipping g_t metrics") + + # Keep backward compatibility aliases (g_norm uses g_hvp for now) + g_norm_sq = g_hvp_norm_sq + g_norm = g_hvp_norm + analysis_results["g_norm"] = g_norm.item() + + # ---- Cosine between v and Hv (curvature pull along v) ---- + hv_norm_sq = sum(torch.sum(hvp * hvp) for hvp in hvp_v_total if hvp is not None) + if not isinstance(hv_norm_sq, torch.Tensor): + hv_norm_sq = torch.tensor(0.0, device=device) + 
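+ # Quick relation between the curvature summaries reported here (a sketch in the
+ # notation already used above): total_sharpness = v^T H v / ||v||^2 is the Rayleigh
+ # quotient of H along v, while cos_v_hv = v^T H v / (||v|| * ||Hv||), so
+ # cos_v_hv = total_sharpness * ||v|| / ||Hv||. If v were an exact eigenvector of H
+ # with eigenvalue lambda > 0, cos_v_hv would equal 1 and total_sharpness would equal lambda.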
dist.all_reduce(hv_norm_sq, op=dist.ReduceOp.AVG) + hv_norm = torch.sqrt(hv_norm_sq + eps) + ip_v_hv = vhp_dot_v_total # already reduced AVG + analysis_results["hv_norm"] = hv_norm.item() + analysis_results["cos_v_hv"] = (ip_v_hv / (v_norm * hv_norm + eps)).item() + + # ---- Cosine between g and Hg ---- + # hvp_g_accum is already computed in the loop above + ip_g_hg = sum(torch.sum(g * hg) for g, hg in zip(grads_hvp, hvp_g_accum) if (g is not None and hg is not None)) + hg_norm_sq = sum(torch.sum(hg * hg) for hg in hvp_g_accum if hg is not None) + if not isinstance(ip_g_hg, torch.Tensor): + ip_g_hg = torch.tensor(0.0, device=device) + if not isinstance(hg_norm_sq, torch.Tensor): + hg_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(ip_g_hg, op=dist.ReduceOp.AVG) + dist.all_reduce(hg_norm_sq, op=dist.ReduceOp.AVG) + hg_norm = torch.sqrt(hg_norm_sq + eps) + analysis_results["hg_norm"] = hg_norm.item() + analysis_results["cos_g_hg"] = (ip_g_hg / (g_norm * hg_norm + eps)).item() if g_norm.item() > 0 else 0.0 + + # ---- Decompose v into parallel / perpendicular to -g ---- + if g_norm.item() > 0: + v_parallel = [(torch.sum(v * (-g)) / (g_norm_sq + eps)) * (-g) if g is not None else torch.zeros_like(v) + for v, g in zip(update_direction_v, grads_hvp)] + v_parallel_norm_sq = sum(torch.sum(vp * vp) for vp in v_parallel) + if not isinstance(v_parallel_norm_sq, torch.Tensor): + v_parallel_norm_sq = torch.tensor(0.0, device=device) + dist.all_reduce(v_parallel_norm_sq, op=dist.ReduceOp.AVG) + v_parallel_norm = torch.sqrt(v_parallel_norm_sq + eps) + v_perp_norm = torch.sqrt(torch.clamp(v_norm_sq_total - v_parallel_norm_sq, min=0.0) + eps) + analysis_results["v_parallel_norm"] = v_parallel_norm.item() + analysis_results["v_perp_norm"] = v_perp_norm.item() + + # ---- Per-layer additions: cos_v_neg_g_layer, v_norm_layer ---- + for group_name, param_group in all_param_groups.items(): + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + v_norm_sq_layer = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) for i in indices) + g_norm_sq_layer = sum(torch.sum(grads_hvp[i] * grads_hvp[i]) for i in indices if grads_hvp[i] is not None) + ip_v_neg_g_layer = sum(torch.sum(update_direction_v[i] * (-grads_hvp[i])) + for i in indices if grads_hvp[i] is not None) + # Ensure they are tensors + if not isinstance(v_norm_sq_layer, torch.Tensor): + v_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(g_norm_sq_layer, torch.Tensor): + g_norm_sq_layer = torch.tensor(0.0, device=device) + if not isinstance(ip_v_neg_g_layer, torch.Tensor): + ip_v_neg_g_layer = torch.tensor(0.0, device=device) + dist.all_reduce(v_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(g_norm_sq_layer, op=dist.ReduceOp.AVG) + dist.all_reduce(ip_v_neg_g_layer, op=dist.ReduceOp.AVG) + v_norm_layer = torch.sqrt(v_norm_sq_layer + eps) + g_norm_layer = torch.sqrt(g_norm_sq_layer + eps) + analysis_results[f"{group_name}_v_norm"] = v_norm_layer.item() + if g_norm_layer.item() > 0: + analysis_results[f"{group_name}_cos_v_neg_g"] = (ip_v_neg_g_layer / (v_norm_layer * g_norm_layer + eps)).item() + + # --- 7. 
Calculate layer-wise sharpness --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating layer-wise sharpness...") + print0(f"[Enhanced Sharpness @ Step {step}] Processing {len(all_param_groups)} layers for sharpness...") + + for group_name, param_group in all_param_groups.items(): + if not param_group: + continue + + print0(f"[Enhanced Sharpness @ Step {step}] Processing '{group_name}'...") + indices = {param_to_idx[id(p)] for p in param_group if id(p) in param_to_idx} + if not indices: + continue + + try: + if group_name not in layer_hvp_accum or layer_hvp_accum[group_name] is None: + print0(f"[Enhanced Sharpness @ Step {step}] No HVP data for '{group_name}', skipping") + analysis_results[f"{group_name}_sharpness"] = 0.0 + continue + + hvp_group_result = layer_hvp_accum[group_name] + + vhp_dot_v_group = sum(torch.sum(hvp_group_result[i] * update_direction_v[i]) + for i in indices if hvp_group_result[i] is not None) + v_norm_sq_group = sum(torch.sum(update_direction_v[i] * update_direction_v[i]) + for i in indices) + + # Ensure they are tensors + if not isinstance(vhp_dot_v_group, torch.Tensor): + vhp_dot_v_group = torch.tensor(0.0, device=device) + if not isinstance(v_norm_sq_group, torch.Tensor): + v_norm_sq_group = torch.tensor(0.0, device=device) + + dist.all_reduce(vhp_dot_v_group, op=dist.ReduceOp.AVG) + dist.all_reduce(v_norm_sq_group, op=dist.ReduceOp.AVG) + + if v_norm_sq_group.item() > 1e-12: + analysis_results[f"{group_name}_sharpness"] = (vhp_dot_v_group / v_norm_sq_group).item() + else: + analysis_results[f"{group_name}_sharpness"] = 0.0 + + except torch.OutOfMemoryError as e: + print0(f"[Enhanced Sharpness @ Step {step}] OOM error for '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + torch.cuda.empty_cache() + except Exception as e: + print0(f"[Enhanced Sharpness @ Step {step}] Error processing '{group_name}': {e}") + analysis_results[f"{group_name}_sharpness"] = 0.0 + + # --- Calculate block-diagonal approximation and cross-layer interaction --- + print0(f"[Enhanced Sharpness @ Step {step}] Calculating block-diagonal and cross-layer sharpness...") + + sum_layer_numerators = 0.0 + for layer in range(1, NUM_LAYERS + 1): + layer_name = f"layer_{layer}" + if f"{layer_name}_sharpness" in analysis_results and f"{layer_name}_v_norm" in analysis_results: + s_k = analysis_results[f"{layer_name}_sharpness"] + v_k_norm = analysis_results[f"{layer_name}_v_norm"] + sum_layer_numerators += s_k * (v_k_norm ** 2) + + analysis_results["sum_layer_numerators"] = sum_layer_numerators + + # Block-diagonal sharpness (using block ||v||²) + v_norm_block = analysis_results.get("v_norm_block", 0) + v_norm_sq_block_val = v_norm_block ** 2 if v_norm_block else 1e-12 + + if v_norm_sq_block_val > 1e-12: + analysis_results["block_diag_sharpness"] = sum_layer_numerators / v_norm_sq_block_val + else: + analysis_results["block_diag_sharpness"] = 0.0 + + # Cross-layer interaction = block_total - block_diag + block_total = analysis_results.get("block_total_sharpness", 0) + block_diag = analysis_results.get("block_diag_sharpness", 0) + analysis_results["cross_layer_sharpness"] = block_total - block_diag + + print0(f"[Enhanced Sharpness @ Step {step}] block_total={block_total:.6f}, block_diag={block_diag:.6f}, cross_layer={block_total - block_diag:.6f}") + + # --- 8. 
Cleanup --- + nano_GPT_qkvonorm_pure.FLASH = original_flash + print0(f"[Enhanced Sharpness @ Step {step}] Restored FLASH attention to {original_flash}") + + print0(f"[Enhanced Sharpness @ Step {step}] Restoring parameters back to θ_{{t+1}}...") + with torch.no_grad(): + for p, v in zip(model.parameters(), update_direction_v): + p.data.add_(v) + + if prev_training_mode: + model.train() + else: + model.eval() + + # Thorough cleanup of all temporary variables + del update_direction_v, grads_hvp + del hvp_v_total, hvp_v_block, hvp_g_accum, layer_hvp_accum + del vhp_dot_v_total, v_norm_sq_total + del vhp_dot_v_block, v_norm_sq_block + if 'all_param_groups' in locals(): + del all_param_groups + if 'param_to_idx' in locals(): + del param_to_idx + + # Synchronize CUDA operations before cleanup + if device == "cuda": + torch.cuda.synchronize() + + gc.collect() + torch.cuda.empty_cache() + + print0(f"[Enhanced Sharpness @ Step {step}] Analysis complete. Generated {len(analysis_results)} metrics.") + return analysis_results + +def format_comprehensive_results(results): + """ + Format the comprehensive analysis results for logging. + """ + log_parts = [] + + # Total sharpness + if 'total_sharpness' in results: + log_parts.append(f"total_sharp:{results['total_sharpness']:.4e}") + + # Layer-wise sharpness - dynamically detect number of layers + layer_sharpness = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_sharpness" + if layer_key in results: + layer_sharpness.append(f"L{layer_num}_sharp:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_sharpness: + log_parts.append(" ".join(layer_sharpness)) + + # Total update norms + total_norms = [] + if 'total_update_fnorm' in results: + total_norms.append(f"total_fnorm:{results['total_update_fnorm']:.4e}") + if 'total_l1_linf_norm' in results: + total_norms.append(f"total_l1_linf:{results['total_l1_linf_norm']:.4e}") + if 'total_spectral_norm' in results: + total_norms.append(f"total_spectral:{results['total_spectral_norm']:.4e}") + + if total_norms: + log_parts.append(" ".join(total_norms)) + + # Layer-wise update norms (Frobenius) + layer_fnorms = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_update_fnorm" + if layer_key in results: + layer_fnorms.append(f"L{layer_num}_fnorm:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_fnorms: + log_parts.append(" ".join(layer_fnorms)) + + # Layer-wise update norms (Max-of-Max) + layer_l1_linf = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_l1_linf_norm" + if layer_key in results: + layer_l1_linf.append(f"L{layer_num}_l1linf:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_l1_linf: + log_parts.append(" ".join(layer_l1_linf)) + + # Layer-wise update norms (Spectral) + layer_spectral = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_max_spectral_norm" + if layer_key in results: + layer_spectral.append(f"L{layer_num}_spectral:{results[layer_key]:.4e}") + layer_num += 1 + else: + break + + if layer_spectral: + log_parts.append(" ".join(layer_spectral)) + + # Alignment and curvature metrics (global) + misc_parts = [] + if 'v_norm' in results: + misc_parts.append(f"v_norm:{results['v_norm']:.4e}") + + # Version 1: g_hvp (new batch, computed at θ_t during HVP calculation) + if 'cos_v_neg_g_hvp' in results: + misc_parts.append(f"cos_v_-g_hvp:{results['cos_v_neg_g_hvp']:.4e}") + if 'g_hvp_norm' in results: + misc_parts.append(f"g_hvp_norm:{results['g_hvp_norm']:.4e}") + + # 
Version 2: g_t (original gradient that produced v) + if 'cos_v_neg_g_t' in results: + misc_parts.append(f"cos_v_-g_t:{results['cos_v_neg_g_t']:.4e}") + if 'g_t_norm' in results: + misc_parts.append(f"g_t_norm:{results['g_t_norm']:.4e}") + + if 'hv_norm' in results: + misc_parts.append(f"hv_norm:{results['hv_norm']:.4e}") + if 'cos_v_hv' in results: + misc_parts.append(f"cos_v_hv:{results['cos_v_hv']:.4e}") + if 'hg_norm' in results: + misc_parts.append(f"hg_norm:{results['hg_norm']:.4e}") + if 'cos_g_hg' in results: + misc_parts.append(f"cos_g_hg:{results['cos_g_hg']:.4e}") + if 'v_parallel_norm' in results: + misc_parts.append(f"v_par:{results['v_parallel_norm']:.4e}") + if 'v_perp_norm' in results: + misc_parts.append(f"v_perp:{results['v_perp_norm']:.4e}") + if misc_parts: + log_parts.append(" ".join(misc_parts)) + + # Per-layer alignment metrics (cos_v_neg_g and v_norm per layer) + layer_cos = [] + layer_num = 1 + while True: + layer_key = f"layer_{layer_num}_cos_v_neg_g" + layer_vn_key = f"layer_{layer_num}_v_norm" + if layer_key in results: + layer_cos.append(f"L{layer_num}_cos_v_neg_g:{results[layer_key]:.4e}") + if layer_vn_key in results: + layer_cos.append(f"L{layer_num}_v_norm:{results[layer_vn_key]:.4e}") + if layer_key not in results and layer_vn_key not in results: + break + layer_num += 1 + if layer_cos: + log_parts.append(" ".join(layer_cos)) + + return " ".join(log_parts) + +# ----------------------------------------------------------------------------- +# int main + +def print0(*args, **kwargs): + # modified print that only prints from the master process + # if this is not a distributed run, it's just a print + if int(os.environ.get("RANK", 0)) == 0: + print(*args, **kwargs) + +if __name__ == "__main__": + import time + import argparse + import tiktoken + print0(f"Running pytorch {torch.version.__version__}") + + # default settings will overfit a tiny batch of data + # and save model weights and debug state to disk on the first iteration + parser = argparse.ArgumentParser() + # file system input / output + parser.add_argument("--input_bin", type=str, default="dev/data/tinyshakespeare/tiny_shakespeare_val.bin", help="input .bin to train on") + parser.add_argument("--input_val_bin", type=str, default="", help="input .bin to eval validation loss on") + parser.add_argument("--output_dir", type=str, default="", help="output directory to which to write logs and checkpoints") + parser.add_argument("--model", type=str, default="gpt2", help="gpt2|gpt2-medium|gpt2-large|gpt2-xl|d8|d12|d24|d36|d48") + # token layout for each step of the optimization + parser.add_argument("--batch_size", type=int, default=4, help="batch size, in units of #batch dimensions") + parser.add_argument("--sequence_length", type=int, default=64, help="sequence length") + parser.add_argument("--total_batch_size", type=int, default=256, help="total desired batch size, in units of #tokens") + # workload (number of steps) + parser.add_argument("--num_iterations", type=int, default=10, help="number of iterations to run") + parser.add_argument("--inference_only", type=int, default=0, help="only run inference") + # optimization + parser.add_argument("--adam_lr", type=float, default=1e-4, help="learning rate warmup iterations") + parser.add_argument("--warmup_iters", type=int, default=0, help="learning rate warmup iterations") + parser.add_argument("--lr_decay_frac", type=float, default=1.0, help="learning rate warmup iterations") + parser.add_argument("--weight_decay", type=float, default=0.0, help="weight decay") 
+ parser.add_argument("--grad_clip", type=float, default=1.0, help="maximum gradient magnitude") + # evaluation + parser.add_argument("--val_loss_every", type=int, default=0, help="every how mant steps to evaluate val loss?") + parser.add_argument("--val_max_steps", type=int, default=20, help="how many batches of val to average?") + parser.add_argument("--sample_every", type=int, default=0, help="how often to sample from the model?") + # debugging + parser.add_argument("--overfit_single_batch", type=int, default=0, help="overfit just one batch of data") + parser.add_argument("--shuffle_files", action="store_true") + # numerics + parser.add_argument("--tensorcores", type=int, default=0, help="use tensorcores") + # memory management + parser.add_argument("--device", type=str, default="", help="by default we autodetect, or set it here") + parser.add_argument("--compile", type=int, default=0, help="torch.compile the model") + parser.add_argument("--flash", type=int, default=0, help="use flash attention") + parser.add_argument("--dtype", type=str, default="float32", help="float32|float16|bfloat16") + parser.add_argument("--zero_stage", type=int, default=0, help="zero redundancy optimizer stage (0/1/2/3)") + # Muon optimizer specific arguments + parser.add_argument("--optimizer", type=str, default="adam", help="optimizer to use: adam|muon") + parser.add_argument("--muon_lr", type=float, default=0.02, help="learning rate for Muon optimizer") + parser.add_argument("--muon_momentum", type=float, default=0.95, help="momentum for Muon optimizer") + parser.add_argument("--muon_weight_decay", type=float, default=0.00, help="weight decay for Muon optimizer") + parser.add_argument("--muon_ns_steps", type=int, default=5, help="number of Newton-Schulz steps for Muon") + parser.add_argument("--muon_nesterov", type=bool, default=False, help="use Nesterov momentum for Muon (0/1)") + # python -> C bridge + parser.add_argument("--write_tensors", type=int, default=1, help="write tensors to disk") + parser.add_argument("--seed", type=int, default=42, help="random seed") + # Sharpness analysis arguments + parser.add_argument("--analyze_sharpness", action="store_true", help="Enable comprehensive sharpness analysis") + parser.add_argument("--sharpness_analysis_interval", type=int, default=500, help="Interval for sharpness analysis") + args = parser.parse_args() + + # args error checking and convenience variables + B, T = args.batch_size, args.sequence_length + assert 1 <= T <= 1024 + assert args.dtype in {"float32", "float16", "bfloat16"} + assert args.model in {"gpt2", "gpt2-medium", "gpt2-large", "gpt2-xl", "d8", "d12", "d24", "d36", "d48"} + assert args.optimizer in {"adam", "muon"} + + set_seed(args.seed) + + # set up DDP (distributed data parallel). torchrun sets this env variable + ddp = int(os.environ.get('RANK', -1)) != -1 # is this a ddp run? + if ddp: + # use of DDP atm demands CUDA, we set the device appropriately according to rank + assert torch.cuda.is_available(), "for now i think we need CUDA for DDP" + init_process_group(backend='nccl') + ddp_rank = int(os.environ['RANK']) + ddp_local_rank = int(os.environ['LOCAL_RANK']) + ddp_world_size = int(os.environ['WORLD_SIZE']) + device = f'cuda:{ddp_local_rank}' + torch.cuda.set_device(device) + master_process = ddp_rank == 0 # this process will do logging, checkpointing etc. 
+ seed_offset = 0 # each process gets the exact same seed + zero_stage = args.zero_stage + else: + ddp_rank = 0 + ddp_local_rank = 0 + zero_stage = 0 + ddp_world_size = 1 + master_process = True + seed_offset = 0 + # select the device + if args.device: + # provided explicitly by the user + device = args.device + else: + # attempt to autodetect the device + device = "cpu" + if torch.cuda.is_available(): + device = "cuda" + elif hasattr(torch.backends, "mps") and torch.backends.mps.is_available(): + device = "mps" + print(f"using device: {device}") + device_type = 'cuda' if 'cuda' in device else 'cpu' + + # Setup debugpy for remote debugging (only activates if DEBUGPY env var is set) + # setup_debugpy(rank=ddp_rank, force=True) + + # calculate gradient accumulation from the desired total batch size and the current run configuration + tokens_per_fwdbwd = B * T * ddp_world_size + assert args.total_batch_size % tokens_per_fwdbwd == 0 + grad_accum_steps = args.total_batch_size // tokens_per_fwdbwd + print0(f"total desired batch size: {args.total_batch_size}") + print0(f"=> calculated gradient accumulation steps: {grad_accum_steps}") + + # set up a context manager following the desired dtype and device + ptdtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype] + ctx = torch.amp.autocast(device_type=device_type, dtype=ptdtype) if device_type == "cuda" else nullcontext() + + # rng / reproducibility + torch.manual_seed(42) + if torch.cuda.is_available(): + torch.cuda.manual_seed(42) + + # set the torch precision mode to use TensorFloat32 (TF32) for matmuls + # docs https://pytorch.org/docs/stable/generated/torch.set_float32_matmul_precision.html + if args.tensorcores: + torch.set_float32_matmul_precision('high') + + # turn on/off flash attention + assert args.flash in {0, 1} + nano_GPT_qkvonorm_pure.FLASH = args.flash # Set module-level FLASH for training + + # init (and write) the tokenizer + enc = tiktoken.get_encoding("gpt2") + if master_process and args.write_tensors: # tokenizer is technically not tensors but ok + write_tokenizer(enc, "gpt2_tokenizer.bin") + + # init the model, either from scratch or from OpenAI pretrained checkpoint + if args.model[0] == "d": + # from scratch (random weights) + model_config = { + "d8": GPTConfig(block_size=1024, vocab_size=50257, n_layer=8, n_head=8, n_embd=512), + "d12": GPTConfig(block_size=1024, vocab_size=50257, n_layer=12, n_head=12, n_embd=768), + "d24": GPTConfig(block_size=1024, vocab_size=50257, n_layer=24, n_head=16, n_embd=1024), + "d36": GPTConfig(block_size=1024, vocab_size=50257, n_layer=36, n_head=20, n_embd=1280), + "d48": GPTConfig(block_size=1024, vocab_size=50257, n_layer=48, n_head=25, n_embd=1600), + }[args.model] + model = GPT(model_config) + else: + # load the GPT-2 model weights + model = GPT.from_pretrained(args.model) + model.train() + model.to(device) + + # Save uncompiled model reference for sharpness analysis (needs double backward) + raw_model_uncompiled = model + + if args.compile: + if hasattr(config, "coordinate_descent_tuning"): + config.coordinate_descent_tuning = True # suggested by @Chillee + print0("compiling the model...") + model = torch.compile(model) + + # ------------------------------------------------------------------------- + # Our own version of a simple DistributedDataLoader + + # load tokens + train_loader = DistributedDataLoader( + args.input_bin, B, T, ddp_rank, ddp_world_size, + shuffle_files=args.shuffle_files, random_seed=args.seed + ) + val_loader = None + if 
args.input_val_bin: + val_loader = DistributedDataLoader(args.input_val_bin, B, T, ddp_rank, ddp_world_size) + + # ------------------------------------------------------------------------- + # PyTorch -> C bridge: save some weights and state for C to load later as reference + + # do one forward pass to generate ground truth for our C tests + if master_process and args.write_tensors and (not args.inference_only): + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + logits, loss = model(x, y, return_logits=True) # Need logits for write_state + loss.backward() + # save model params, in both float32 and bfloat16 + model_to_size = {"gpt2": "124M", "gpt2-medium": "355M", "gpt2-large": "774M", "gpt2-xl": "1558M"} + model_to_size.update({f"d{d}": f"d{d}" for d in [12, 24, 36, 48]}) + model_size_str = model_to_size[args.model] # e.g. "124M", or "d12" + write_model(model, f"gpt2_{model_size_str}.bin", dtype="float32") + write_model(model, f"gpt2_{model_size_str}_bf16.bin", dtype="bfloat16") + # save x, y, logits, loss, and parameter gradients, for debugging C + # always store these in fp32 to have an accurate reference (?) + write_state(model, x, y, logits, loss, f"gpt2_{model_size_str}_debug_state.bin") + # reset the train_loader for the optimization below + train_loader.reset() + + # ------------------------------------------------------------------------- + # main training loop + + # here we wrap model into DDP container + if ddp: + model = DDP(model, device_ids=[ddp_local_rank]) + raw_model = model.module if ddp else model # always contains the "raw" unwrapped model + + base_module = model.module if ddp else model + # If compiled, unwrap to get the original module + if hasattr(base_module, "_orig_mod"): + base_module = base_module._orig_mod + + raw_params = list(raw_model_uncompiled.parameters()) + train_params = list(base_module.parameters()) + + assert len(raw_params) == len(train_params), \ + f"Parameter count mismatch: raw_model_uncompiled has {len(raw_params)}, training model has {len(train_params)}" + for i, (rp, tp) in enumerate(zip(raw_params, train_params)): + assert rp.data_ptr() == tp.data_ptr(), \ + f"Parameter {i} has different data_ptr: raw_model_uncompiled and training model do not share parameters!" + print0(f"[Verified] raw_model_uncompiled and training model share the same {len(raw_params)} Parameter objects") + + last_training_update = None + last_training_gradient = None # Store the original gradient that produced the update + last_training_batches = None # Store ALL microbatches (x, y) for consistent HVP calculation + + + def configure_adam(model, weight_decay, learning_rate, betas, device_type, zero_stage): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + # create optim groups. Any parameters that is 2D will be weight decayed, otherwise no. + # i.e. all weight tensors in matmuls + embeddings decay, all biases and layernorms don't. 
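+ # e.g. (illustrative) transformer.wte.weight and every attention/MLP weight matrix
+ # land in the decayed group, while any 1-D gain/bias tensor does not; with the
+ # default --weight_decay 0.0 the two groups are optimized identically.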
+ decay_params = [p for n, p in param_dict.items() if p.dim() >= 2] + nodecay_params = [p for n, p in param_dict.items() if p.dim() < 2] + optim_groups = [ + {'params': decay_params, 'weight_decay': weight_decay}, + {'params': nodecay_params, 'weight_decay': 0.0} + ] + num_decay_params = sum(p.numel() for p in decay_params) + num_nodecay_params = sum(p.numel() for p in nodecay_params) + print0(f"num decayed parameter tensors: {len(decay_params)}, with {num_decay_params:,} parameters") + print0(f"num non-decayed parameter tensors: {len(nodecay_params)}, with {num_nodecay_params:,} parameters") + # Create AdamW optimizer and use the fused version if it is available + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW: {use_fused}") + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer") + optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=learning_rate, betas=betas, fused=use_fused) + optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW") + optimizer = torch.optim.AdamW(optim_groups, lr=learning_rate, betas=betas, fused=use_fused) + return [optimizer] + + def configure_muon(model, weight_decay, adam_lr, muon_lr, momentum, nesterov, ns_steps, device_type, zero_stage, ddp_rank, ddp_world_size): + # start with all of the candidate parameters + param_dict = {pn: p for pn, p in model.named_parameters()} + # filter out those that do not require grad + param_dict = {pn: p for pn, p in param_dict.items() if p.requires_grad} + + # For Muon, we need to separate 2D parameters (which can be orthogonalized) + # from other parameters (which should use standard optimization) + muon_params = [] # 2D parameters for Muon + other_params = [] # other parameters for AdamW + + muon_name = [] + other_name = [] + for n, p in param_dict.items(): + if "wte.weight" in n : + other_params.append(p) + other_name.append(n) + continue + + if p.dim() >= 2: # 2D parameters (weight matrices) + muon_params.append(p) + muon_name.append(n) + else: # 1D parameters (biases, embeddings, etc.) 
+ other_params.append(p) + other_name.append(n) + + # print("================================================\n") + # print(f"Muon parameters: {muon_name}\n") + # print(f"Other parameters: {other_name}\n") + # print("================================================\n") + + print0(f"Muon parameters (2D): {len(muon_params)} tensors") + print0(f"Other parameters (non-2D): {len(other_params)} tensors") + + # Create Muon optimizer for 2D parameters + muon_optimizer = None + if muon_params: + muon_optimizer = Muon( + params=muon_params, + lr=muon_lr, + weight_decay=weight_decay, + momentum=momentum, + nesterov=nesterov, + ns_steps=ns_steps, + rank=ddp_rank, + world_size=ddp_world_size + ) + + # Create AdamW optimizer for non-2D parameters + adam_optimizer = None + if other_params: + # create optim groups for AdamW + # decay_params = [p for p in other_params if p.dim() >= 2] + # nodecay_params = [p for p in other_params if p.dim() < 2] + optim_groups = [ + {'params': other_params, 'weight_decay': weight_decay}, + # {'params': nodecay_params, 'weight_decay': 0.0} + ] + + # Create AdamW optimizer + fused_available = 'fused' in inspect.signature(torch.optim.AdamW).parameters + use_fused = fused_available and device_type == 'cuda' + print0(f"using fused AdamW for non-Muon params: {use_fused}") + + if zero_stage == 1: + print0("using ZeroRedundancyOptimizer for non-Muon params") + adam_optimizer = ZeroRedundancyOptimizer(**optim_groups[0], optimizer_class=torch.optim.AdamW, + lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + # adam_optimizer.add_param_group(optim_groups[1]) + else: + print0("using regular AdamW for non-Muon params") + adam_optimizer = torch.optim.AdamW(optim_groups, lr=adam_lr, betas=(0.9, 0.95), fused=use_fused) + + return [muon_optimizer, adam_optimizer] + + # init the optimizer + if args.optimizer == "adam": + optimizers = configure_adam(model=raw_model_uncompiled, weight_decay=args.weight_decay, + learning_rate=args.adam_lr, betas=(0.9, 0.95), + device_type=device, zero_stage=zero_stage) + elif args.optimizer == "muon": + optimizers = configure_muon( + model=raw_model_uncompiled, + weight_decay=args.muon_weight_decay, + muon_lr=args.muon_lr, + adam_lr=args.adam_lr, + momentum=args.muon_momentum, + nesterov=bool(args.muon_nesterov), + ns_steps=args.muon_ns_steps, + device_type=device, + zero_stage=zero_stage, + ddp_rank=ddp_rank, + ddp_world_size=ddp_world_size + ) + # We'll use muon_optimizer and adam_optimizer separately + + # learning rate decay scheduler (cosine with warmup) + def get_lr(it,base_lr): + # if args.optimizer == "adam": + # base_lr = args.adam_lr + # else: # muon + # base_lr = args.muon_lr + min_lr = base_lr * args.lr_decay_frac + # 1) linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it+1) / args.warmup_iters + # 2) if it > lr_decay_iters, return min learning rate + if it > args.num_iterations: + return min_lr + # 3) in between, use cosine decay down to min learning rate + decay_ratio = (it - args.warmup_iters) / (args.num_iterations - args.warmup_iters) + assert 0 <= decay_ratio <= 1 + coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio)) # coeff starts at 1 and goes to 0 + return min_lr + coeff * (base_lr - min_lr) + + def get_wsd_lr(it, base_lr): + min_lr = base_lr * args.lr_decay_frac + cooldown_iters = int(args.num_iterations * 0.2) + # 1) Warmup: linear warmup for warmup_iters steps + if it < args.warmup_iters: + return base_lr * (it + 1) / args.warmup_iters + # 3) Decay: linear decay from base_lr to min_lr in the last 
cooldown_iters steps + cooldown_start = args.num_iterations - cooldown_iters + if it >= cooldown_start: + decay_ratio = (it - cooldown_start) / cooldown_iters + return base_lr - decay_ratio * (base_lr - min_lr) + # 2) Stable: constant learning rate at base_lr + return base_lr + + # create the logging directory if it does not exist + logfile = None + run_dir_path = None + + file_name = f"mode_{args.optimizer}_adam_lr_{args.adam_lr}_muon_lr_{args.muon_lr}_seed_{args.seed}.log" + if args.output_dir: + base_log_dir = Path(args.output_dir) + base_log_dir.mkdir(parents=True, exist_ok=True) + + # Create run-specific directory + # Generate UUID on master process and broadcast to all ranks + if master_process: + run_uuid = uuid.uuid4() + uuid_str = str(run_uuid) + else: + uuid_str = None + + # Broadcast UUID from rank 0 to all other ranks + if ddp: + # Create a tensor to hold the UUID string length and content + if master_process: + uuid_bytes = uuid_str.encode('utf-8') + uuid_len = len(uuid_bytes) + else: + uuid_len = 0 + + # Broadcast length + uuid_len_tensor = torch.tensor(uuid_len, dtype=torch.long, device=device) + dist.broadcast(uuid_len_tensor, src=0) + + # Broadcast UUID string + if master_process: + uuid_tensor = torch.ByteTensor(list(uuid_bytes)).to(device) + else: + uuid_tensor = torch.ByteTensor([0] * uuid_len_tensor.item()).to(device) + dist.broadcast(uuid_tensor, src=0) + + # Decode on non-master processes + if not master_process: + uuid_str = bytes(uuid_tensor.cpu().numpy()).decode('utf-8') + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.UUID(uuid_str) + else: + run_uuid = uuid.uuid4() + + run_folder_name = f"opt_{args.optimizer}_alr_{args.adam_lr}_mlr_{args.muon_lr}_seed_{args.seed}_{run_uuid}" + run_dir_path = base_log_dir / run_folder_name + + # Only master process creates the directory + if master_process: + run_dir_path.mkdir(parents=True, exist_ok=True) + + logfile = str(run_dir_path / "training_log.txt") + + # Save configuration + if master_process: + config_to_save = { + "cli_args": vars(args), + "run_uuid": str(run_uuid), + "script_code_logged_at_start": True + } + config_file_path = run_dir_path / "config.json" + with open(config_file_path, "w") as f: + json.dump(config_to_save, f, indent=4) + print0(f"Saved configuration to: {config_file_path}") + + if master_process and logfile: + with open(logfile, "w") as f: + pass # Create/clear the file + with open(logfile, "a") as f: + f.write(code) + + if device == "cuda": + torch.cuda.reset_peak_memory_stats() + timings = [] + norm = -1.0 # dummy value to print in inference-only mode + for step in range(args.num_iterations + 1): + t0 = time.time() + last_step = (step == args.num_iterations) + + # once in a while evaluate the validation dataset + if (args.val_loss_every > 0 \ + and (step % args.val_loss_every == 0 or last_step)) \ + and (val_loader is not None): + model.eval() + val_loader.reset() + with torch.no_grad(): + val_loss = 0.0 + for _ in range(args.val_max_steps): + x, y = val_loader.next_batch() + x, y = x.to(device), y.to(device) + _, loss = model(x, y, return_logits=False) + val_loss += loss.item() + val_loss /= args.val_max_steps + + # --- Comprehensive Sharpness Analysis --- + sharpness_log_str = "" + # Skip step 0 since we don't have a previous training update yet + if args.analyze_sharpness and step > 0 and (step % args.sharpness_analysis_interval == 0 or last_step): + print0(f"[Sharpness @ Step {step}] Starting comprehensive sharpness analysis...") + for optimizer in optimizers: + if 
isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + comprehensive_results = calculate_comprehensive_sharpness( + model=raw_model_uncompiled, # Use uncompiled model for HVP (double backward) + model_for_forward=model, # Use compiled+DDP model for forward pass + optimizers=optimizers, + step=step, + train_loader=train_loader, + val_loader=val_loader, + rank=ddp_rank, + world_size=ddp_world_size, + device=device, + B=B, + T=T, + ptdtype=ptdtype, + grad_accum_steps=grad_accum_steps, # Pass grad accumulation steps to scale loss correctly + last_training_update=last_training_update, # Pass the real update captured from training + last_training_gradient=last_training_gradient, # Pass the original gradient g_t + last_training_batches=last_training_batches # Pass ALL microbatches for consistent HVP + ) + sharpness_log_str = format_comprehensive_results(comprehensive_results) + + # Save sharpness results to file + if master_process and run_dir_path: + sharpness_file = run_dir_path / f"sharpness_step_{step}.json" + with open(sharpness_file, "w") as f: + json.dump(comprehensive_results, f, indent=4) + print0(f"[Sharpness @ Step {step}] Results saved to {sharpness_file}") + + # Clean up memory after sharpness analysis + del comprehensive_results + # Ensure all CUDA operations are complete before cleaning up + if device == "cuda": + torch.cuda.synchronize() + torch.cuda.empty_cache() + gc.collect() + if ddp: + dist.barrier() # Sync all ranks after cleanup + print0(f"[Step {step}] Memory cleaned up after sharpness analysis") + + # log to console and to file + if sharpness_log_str: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f} | {sharpness_log_str}") + else: + print0(f"step {step}/{args.num_iterations} | val loss {val_loss:.6f}") + + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d validation loss:%f" % (step, val_loss)) + if sharpness_log_str: + f.write(" %s" % sharpness_log_str) + f.write("\n") + + # once in a while perform model inference on the master process + if (args.sample_every > 0 \ + and (step % args.sample_every == 0 or last_step)) \ + and master_process: + model.eval() + # before we end, let's also do one round of inference + # we'll kick off the generation with "<|endoftext|>", which designates the start of a new sequence + start_ids = [enc.eot_token] + xg = (torch.tensor(start_ids, dtype=torch.long, device=device)[None, ...]) + max_new_tokens = 32 + temperature = 1.0 + top_k = 40 + yg = raw_model.generate(xg, max_new_tokens, temperature=temperature, top_k=top_k) + print0('---------------') + print0(enc.decode(yg[0].tolist())) + print0('---------------') + + # bit confusing: we want to make sure to eval and sample on 0th iteration + # but also after the very last iteration. so we loop for step <= num_iterations + # instead of just < num_iterations (one extra due to <=), only to do + # the validation/sampling one last time, and then we break right here as we're done. 
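+ # (illustrative) e.g. with --num_iterations 10 the loop body runs for steps 0..10,
+ # and step 10 only evaluates/samples above before exiting here without training.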
+ if last_step: + break + + # --------------- TRAINING SECTION BEGIN ----------------- + model.train() + # Zero gradients for the appropriate optimizer(s) + + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + optimizer.zero_grad(set_to_none=True) + elif isinstance(optimizer, Muon): + optimizer.zero_grad() + # if args.optimizer == "adam": + # optimizer.zero_grad(set_to_none=True) + # else: # muon + # if muon_optimizer is not None: + # muon_optimizer.zero_grad() + # if adam_optimizer is not None: + # adam_optimizer.zero_grad(set_to_none=True) + # if we are trying to overfit a single batch, we reset the loader here + if args.overfit_single_batch: + train_loader.reset() + # micro-batch loop where we do gradient accumulation to reach desired total batch size + lossf = 0.0 # for getting the mean loss (as simple float) over the accumulation steps + + # Pre-check if we need to collect microbatches for sharpness analysis + next_step = step + 1 + will_analyze_sharpness_next = args.analyze_sharpness and next_step > 0 and ( + (next_step % args.sharpness_analysis_interval == 0) or + (next_step == args.num_iterations) + ) + + + microbatches_this_step = [] if will_analyze_sharpness_next else None + + for micro_step in range(grad_accum_steps): + # fetch a batch + x, y = train_loader.next_batch() + x, y = x.to(device), y.to(device) + + # Store ALL microbatches for memory-efficient HVP calculation + if will_analyze_sharpness_next: + microbatches_this_step.append((x.detach().clone(), y.detach().clone())) + + if ddp: + model.require_backward_grad_sync = (micro_step == grad_accum_steps - 1) + # forward pass + with ctx: + _, loss = model(x, y, return_logits=False) + loss = loss / grad_accum_steps + lossf += loss.detach() # keep track of the mean loss + # backward pass + if not args.inference_only: + loss.backward() + if ddp: + dist.all_reduce(lossf, op=dist.ReduceOp.AVG) + lossf = lossf.item() + + #no clipping + # norm = torch.nn.utils.clip_grad_norm_(raw_model_uncompiled.parameters(), float('inf')) + + + if will_analyze_sharpness_next: + # Use raw_model_uncompiled's parameter order so it matches sharpness analysis codepaths. + # (DDP/torch.compile wrappers can be a footgun if parameter iteration order ever diverges.) 
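+ # Sketch of what gets captured for the next analysis step, in the notation used by
+ # calculate_comprehensive_sharpness: g_t is the gradient of the accumulated loss at
+ # theta_t (cloned below from p.grad), v = theta_{t+1} - theta_t is taken after
+ # optimizer.step(), and the same microbatches are replayed later so that the
+ # HVP-based sharpness v^T H(theta_t) v / ||v||^2 is measured on the exact objective
+ # that produced g_t and v (via the double-backward identity Hv = d/dtheta [g^T v]).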
+ print(raw_model_uncompiled.transformer.h[0].attn.q_w.weight[:5,:5]) + params_before_optimizer_step = [p.detach().clone() for p in raw_model_uncompiled.parameters()] + # Save the original gradient g_t that will produce the update v + last_training_gradient = [ + p.grad.detach().clone() if p.grad is not None else torch.zeros_like(p) + for p in raw_model_uncompiled.parameters() + ] + # Capture ALL microbatches for consistent HVP calculation + # This ensures H is computed on the exact same objective as g_t and v + last_training_batches = microbatches_this_step # Already cloned above + else: + params_before_optimizer_step = None + last_training_batches = None + + # Update learning rate and step optimizers + for optimizer in optimizers: + if isinstance(optimizer, ZeroRedundancyOptimizer) or isinstance(optimizer, torch.optim.AdamW): + adam_lr = get_wsd_lr(step,args.adam_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = adam_lr + optimizer.step() + elif isinstance(optimizer, Muon): + muon_lr = get_wsd_lr(step,args.muon_lr) + for param_group in optimizer.param_groups: + param_group['lr'] = muon_lr + optimizer.step() + else: + raise ValueError(f"Unsupported optimizer: {type(optimizer)}") + + + if params_before_optimizer_step is not None: + # Clean up old update to save memory + if last_training_update is not None: + del last_training_update + + last_training_update = [ + p.detach() - p_before + for p_before, p in zip(params_before_optimizer_step, raw_model_uncompiled.parameters()) + ] + del params_before_optimizer_step + + # --------------- TRAINING SECTION END ------------------- + + # wait on the CPU for all device work to end so we get accurate per-iteration timings below + if device == "mps": + torch.mps.synchronize() + elif device == "cuda": + torch.cuda.synchronize() + # time and print + t1 = time.time() + # the 0th iteration is often an outlier (much slower) => skip logging it + tokens_per_second = grad_accum_steps * ddp_world_size * B * T / (t1-t0) + print0(f"step {step+1:4d}/{args.num_iterations} | train loss {lossf:.6f} | norm {norm:.4f} | ({(t1-t0)*1000:.2f} ms | {tokens_per_second:.0f} tok/s)") + # log to logile + if master_process and logfile is not None: + with open(logfile, "a") as f: + f.write("step:%d train loss:%f\n" % (step, lossf)) + + # keep track of smooth timings, last 20 iterations + if step > 0 and step > args.num_iterations - 20: + timings.append(t1-t0) + + # print the average of the last 20 timings, to get something smooth-ish + timings = timings[-20:] + print0(f"final {len(timings)} iters avg: {np.mean(timings)*1000:.3f}ms") + print0(f"peak memory consumption: {torch.cuda.max_memory_allocated() // 1024 // 1024} MiB") + + # ------------------------------------------------------------------------- + # clean up nice + if ddp: + destroy_process_group()step:0 validation loss:11.020915 +step:0 train loss:11.022921 +step:1 train loss:11.020821 +step:2 train loss:11.015633 +step:3 train loss:11.003447 +step:4 train loss:10.996918 +step:5 train loss:10.983182 +step:6 train loss:10.961089 +step:7 train loss:10.952547 +step:8 train loss:10.924550 +step:9 train loss:10.900063 +step:10 train loss:10.877802 +step:11 train loss:10.842884 +step:12 train loss:10.815554 +step:13 train loss:10.779120 +step:14 train loss:10.745684 +step:15 train loss:10.705143 +step:16 train loss:10.667948 +step:17 train loss:10.621958 +step:18 train loss:10.579474 +step:19 train loss:10.531895 +step:20 train loss:10.489979 +step:21 train loss:10.432461 +step:22 train loss:10.380466 
+step:23 train loss:10.327282 +step:24 train loss:10.261503 +step:25 train loss:10.221999 +step:26 train loss:10.170702 +step:27 train loss:10.114900 +step:28 train loss:10.041381 +step:29 train loss:10.023357 +step:30 train loss:9.931408 +step:31 train loss:9.875450 +step:32 train loss:9.790439 +step:33 train loss:9.751458 +step:34 train loss:9.709426 +step:35 train loss:9.630258 +step:36 train loss:9.562742 +step:37 train loss:9.487716 +step:38 train loss:9.457472 +step:39 train loss:9.366207 +step:40 train loss:9.275597 +step:41 train loss:9.234056 +step:42 train loss:9.180874 +step:43 train loss:9.122473 +step:44 train loss:9.050372 +step:45 train loss:8.988840 +step:46 train loss:8.931592 +step:47 train loss:8.900712 +step:48 train loss:8.799797 +step:49 train loss:8.719195 +step:50 train loss:8.645908 +step:51 train loss:8.607292 +step:52 train loss:8.575952 +step:53 train loss:8.516706 +step:54 train loss:8.473334 +step:55 train loss:8.417694 +step:56 train loss:8.342415 +step:57 train loss:8.284663 +step:58 train loss:8.229758 +step:59 train loss:8.174835 +step:60 train loss:8.132859 +step:61 train loss:8.067553 +step:62 train loss:7.999375 +step:63 train loss:7.989566 +step:64 train loss:7.945280 +step:65 train loss:7.839226 +step:66 train loss:7.820951 +step:67 train loss:7.748955 +step:68 train loss:7.729859 +step:69 train loss:7.697703 +step:70 train loss:7.638080 +step:71 train loss:7.552741 +step:72 train loss:7.549035 +step:73 train loss:7.537744 +step:74 train loss:7.449339 +step:75 train loss:7.469927 +step:76 train loss:7.379778 +step:77 train loss:7.396785 +step:78 train loss:7.386037 +step:79 train loss:7.317619 +step:80 train loss:7.278395 +step:81 train loss:7.331000 +step:82 train loss:7.274020 +step:83 train loss:7.221478 +step:84 train loss:7.191485 +step:85 train loss:7.162227 +step:86 train loss:7.111941 +step:87 train loss:7.151890 +step:88 train loss:7.162732 +step:89 train loss:7.115613 +step:90 train loss:7.130004 +step:91 train loss:7.065244 +step:92 train loss:7.079117 +step:93 train loss:7.038570 +step:94 train loss:7.054668 +step:95 train loss:7.012733 +step:96 train loss:6.992846 +step:97 train loss:6.960783 +step:98 train loss:6.924885 +step:99 train loss:6.944777 +step:100 train loss:6.800900 +step:101 train loss:6.839316 +step:102 train loss:6.868488 +step:103 train loss:6.876469 +step:104 train loss:6.837489 +step:105 train loss:6.848731 +step:106 train loss:6.807307 +step:107 train loss:6.918686 +step:108 train loss:6.828009 +step:109 train loss:6.759518 +step:110 train loss:6.721057 +step:111 train loss:6.781910 +step:112 train loss:6.840425 +step:113 train loss:6.713260 +step:114 train loss:6.738246 +step:115 train loss:6.742345 +step:116 train loss:6.767486 +step:117 train loss:6.712100 +step:118 train loss:6.668729 +step:119 train loss:6.699640 +step:120 train loss:6.685084 +step:121 train loss:6.529016 +step:122 train loss:6.470387 +step:123 train loss:6.655928 +step:124 train loss:6.522992 +step:125 train loss:6.517405 +step:126 train loss:6.588136 +step:127 train loss:6.487228 +step:128 train loss:6.488886 +step:129 train loss:6.521224 +step:130 train loss:6.478678 +step:131 train loss:6.424615 +step:132 train loss:6.480562 +step:133 train loss:6.418433 +step:134 train loss:6.463514 +step:135 train loss:6.423513 +step:136 train loss:6.473093 +step:137 train loss:6.472595 +step:138 train loss:6.398807 +step:139 train loss:6.339911 +step:140 train loss:6.334379 +step:141 train loss:6.338220 +step:142 train loss:6.345374 +step:143 train 
loss:6.400818 +step:144 train loss:6.343410 +step:145 train loss:6.403069 +step:146 train loss:6.383256 +step:147 train loss:6.337461 +step:148 train loss:6.339786 +step:149 train loss:6.278154 +step:150 train loss:6.349926 +step:151 train loss:6.288527 +step:152 train loss:6.270667 +step:153 train loss:6.268290 +step:154 train loss:6.325766 +step:155 train loss:6.246966 +step:156 train loss:6.251171 +step:157 train loss:6.239710 +step:158 train loss:6.282377 +step:159 train loss:6.289295 +step:160 train loss:6.228106 +step:161 train loss:6.258774 +step:162 train loss:6.227435 +step:163 train loss:6.222734 +step:164 train loss:6.225013 +step:165 train loss:6.225900 +step:166 train loss:6.125985 +step:167 train loss:6.208813 +step:168 train loss:6.183660 +step:169 train loss:6.167822 +step:170 train loss:6.165107 +step:171 train loss:6.163725 +step:172 train loss:6.187918 +step:173 train loss:6.188816 +step:174 train loss:6.145245 +step:175 train loss:6.154038 +step:176 train loss:6.089854 +step:177 train loss:6.078705 +step:178 train loss:6.143832 +step:179 train loss:6.045702 +step:180 train loss:6.130635 +step:181 train loss:6.147377 +step:182 train loss:6.116632 +step:183 train loss:6.084702 +step:184 train loss:6.081503 +step:185 train loss:6.120261 +step:186 train loss:6.082143 +step:187 train loss:6.109676 +step:188 train loss:6.104267 +step:189 train loss:6.114576 +step:190 train loss:6.085770 +step:191 train loss:6.063839 +step:192 train loss:5.962750 +step:193 train loss:6.063873 +step:194 train loss:5.955626 +step:195 train loss:5.936303 +step:196 train loss:5.991346 +step:197 train loss:6.041763 +step:198 train loss:6.027599 +step:199 train loss:6.032074 +step:200 train loss:6.007285 +step:201 train loss:6.015654 +step:202 train loss:5.969965 +step:203 train loss:6.005560 +step:204 train loss:5.977498 +step:205 train loss:5.989109 +step:206 train loss:6.005339 +step:207 train loss:5.972935 +step:208 train loss:5.993797 +step:209 train loss:5.966638 +step:210 train loss:5.931464 +step:211 train loss:5.964770 +step:212 train loss:5.955126 +step:213 train loss:5.950873 +step:214 train loss:5.919292 +step:215 train loss:5.919187 +step:216 train loss:6.022571 +step:217 train loss:5.897845 +step:218 train loss:5.911914 +step:219 train loss:5.951565 +step:220 train loss:5.914874 +step:221 train loss:5.865646 +step:222 train loss:5.919004 +step:223 train loss:5.908193 +step:224 train loss:5.911183 +step:225 train loss:5.831373 +step:226 train loss:5.907085 +step:227 train loss:5.882356 +step:228 train loss:5.913959 +step:229 train loss:5.914559 +step:230 train loss:5.853307 +step:231 train loss:5.829775 +step:232 train loss:5.851436 +step:233 train loss:5.824432 +step:234 train loss:5.839914 +step:235 train loss:5.810059 +step:236 train loss:5.836509 +step:237 train loss:5.855529 +step:238 train loss:5.802719 +step:239 train loss:5.810011 +step:240 train loss:5.744736 +step:241 train loss:5.827317 +step:242 train loss:5.805853 +step:243 train loss:5.813365 +step:244 train loss:5.808172 +step:245 train loss:5.801849 +step:246 train loss:5.745200 +step:247 train loss:5.790133 +step:248 train loss:5.753191 +step:249 train loss:5.780046 +step:250 validation loss:5.816328 +step:250 train loss:5.795261 +step:251 train loss:5.792654 +step:252 train loss:5.778447 +step:253 train loss:5.766611 +step:254 train loss:5.721929 +step:255 train loss:5.732292 +step:256 train loss:5.669412 +step:257 train loss:5.770794 +step:258 train loss:5.818102 +step:259 train loss:5.727236 +step:260 train 
loss:5.761465 +step:261 train loss:5.709581 +step:262 train loss:5.729639 +step:263 train loss:5.665124 +step:264 train loss:5.690053 +step:265 train loss:5.757213 +step:266 train loss:5.707440 +step:267 train loss:5.748967 +step:268 train loss:5.719793 +step:269 train loss:5.699202 +step:270 train loss:5.664993 +step:271 train loss:5.702517 +step:272 train loss:5.658258 +step:273 train loss:5.659257 +step:274 train loss:5.686350 +step:275 train loss:5.728751 +step:276 train loss:5.717369 +step:277 train loss:5.658054 +step:278 train loss:5.597902 +step:279 train loss:5.650140 +step:280 train loss:5.627337 +step:281 train loss:5.646513 +step:282 train loss:5.545564 +step:283 train loss:5.621266 +step:284 train loss:5.675294 +step:285 train loss:5.616650 +step:286 train loss:5.679158 +step:287 train loss:5.633984 +step:288 train loss:5.688132 +step:289 train loss:5.622052 +step:290 train loss:5.583234 +step:291 train loss:5.679172 +step:292 train loss:5.623349 +step:293 train loss:5.619033 +step:294 train loss:5.599607 +step:295 train loss:5.613659 +step:296 train loss:5.536049 +step:297 train loss:5.570344 +step:298 train loss:5.587377 +step:299 train loss:5.668669 +step:300 train loss:5.561022 +step:301 train loss:5.532135 +step:302 train loss:5.569682 +step:303 train loss:5.571075 +step:304 train loss:5.535334 +step:305 train loss:5.529088 +step:306 train loss:5.581438 +step:307 train loss:5.563235 +step:308 train loss:5.538514 +step:309 train loss:5.584793 +step:310 train loss:5.621078 +step:311 train loss:5.524879 +step:312 train loss:5.592443 +step:313 train loss:5.545939 +step:314 train loss:5.542786 +step:315 train loss:5.518475 +step:316 train loss:5.521148 +step:317 train loss:5.512180 +step:318 train loss:5.532077 +step:319 train loss:5.484022 +step:320 train loss:5.557545 +step:321 train loss:5.498025 +step:322 train loss:5.468403 +step:323 train loss:5.504451 +step:324 train loss:5.458273 +step:325 train loss:5.484417 +step:326 train loss:5.474913 +step:327 train loss:5.438378 +step:328 train loss:5.422674 +step:329 train loss:5.481445 +step:330 train loss:5.518897 +step:331 train loss:5.507393 +step:332 train loss:5.379970 +step:333 train loss:5.434879 +step:334 train loss:5.401068 +step:335 train loss:5.426919 +step:336 train loss:5.440549 +step:337 train loss:5.478957 +step:338 train loss:5.382010 +step:339 train loss:5.430655 +step:340 train loss:5.425172 +step:341 train loss:5.397118 +step:342 train loss:5.424765 +step:343 train loss:5.414907 +step:344 train loss:5.403872 +step:345 train loss:5.419561 +step:346 train loss:5.377923 +step:347 train loss:5.375593 +step:348 train loss:5.292452 +step:349 train loss:5.400406 +step:350 train loss:5.316769 +step:351 train loss:5.389049 +step:352 train loss:5.337438 +step:353 train loss:5.293894 +step:354 train loss:5.392811 +step:355 train loss:5.326373 +step:356 train loss:5.344811 +step:357 train loss:5.374504 +step:358 train loss:5.327759 +step:359 train loss:5.335831 +step:360 train loss:5.314835 +step:361 train loss:5.298489 +step:362 train loss:5.332264 +step:363 train loss:5.320851 +step:364 train loss:5.358675 +step:365 train loss:5.289647 +step:366 train loss:5.267687 +step:367 train loss:5.278588 +step:368 train loss:5.227837 +step:369 train loss:5.316350 +step:370 train loss:5.283669 +step:371 train loss:5.237283 +step:372 train loss:5.294704 +step:373 train loss:5.262270 +step:374 train loss:5.272594 +step:375 train loss:5.246758 +step:376 train loss:5.238419 +step:377 train loss:5.265995 +step:378 train loss:5.239898 
+step:379 train loss:5.231759 +step:380 train loss:5.224850 +step:381 train loss:5.211470 +step:382 train loss:5.198726 +step:383 train loss:5.220330 +step:384 train loss:5.185499 +step:385 train loss:5.196482 +step:386 train loss:5.149651 +step:387 train loss:5.210923 +step:388 train loss:5.186747 +step:389 train loss:5.148387 +step:390 train loss:5.145937 +step:391 train loss:5.157642 +step:392 train loss:5.176226 +step:393 train loss:5.073219 +step:394 train loss:5.132836 +step:395 train loss:5.110964 +step:396 train loss:5.148408 +step:397 train loss:5.035789 +step:398 train loss:5.178942 +step:399 train loss:5.130303 +step:400 train loss:5.158546 +step:401 train loss:5.051038 +step:402 train loss:5.093302 +step:403 train loss:5.066086 +step:404 train loss:5.122470 +step:405 train loss:5.102380 +step:406 train loss:5.044540 +step:407 train loss:5.084762 +step:408 train loss:5.028526 +step:409 train loss:5.093008 +step:410 train loss:5.029311 +step:411 train loss:5.101529 +step:412 train loss:5.040868 +step:413 train loss:5.001968 +step:414 train loss:4.992946 +step:415 train loss:5.090611 +step:416 train loss:5.042084 +step:417 train loss:5.048348 +step:418 train loss:5.033709 +step:419 train loss:5.048375 +step:420 train loss:5.022093 +step:421 train loss:5.044594 +step:422 train loss:4.994659 +step:423 train loss:4.998639 +step:424 train loss:5.028719 +step:425 train loss:4.933351 +step:426 train loss:5.049797 +step:427 train loss:4.884748 +step:428 train loss:5.018205 +step:429 train loss:4.961301 +step:430 train loss:5.046351 +step:431 train loss:4.900217 +step:432 train loss:4.985667 +step:433 train loss:4.853951 +step:434 train loss:4.964170 +step:435 train loss:4.884192 +step:436 train loss:4.998846 +step:437 train loss:4.917317 +step:438 train loss:5.004312 +step:439 train loss:4.933650 +step:440 train loss:5.036841 +step:441 train loss:4.896150 +step:442 train loss:4.900848 +step:443 train loss:4.941721 +step:444 train loss:4.828649 +step:445 train loss:4.887764 +step:446 train loss:4.899159 +step:447 train loss:4.911426 +step:448 train loss:4.871447 +step:449 train loss:4.955162 +step:450 train loss:4.812306 +step:451 train loss:4.898589 +step:452 train loss:4.834489 +step:453 train loss:4.839830 +step:454 train loss:4.809048 +step:455 train loss:4.848721 +step:456 train loss:4.845518 +step:457 train loss:4.806039 +step:458 train loss:4.813917 +step:459 train loss:4.822393 +step:460 train loss:4.840486 +step:461 train loss:4.869081 +step:462 train loss:4.771165 +step:463 train loss:4.744082 +step:464 train loss:4.927023 +step:465 train loss:4.779448 +step:466 train loss:4.829957 +step:467 train loss:4.635223 +step:468 train loss:4.795365 +step:469 train loss:4.750010 +step:470 train loss:4.757794 +step:471 train loss:4.703683 +step:472 train loss:4.812765 +step:473 train loss:4.747907 +step:474 train loss:4.824240 +step:475 train loss:4.691861 +step:476 train loss:4.736965 +step:477 train loss:4.643213 +step:478 train loss:4.709308 +step:479 train loss:4.678800 +step:480 train loss:4.737453 +step:481 train loss:4.768280 +step:482 train loss:4.736516 +step:483 train loss:4.713468 +step:484 train loss:4.598658 +step:485 train loss:4.768483 +step:486 train loss:4.607977 +step:487 train loss:4.709641 +step:488 train loss:4.687940 +step:489 train loss:4.715833 +step:490 train loss:4.645610 +step:491 train loss:4.668044 +step:492 train loss:4.601334 +step:493 train loss:4.683230 +step:494 train loss:4.630867 +step:495 train loss:4.614232 +step:496 train loss:4.595072 +step:497 
train loss:4.612548 +step:498 train loss:4.606590 +step:499 train loss:4.638294 +step:500 validation loss:4.606588 total_sharp:4.1979e-02 L1_sharp:3.0135e-02 L2_sharp:1.2833e-02 L3_sharp:1.3658e-02 L4_sharp:1.3074e-02 L5_sharp:9.3215e-03 L6_sharp:6.6791e-03 L7_sharp:4.5355e-03 L8_sharp:3.1150e-03 L9_sharp:2.4653e-03 L10_sharp:1.6275e-03 L11_sharp:1.2507e-03 L12_sharp:1.1379e-03 total_fnorm:1.7238e+00 total_l1_linf:1.4890e+04 total_spectral:1.7238e+00 L1_fnorm:4.3300e-01 L2_fnorm:4.0905e-01 L3_fnorm:3.9037e-01 L4_fnorm:3.8000e-01 L5_fnorm:4.0284e-01 L6_fnorm:3.9802e-01 L7_fnorm:4.1380e-01 L8_fnorm:4.0642e-01 L9_fnorm:4.1970e-01 L10_fnorm:4.1971e-01 L11_fnorm:4.2533e-01 L12_fnorm:4.2519e-01 L1_l1linf:3.1715e-01 L2_l1linf:3.1631e-01 L3_l1linf:3.1624e-01 L4_l1linf:3.1401e-01 L5_l1linf:3.0620e-01 L6_l1linf:3.0391e-01 L7_l1linf:2.9992e-01 L8_l1linf:3.0046e-01 L9_l1linf:3.0352e-01 L10_l1linf:2.9982e-01 L11_l1linf:2.9955e-01 L12_l1linf:2.9641e-01 L1_spectral:8.6058e-03 L2_spectral:8.6071e-03 L3_spectral:8.6078e-03 L4_spectral:8.6076e-03 L5_spectral:8.6079e-03 L6_spectral:8.6116e-03 L7_spectral:8.6087e-03 L8_spectral:8.6152e-03 L9_spectral:8.6153e-03 L10_spectral:8.6147e-03 L11_spectral:8.6150e-03 L12_spectral:8.6040e-03 v_norm:1.7238e+00 cos_v_-g_hvp:5.9667e-02 g_hvp_norm:9.9410e-01 cos_v_-g_t:6.4165e-02 g_t_norm:9.2733e-01 hv_norm:1.8832e+00 cos_v_hv:3.8424e-02 hg_norm:7.4239e+01 cos_g_hg:6.0060e-01 v_par:4.4765e-03 v_perp:1.7238e+00 L1_cos_v_neg_g:6.3124e-02 L1_v_norm:4.3300e-01 L2_cos_v_neg_g:6.5567e-02 L2_v_norm:4.0905e-01 L3_cos_v_neg_g:5.8234e-02 L3_v_norm:3.9037e-01 L4_cos_v_neg_g:6.7563e-02 L4_v_norm:3.8000e-01 L5_cos_v_neg_g:6.9669e-02 L5_v_norm:4.0284e-01 L6_cos_v_neg_g:7.8255e-02 L6_v_norm:3.9802e-01 L7_cos_v_neg_g:8.2509e-02 L7_v_norm:4.1380e-01 L8_cos_v_neg_g:8.4642e-02 L8_v_norm:4.0642e-01 L9_cos_v_neg_g:8.4333e-02 L9_v_norm:4.1970e-01 L10_cos_v_neg_g:8.1008e-02 L10_v_norm:4.1971e-01 L11_cos_v_neg_g:7.5329e-02 L11_v_norm:4.2533e-01 L12_cos_v_neg_g:6.7376e-02 L12_v_norm:4.2519e-01 +step:500 train loss:4.613463 +step:501 train loss:4.669950 +step:502 train loss:4.591934 +step:503 train loss:4.633685 +step:504 train loss:4.594184 +step:505 train loss:4.655378 +step:506 train loss:4.585622 +step:507 train loss:4.647350 +step:508 train loss:4.574367 +step:509 train loss:4.655839 +step:510 train loss:4.569477 +step:511 train loss:4.610000 +step:512 train loss:4.564253 +step:513 train loss:4.586570 +step:514 train loss:4.574918 +step:515 train loss:4.618653 +step:516 train loss:4.484642 +step:517 train loss:4.555951 +step:518 train loss:4.604783 +step:519 train loss:4.653584 +step:520 train loss:4.561220 +step:521 train loss:4.586725 +step:522 train loss:4.546618 +step:523 train loss:4.608681 +step:524 train loss:4.637136 +step:525 train loss:4.647077 +step:526 train loss:4.546080 +step:527 train loss:4.534827 +step:528 train loss:4.543410 +step:529 train loss:4.532687 +step:530 train loss:4.552215 +step:531 train loss:4.538115 +step:532 train loss:4.646249 +step:533 train loss:4.521087 +step:534 train loss:4.608849 +step:535 train loss:4.483604 +step:536 train loss:4.525682 +step:537 train loss:4.440423 +step:538 train loss:4.563902 +step:539 train loss:4.462854 +step:540 train loss:4.532026 +step:541 train loss:4.483685 +step:542 train loss:4.553107 +step:543 train loss:4.463493 +step:544 train loss:4.524426 +step:545 train loss:4.495427 +step:546 train loss:4.513041 +step:547 train loss:4.441515 +step:548 train loss:4.495592 +step:549 train loss:4.444658 +step:550 train loss:4.547647 
+step:551 train loss:4.419370 +step:552 train loss:4.562847 +step:553 train loss:4.408446 +step:554 train loss:4.536633 +step:555 train loss:4.531498 +step:556 train loss:4.487931 +step:557 train loss:4.498453 +step:558 train loss:4.435263 +step:559 train loss:4.473882 +step:560 train loss:4.429234 +step:561 train loss:4.412793 +step:562 train loss:4.403595 +step:563 train loss:4.421565 +step:564 train loss:4.464937 +step:565 train loss:4.447397 +step:566 train loss:4.389891 +step:567 train loss:4.477491 +step:568 train loss:4.391387 +step:569 train loss:4.417830 +step:570 train loss:4.421912 +step:571 train loss:4.422393 +step:572 train loss:4.407171 +step:573 train loss:4.465175 +step:574 train loss:4.714194 +step:575 train loss:4.470146 +step:576 train loss:4.404300 +step:577 train loss:4.390185 +step:578 train loss:4.457293 +step:579 train loss:4.391754 +step:580 train loss:4.456929 +step:581 train loss:4.450438 +step:582 train loss:4.384397 +step:583 train loss:4.437064 +step:584 train loss:4.408889 +step:585 train loss:4.433208 +step:586 train loss:4.362161 +step:587 train loss:4.409060 +step:588 train loss:4.414593 +step:589 train loss:4.422497 +step:590 train loss:4.476802 +step:591 train loss:4.407677 +step:592 train loss:4.406985 +step:593 train loss:4.377565 +step:594 train loss:4.385502 +step:595 train loss:4.398401 +step:596 train loss:4.429598 +step:597 train loss:4.363650 +step:598 train loss:4.354620 +step:599 train loss:4.358323 +step:600 train loss:4.433920 +step:601 train loss:4.372046 +step:602 train loss:4.442732 +step:603 train loss:4.387529 +step:604 train loss:4.399002 +step:605 train loss:4.340259 +step:606 train loss:4.369765 +step:607 train loss:4.387508 +step:608 train loss:4.374933 +step:609 train loss:4.291783 +step:610 train loss:4.385869 +step:611 train loss:4.464414 +step:612 train loss:4.309504 +step:613 train loss:4.366384 +step:614 train loss:4.320626 +step:615 train loss:4.432477 +step:616 train loss:4.337967 +step:617 train loss:4.317798 +step:618 train loss:4.390124 +step:619 train loss:4.346376 +step:620 train loss:4.405039 +step:621 train loss:4.354918 +step:622 train loss:4.343876 +step:623 train loss:4.309594 +step:624 train loss:4.345010 +step:625 train loss:4.364746 +step:626 train loss:4.359136 +step:627 train loss:4.290547 +step:628 train loss:4.351779 +step:629 train loss:4.355966 +step:630 train loss:4.293881 +step:631 train loss:4.326372 +step:632 train loss:4.314896 +step:633 train loss:4.284111 +step:634 train loss:4.242576 +step:635 train loss:4.325339 +step:636 train loss:4.275675 +step:637 train loss:4.338369 +step:638 train loss:4.338712 +step:639 train loss:4.342557 +step:640 train loss:4.269474 +step:641 train loss:4.327238 +step:642 train loss:4.318354 +step:643 train loss:4.453706 +step:644 train loss:4.326326 +step:645 train loss:4.277289 +step:646 train loss:4.293076 +step:647 train loss:4.313630 +step:648 train loss:4.423753 +step:649 train loss:4.261224 +step:650 train loss:4.273642 +step:651 train loss:4.328390 +step:652 train loss:4.346463 +step:653 train loss:4.298755 +step:654 train loss:4.320062 +step:655 train loss:4.315135 +step:656 train loss:4.284001 +step:657 train loss:4.257486 +step:658 train loss:4.344824 +step:659 train loss:4.253205 +step:660 train loss:4.193532 +step:661 train loss:4.194016 +step:662 train loss:4.278429 +step:663 train loss:4.272737 +step:664 train loss:4.262394 +step:665 train loss:4.286598 +step:666 train loss:4.267905 +step:667 train loss:4.173980 +step:668 train loss:4.252844 +step:669 
train loss:4.244947 +step:670 train loss:4.281229 +step:671 train loss:4.274428 +step:672 train loss:4.236081 +step:673 train loss:4.317554 +step:674 train loss:4.405800 +step:675 train loss:4.257406 +step:676 train loss:4.229743 +step:677 train loss:4.276217 +step:678 train loss:4.295142 +step:679 train loss:4.342677 +step:680 train loss:4.199414 +step:681 train loss:4.241729 +step:682 train loss:4.293094 +step:683 train loss:4.372728 +step:684 train loss:4.267596 +step:685 train loss:4.231494 +step:686 train loss:4.235825 +step:687 train loss:4.281968 +step:688 train loss:4.188022 +step:689 train loss:4.268441 +step:690 train loss:4.264315 +step:691 train loss:4.182486 +step:692 train loss:4.360467 +step:693 train loss:4.288316 +step:694 train loss:4.234622 +step:695 train loss:4.199727 +step:696 train loss:4.211987 +step:697 train loss:4.242115 +step:698 train loss:4.282092 +step:699 train loss:4.201316 +step:700 train loss:4.245176 +step:701 train loss:4.253967 +step:702 train loss:4.243613 +step:703 train loss:4.250560 +step:704 train loss:4.228271 +step:705 train loss:4.231932 +step:706 train loss:4.259185 +step:707 train loss:4.250085 +step:708 train loss:4.242716 +step:709 train loss:4.229219 +step:710 train loss:4.146379 +step:711 train loss:4.227118 +step:712 train loss:4.230005 +step:713 train loss:4.215606 +step:714 train loss:4.154632 +step:715 train loss:4.268359 +step:716 train loss:4.245831 +step:717 train loss:4.175551 +step:718 train loss:4.223734 +step:719 train loss:4.301386 +step:720 train loss:4.154765 +step:721 train loss:4.204145 +step:722 train loss:4.247676 +step:723 train loss:4.147726 +step:724 train loss:4.135684 +step:725 train loss:4.146744 +step:726 train loss:4.209855 +step:727 train loss:4.169102 +step:728 train loss:4.218634 +step:729 train loss:4.166778 +step:730 train loss:4.223695 +step:731 train loss:4.205228 +step:732 train loss:4.203116 +step:733 train loss:4.185437 +step:734 train loss:4.231447 +step:735 train loss:4.236103 +step:736 train loss:4.149580 +step:737 train loss:4.200006 +step:738 train loss:4.127312 +step:739 train loss:4.204676 +step:740 train loss:4.164049 +step:741 train loss:4.224426 +step:742 train loss:4.114206 +step:743 train loss:4.150336 +step:744 train loss:4.155209 +step:745 train loss:4.189342 +step:746 train loss:4.218722 +step:747 train loss:4.042517 +step:748 train loss:4.246119 +step:749 train loss:4.179387 +step:750 validation loss:4.130571 +step:750 train loss:4.133445 +step:751 train loss:4.232626 +step:752 train loss:4.140989 +step:753 train loss:4.093230 +step:754 train loss:4.205975 +step:755 train loss:4.168912 +step:756 train loss:4.133090 +step:757 train loss:4.176220 +step:758 train loss:4.182308 +step:759 train loss:4.124004 +step:760 train loss:4.195796 +step:761 train loss:4.117381 +step:762 train loss:4.244617 +step:763 train loss:4.172451 +step:764 train loss:4.187607 +step:765 train loss:4.112031 +step:766 train loss:4.173641 +step:767 train loss:4.182228 +step:768 train loss:4.199548 +step:769 train loss:4.242532 +step:770 train loss:4.202199 +step:771 train loss:4.179122 +step:772 train loss:4.153873 +step:773 train loss:4.142137 +step:774 train loss:4.108412 +step:775 train loss:4.143109 +step:776 train loss:4.129071 +step:777 train loss:4.171246 +step:778 train loss:4.144404 +step:779 train loss:4.171749 +step:780 train loss:4.156650 +step:781 train loss:4.107701 +step:782 train loss:4.162833 +step:783 train loss:4.114699 +step:784 train loss:4.155516 +step:785 train loss:4.192843 +step:786 train 
loss:4.125945 +step:787 train loss:4.187590 +step:788 train loss:4.139005 +step:789 train loss:4.104787 +step:790 train loss:4.127796 +step:791 train loss:4.111421 +step:792 train loss:4.144787 +step:793 train loss:4.135950 +step:794 train loss:4.152700 +step:795 train loss:4.132368 +step:796 train loss:4.201657 +step:797 train loss:4.161357 +step:798 train loss:4.128032 +step:799 train loss:4.137398 +step:800 train loss:4.049296 +step:801 train loss:4.137420 +step:802 train loss:4.089923 +step:803 train loss:4.095024 +step:804 train loss:4.125876 +step:805 train loss:4.116480 +step:806 train loss:4.069298 +step:807 train loss:4.140013 +step:808 train loss:4.135777 +step:809 train loss:4.079997 +step:810 train loss:4.159920 +step:811 train loss:4.125040 +step:812 train loss:4.154561 +step:813 train loss:4.053251 +step:814 train loss:4.148884 +step:815 train loss:4.141123 +step:816 train loss:4.106732 +step:817 train loss:4.133108 +step:818 train loss:4.073630 +step:819 train loss:4.136212 +step:820 train loss:4.162306 +step:821 train loss:4.150321 +step:822 train loss:4.071243 +step:823 train loss:4.091571 +step:824 train loss:4.040239 +step:825 train loss:4.121866 +step:826 train loss:4.072750 +step:827 train loss:4.104772 +step:828 train loss:4.103628 +step:829 train loss:4.057941 +step:830 train loss:4.120494 +step:831 train loss:4.098225 +step:832 train loss:4.094443 +step:833 train loss:4.122223 +step:834 train loss:4.112187 +step:835 train loss:4.092748 +step:836 train loss:4.119724 +step:837 train loss:4.091068 +step:838 train loss:4.039634 +step:839 train loss:4.111226 +step:840 train loss:4.073056 +step:841 train loss:4.153865 +step:842 train loss:4.093770 +step:843 train loss:4.087418 +step:844 train loss:4.095786 +step:845 train loss:4.088391 +step:846 train loss:4.083157 +step:847 train loss:4.070800 +step:848 train loss:4.326423 +step:849 train loss:4.057986 +step:850 train loss:4.087751 +step:851 train loss:4.100433 +step:852 train loss:4.071201 +step:853 train loss:4.081114 +step:854 train loss:4.095806 +step:855 train loss:4.090581 +step:856 train loss:4.025374 +step:857 train loss:4.056641 +step:858 train loss:4.118325 +step:859 train loss:4.117168 +step:860 train loss:4.032022 +step:861 train loss:4.053068 +step:862 train loss:4.150507 +step:863 train loss:4.052011 +step:864 train loss:4.102753 +step:865 train loss:4.054427 +step:866 train loss:3.981939 +step:867 train loss:4.056461 +step:868 train loss:4.029351 +step:869 train loss:4.033643 +step:870 train loss:4.077318 +step:871 train loss:4.101369 +step:872 train loss:3.991645 +step:873 train loss:4.100266 +step:874 train loss:4.072460 +step:875 train loss:4.017768 +step:876 train loss:4.071817 +step:877 train loss:4.019897 +step:878 train loss:4.030184 +step:879 train loss:4.057693 +step:880 train loss:4.075561 +step:881 train loss:4.077952 +step:882 train loss:4.057257 +step:883 train loss:3.981591 +step:884 train loss:4.089554 +step:885 train loss:4.007885 +step:886 train loss:4.053277 +step:887 train loss:4.083564 +step:888 train loss:4.086427 +step:889 train loss:4.020980 +step:890 train loss:4.051488 +step:891 train loss:4.049764 +step:892 train loss:4.043773 +step:893 train loss:4.032475 +step:894 train loss:4.008998 +step:895 train loss:4.056482 +step:896 train loss:4.034411 +step:897 train loss:4.049810 +step:898 train loss:4.039113 +step:899 train loss:3.984885 +step:900 train loss:4.028465 +step:901 train loss:4.006880 +step:902 train loss:4.028611 +step:903 train loss:4.049582 +step:904 train loss:4.012668 
+step:905 train loss:4.016956 +step:906 train loss:4.063762 +step:907 train loss:3.975857 +step:908 train loss:4.009459 +step:909 train loss:4.080301 +step:910 train loss:4.059471 +step:911 train loss:4.054864 +step:912 train loss:4.032146 +step:913 train loss:4.065558 +step:914 train loss:4.061028 +step:915 train loss:4.017610 +step:916 train loss:4.183540 +step:917 train loss:4.040453 +step:918 train loss:4.105834 +step:919 train loss:4.038365 +step:920 train loss:4.025355 +step:921 train loss:4.006507 +step:922 train loss:3.958550 +step:923 train loss:3.960631 +step:924 train loss:3.964332 +step:925 train loss:4.043291 +step:926 train loss:4.052156 +step:927 train loss:4.056966 +step:928 train loss:4.094941 +step:929 train loss:4.074646 +step:930 train loss:3.978888 +step:931 train loss:4.016965 +step:932 train loss:4.071127 +step:933 train loss:4.041296 +step:934 train loss:3.981688 +step:935 train loss:3.957862 +step:936 train loss:3.985264 +step:937 train loss:4.053936 +step:938 train loss:3.994452 +step:939 train loss:4.012679 +step:940 train loss:4.036640 +step:941 train loss:3.989533 +step:942 train loss:3.987751 +step:943 train loss:4.014359 +step:944 train loss:4.033015 +step:945 train loss:4.045807 +step:946 train loss:4.035227 +step:947 train loss:4.044936 +step:948 train loss:3.971289 +step:949 train loss:4.104496 +step:950 train loss:4.016124 +step:951 train loss:3.999799 +step:952 train loss:4.002316 +step:953 train loss:4.013534 +step:954 train loss:3.991349 +step:955 train loss:4.031461 +step:956 train loss:3.981845 +step:957 train loss:4.020658 +step:958 train loss:3.919389 +step:959 train loss:3.998367 +step:960 train loss:4.005068 +step:961 train loss:3.987183 +step:962 train loss:3.932442 +step:963 train loss:3.985764 +step:964 train loss:3.911364 +step:965 train loss:3.940648 +step:966 train loss:3.906698 +step:967 train loss:3.925544 +step:968 train loss:3.996570 +step:969 train loss:4.007578 +step:970 train loss:4.067130 +step:971 train loss:3.949920 +step:972 train loss:4.075563 +step:973 train loss:3.982767 +step:974 train loss:3.887302 +step:975 train loss:4.034405 +step:976 train loss:3.991146 +step:977 train loss:3.950807 +step:978 train loss:3.909849 +step:979 train loss:3.908443 +step:980 train loss:3.966973 +step:981 train loss:3.859931 +step:982 train loss:3.947640 +step:983 train loss:3.925992 +step:984 train loss:3.913874 +step:985 train loss:3.968848 +step:986 train loss:3.939776 +step:987 train loss:3.996508 +step:988 train loss:3.980823 +step:989 train loss:3.953206 +step:990 train loss:3.925348 +step:991 train loss:3.969437 +step:992 train loss:3.903090 +step:993 train loss:4.009359 +step:994 train loss:3.872056 +step:995 train loss:3.969031 +step:996 train loss:3.911437 +step:997 train loss:3.914406 +step:998 train loss:3.975405 +step:999 train loss:4.087520 +step:1000 validation loss:3.925907 total_sharp:8.0614e-03 L1_sharp:1.0526e-02 L2_sharp:4.4477e-03 L3_sharp:1.2797e-03 L4_sharp:1.7305e-03 L5_sharp:1.8669e-03 L6_sharp:1.9211e-03 L7_sharp:1.7558e-03 L8_sharp:1.3324e-03 L9_sharp:8.0788e-04 L10_sharp:6.5125e-04 L11_sharp:6.0815e-04 L12_sharp:7.4679e-04 total_fnorm:2.4117e+00 total_l1_linf:2.0642e+04 total_spectral:2.4117e+00 L1_fnorm:6.0891e-01 L2_fnorm:5.4024e-01 L3_fnorm:5.3648e-01 L4_fnorm:5.4598e-01 L5_fnorm:5.7474e-01 L6_fnorm:5.8086e-01 L7_fnorm:5.9117e-01 L8_fnorm:5.9110e-01 L9_fnorm:5.9866e-01 L10_fnorm:6.0017e-01 L11_fnorm:5.9966e-01 L12_fnorm:6.0100e-01 L1_l1linf:4.3226e-01 L2_l1linf:4.1971e-01 L3_l1linf:4.2700e-01 L4_l1linf:3.9483e-01 
L5_l1linf:4.0132e-01 L6_l1linf:4.0691e-01 L7_l1linf:4.0230e-01 L8_l1linf:4.1008e-01 L9_l1linf:4.0257e-01 L10_l1linf:4.0546e-01 L11_l1linf:4.0030e-01 L12_l1linf:3.9824e-01 L1_spectral:1.2061e-02 L2_spectral:1.2066e-02 L3_spectral:1.2064e-02 L4_spectral:1.2044e-02 L5_spectral:1.2046e-02 L6_spectral:1.2060e-02 L7_spectral:1.2041e-02 L8_spectral:1.2045e-02 L9_spectral:1.2047e-02 L10_spectral:1.2043e-02 L11_spectral:1.2045e-02 L12_spectral:1.2045e-02 v_norm:2.4117e+00 cos_v_-g_hvp:6.3725e-02 g_hvp_norm:4.8431e-01 cos_v_-g_t:7.2045e-02 g_t_norm:4.3151e-01 hv_norm:6.4654e-01 cos_v_hv:3.0070e-02 hg_norm:2.1058e+01 cos_g_hg:3.1255e-01 v_par:1.0420e-02 v_perp:2.4116e+00 L1_cos_v_neg_g:5.7465e-02 L1_v_norm:6.0891e-01 L2_cos_v_neg_g:4.5652e-02 L2_v_norm:5.4024e-01 L3_cos_v_neg_g:3.9326e-02 L3_v_norm:5.3648e-01 L4_cos_v_neg_g:4.8538e-02 L4_v_norm:5.4598e-01 L5_cos_v_neg_g:6.1101e-02 L5_v_norm:5.7474e-01 L6_cos_v_neg_g:6.6490e-02 L6_v_norm:5.8086e-01 L7_cos_v_neg_g:7.1576e-02 L7_v_norm:5.9117e-01 L8_cos_v_neg_g:7.2736e-02 L8_v_norm:5.9110e-01 L9_cos_v_neg_g:7.5165e-02 L9_v_norm:5.9866e-01 L10_cos_v_neg_g:8.1857e-02 L10_v_norm:6.0017e-01 L11_cos_v_neg_g:8.7677e-02 L11_v_norm:5.9966e-01 L12_cos_v_neg_g:9.2359e-02 L12_v_norm:6.0100e-01 +step:1000 train loss:3.880336 +step:1001 train loss:4.009061 +step:1002 train loss:3.897688 +step:1003 train loss:4.002393 +step:1004 train loss:3.934666 +step:1005 train loss:3.835951 +step:1006 train loss:3.953260 +step:1007 train loss:3.919691 +step:1008 train loss:3.953136 +step:1009 train loss:3.989683 +step:1010 train loss:3.950383 +step:1011 train loss:3.971180 +step:1012 train loss:3.935426 +step:1013 train loss:3.945621 +step:1014 train loss:3.918659 +step:1015 train loss:3.865110 +step:1016 train loss:4.032865 +step:1017 train loss:3.937170 +step:1018 train loss:3.902714 +step:1019 train loss:4.015496 +step:1020 train loss:3.938219 +step:1021 train loss:3.942230 +step:1022 train loss:4.089704 +step:1023 train loss:3.891135 +step:1024 train loss:3.955738 +step:1025 train loss:3.939601 +step:1026 train loss:3.983268 +step:1027 train loss:3.934512 +step:1028 train loss:3.896089 +step:1029 train loss:3.905915 +step:1030 train loss:3.938529 +step:1031 train loss:3.873255 +step:1032 train loss:3.912158 +step:1033 train loss:3.949548 +step:1034 train loss:3.931162 +step:1035 train loss:3.953260 +step:1036 train loss:3.883814 +step:1037 train loss:3.854261 +step:1038 train loss:4.059120 +step:1039 train loss:3.898353 +step:1040 train loss:3.919494 +step:1041 train loss:3.935893 +step:1042 train loss:3.921252 +step:1043 train loss:3.985342 +step:1044 train loss:3.907289 +step:1045 train loss:3.920512 +step:1046 train loss:3.948771 +step:1047 train loss:3.916109 +step:1048 train loss:3.915992 +step:1049 train loss:3.854287 +step:1050 train loss:3.946120 +step:1051 train loss:3.961586 +step:1052 train loss:3.913567 +step:1053 train loss:3.914163 +step:1054 train loss:3.931247 +step:1055 train loss:3.908765 +step:1056 train loss:3.901185 +step:1057 train loss:3.925174 +step:1058 train loss:3.891436 +step:1059 train loss:3.981872 +step:1060 train loss:3.873156 +step:1061 train loss:3.851213 +step:1062 train loss:3.968404 +step:1063 train loss:3.833603 +step:1064 train loss:4.008304 +step:1065 train loss:3.931086 +step:1066 train loss:3.849455 +step:1067 train loss:3.928551 +step:1068 train loss:3.891603 +step:1069 train loss:3.958707 +step:1070 train loss:3.943570 +step:1071 train loss:3.880527 +step:1072 train loss:3.893458 +step:1073 train loss:3.949479 +step:1074 train 
loss:3.841784 +step:1075 train loss:3.953510 +step:1076 train loss:3.889036 +step:1077 train loss:3.902622 +step:1078 train loss:3.914624 +step:1079 train loss:3.891545 +step:1080 train loss:3.943057 +step:1081 train loss:3.916467 +step:1082 train loss:3.846560 +step:1083 train loss:3.917484 +step:1084 train loss:3.923924 +step:1085 train loss:3.844040 +step:1086 train loss:3.944486 +step:1087 train loss:3.894585 +step:1088 train loss:3.910333 +step:1089 train loss:3.896349 +step:1090 train loss:3.912975 +step:1091 train loss:3.923350 +step:1092 train loss:3.878850 +step:1093 train loss:3.866327 +step:1094 train loss:3.889014 +step:1095 train loss:3.838522 +step:1096 train loss:3.847767 +step:1097 train loss:3.892701 +step:1098 train loss:3.886559 +step:1099 train loss:3.927310 +step:1100 train loss:3.905062 +step:1101 train loss:3.882792 +step:1102 train loss:3.949051 +step:1103 train loss:3.901418 +step:1104 train loss:3.874424 +step:1105 train loss:3.872384 +step:1106 train loss:3.852678 +step:1107 train loss:3.818556 +step:1108 train loss:3.940978 +step:1109 train loss:3.913748 +step:1110 train loss:3.884363 +step:1111 train loss:3.912691 +step:1112 train loss:3.847732 +step:1113 train loss:3.877472 +step:1114 train loss:3.858796 +step:1115 train loss:3.848861 +step:1116 train loss:3.984659 +step:1117 train loss:3.922698 +step:1118 train loss:3.900059 +step:1119 train loss:3.906561 +step:1120 train loss:3.896539 +step:1121 train loss:3.901657 +step:1122 train loss:3.852509 +step:1123 train loss:3.875430 +step:1124 train loss:3.847836 +step:1125 train loss:3.873146 +step:1126 train loss:3.841282 +step:1127 train loss:3.937028 +step:1128 train loss:3.861060 +step:1129 train loss:3.911890 +step:1130 train loss:3.898462 +step:1131 train loss:3.882622 +step:1132 train loss:3.848429 +step:1133 train loss:3.853775 +step:1134 train loss:3.923055 +step:1135 train loss:3.862118 +step:1136 train loss:3.878679 +step:1137 train loss:3.904738 +step:1138 train loss:3.866230 +step:1139 train loss:3.855445 +step:1140 train loss:3.875316 +step:1141 train loss:3.906572 +step:1142 train loss:3.789084 +step:1143 train loss:3.862715 +step:1144 train loss:3.788237 +step:1145 train loss:3.840850 +step:1146 train loss:3.838138 +step:1147 train loss:3.925352 +step:1148 train loss:3.852650 +step:1149 train loss:3.826165 +step:1150 train loss:4.009877 +step:1151 train loss:3.860332 +step:1152 train loss:3.818974 +step:1153 train loss:3.864383 +step:1154 train loss:3.863269 +step:1155 train loss:3.987984 +step:1156 train loss:3.857834 +step:1157 train loss:3.867445 +step:1158 train loss:3.906651 +step:1159 train loss:3.835484 +step:1160 train loss:3.849010 +step:1161 train loss:3.898209 +step:1162 train loss:3.937666 +step:1163 train loss:3.848540 +step:1164 train loss:3.856070 +step:1165 train loss:3.812609 +step:1166 train loss:3.887737 +step:1167 train loss:3.851080 +step:1168 train loss:3.774460 +step:1169 train loss:3.872169 +step:1170 train loss:3.866125 +step:1171 train loss:3.818449 +step:1172 train loss:3.899934 +step:1173 train loss:3.841632 +step:1174 train loss:3.862940 +step:1175 train loss:3.846527 +step:1176 train loss:3.906068 +step:1177 train loss:3.857391 +step:1178 train loss:3.847516 +step:1179 train loss:3.784610 +step:1180 train loss:3.905292 +step:1181 train loss:3.827151 +step:1182 train loss:3.875933 +step:1183 train loss:3.852452 +step:1184 train loss:3.842098 +step:1185 train loss:3.906725 +step:1186 train loss:3.878459 +step:1187 train loss:3.847229 +step:1188 train loss:3.884455 
+step:1189 train loss:3.857246 +step:1190 train loss:3.860355 +step:1191 train loss:3.928931 +step:1192 train loss:3.870911 +step:1193 train loss:3.805123 +step:1194 train loss:3.872452 +step:1195 train loss:3.894990 +step:1196 train loss:3.871295 +step:1197 train loss:3.873066 +step:1198 train loss:3.862118 +step:1199 train loss:3.782678 +step:1200 train loss:3.851806 +step:1201 train loss:3.788167 +step:1202 train loss:3.832344 +step:1203 train loss:3.825731 +step:1204 train loss:3.841819 +step:1205 train loss:3.940410 +step:1206 train loss:3.900687 +step:1207 train loss:3.774627 +step:1208 train loss:3.884779 +step:1209 train loss:3.785081 +step:1210 train loss:3.871162 +step:1211 train loss:3.816335 +step:1212 train loss:3.829420 +step:1213 train loss:3.889086 +step:1214 train loss:3.813905 +step:1215 train loss:3.848243 +step:1216 train loss:3.874153 +step:1217 train loss:3.791896 +step:1218 train loss:3.886776 +step:1219 train loss:3.818312 +step:1220 train loss:3.887451 +step:1221 train loss:3.846482 +step:1222 train loss:3.848531 +step:1223 train loss:3.892559 +step:1224 train loss:3.836656 +step:1225 train loss:3.883833 +step:1226 train loss:3.850254 +step:1227 train loss:3.795927 +step:1228 train loss:3.881364 +step:1229 train loss:3.814716 +step:1230 train loss:3.837128 +step:1231 train loss:3.849563 +step:1232 train loss:3.730610 +step:1233 train loss:3.852376 +step:1234 train loss:3.835386 +step:1235 train loss:3.904965 +step:1236 train loss:3.856426 +step:1237 train loss:3.918504 +step:1238 train loss:3.834745 +step:1239 train loss:3.816473 +step:1240 train loss:3.848902 +step:1241 train loss:3.843385 +step:1242 train loss:3.843096 +step:1243 train loss:3.766902 +step:1244 train loss:3.793348 +step:1245 train loss:3.877087 +step:1246 train loss:3.795773 +step:1247 train loss:3.876417 +step:1248 train loss:3.838211 +step:1249 train loss:3.801872 +step:1250 validation loss:3.823270 +step:1250 train loss:3.813494 +step:1251 train loss:3.852909 +step:1252 train loss:3.858719 +step:1253 train loss:3.884982 +step:1254 train loss:3.833067 +step:1255 train loss:3.945621 +step:1256 train loss:3.869231 +step:1257 train loss:3.867597 +step:1258 train loss:3.853631 +step:1259 train loss:3.800371 +step:1260 train loss:3.891402 +step:1261 train loss:3.792994 +step:1262 train loss:3.782237 +step:1263 train loss:3.806827 +step:1264 train loss:3.892014 +step:1265 train loss:3.809522 +step:1266 train loss:3.923701 +step:1267 train loss:3.854348 +step:1268 train loss:3.787283 +step:1269 train loss:3.887460 +step:1270 train loss:3.829677 +step:1271 train loss:3.826622 +step:1272 train loss:3.865945 +step:1273 train loss:3.806401 +step:1274 train loss:3.783794 +step:1275 train loss:3.898143 +step:1276 train loss:3.775372 +step:1277 train loss:3.821904 +step:1278 train loss:3.852892 +step:1279 train loss:3.773957 +step:1280 train loss:3.868208 +step:1281 train loss:3.846252 +step:1282 train loss:3.750768 +step:1283 train loss:3.848414 +step:1284 train loss:3.782093 +step:1285 train loss:3.890954 +step:1286 train loss:3.878515 +step:1287 train loss:3.799785 +step:1288 train loss:3.836740 +step:1289 train loss:3.809290 +step:1290 train loss:3.837656 +step:1291 train loss:3.795786 +step:1292 train loss:3.779567 +step:1293 train loss:3.759020 +step:1294 train loss:3.862194 +step:1295 train loss:3.816428 +step:1296 train loss:3.751056 +step:1297 train loss:3.819277 +step:1298 train loss:3.821359 +step:1299 train loss:3.841098 +step:1300 train loss:3.820600 +step:1301 train loss:3.896001 +step:1302 
train loss:3.842200 +step:1303 train loss:3.826695 +step:1304 train loss:3.811094 +step:1305 train loss:3.838544 +step:1306 train loss:3.835920 +step:1307 train loss:3.783176 +step:1308 train loss:3.810643 +step:1309 train loss:3.813835 +step:1310 train loss:3.837844 +step:1311 train loss:3.792058 +step:1312 train loss:3.802759 +step:1313 train loss:3.752119 +step:1314 train loss:3.822243 +step:1315 train loss:3.831263 +step:1316 train loss:3.765141 +step:1317 train loss:3.843340 +step:1318 train loss:3.816533 +step:1319 train loss:3.826914 +step:1320 train loss:3.840441 +step:1321 train loss:3.836467 +step:1322 train loss:3.904759 +step:1323 train loss:3.856723 +step:1324 train loss:3.768946 +step:1325 train loss:3.852399 +step:1326 train loss:3.992553 +step:1327 train loss:3.904486 +step:1328 train loss:3.887841 +step:1329 train loss:3.785446 +step:1330 train loss:3.787679 +step:1331 train loss:3.835774 +step:1332 train loss:3.771542 +step:1333 train loss:3.809747 +step:1334 train loss:3.845582 +step:1335 train loss:3.883598 +step:1336 train loss:3.865602 +step:1337 train loss:3.818381 +step:1338 train loss:3.833392 +step:1339 train loss:3.898740 +step:1340 train loss:3.909738 +step:1341 train loss:3.878219 +step:1342 train loss:3.914813 +step:1343 train loss:3.825257 +step:1344 train loss:3.802496 +step:1345 train loss:3.803338 +step:1346 train loss:3.922492 +step:1347 train loss:3.806378 +step:1348 train loss:3.847543 +step:1349 train loss:3.884299 +step:1350 train loss:3.875289 +step:1351 train loss:3.848577 +step:1352 train loss:3.877836 +step:1353 train loss:3.847913 +step:1354 train loss:3.819336 +step:1355 train loss:3.763143 +step:1356 train loss:3.876394 +step:1357 train loss:3.810664 +step:1358 train loss:3.806615 +step:1359 train loss:3.852230 +step:1360 train loss:3.842119 +step:1361 train loss:3.863843 +step:1362 train loss:3.849347 +step:1363 train loss:3.803061 +step:1364 train loss:3.875193 +step:1365 train loss:3.779959 +step:1366 train loss:3.826455 +step:1367 train loss:3.965499 +step:1368 train loss:3.805120 +step:1369 train loss:3.815249 +step:1370 train loss:3.807624 +step:1371 train loss:3.838812 +step:1372 train loss:3.813629 +step:1373 train loss:3.899618 +step:1374 train loss:3.839976 +step:1375 train loss:3.786438 +step:1376 train loss:3.830238 +step:1377 train loss:3.825245 +step:1378 train loss:3.801935 +step:1379 train loss:3.856521 +step:1380 train loss:3.753574 +step:1381 train loss:3.853175 +step:1382 train loss:3.845469 +step:1383 train loss:3.851052 +step:1384 train loss:3.839952 +step:1385 train loss:3.964039 +step:1386 train loss:3.916442 +step:1387 train loss:3.809947 +step:1388 train loss:3.842154 +step:1389 train loss:3.819437 +step:1390 train loss:3.846803 +step:1391 train loss:3.877405 +step:1392 train loss:3.851409 +step:1393 train loss:3.818544 +step:1394 train loss:3.789101 +step:1395 train loss:3.838137 +step:1396 train loss:3.851985 +step:1397 train loss:3.774385 +step:1398 train loss:3.854863 +step:1399 train loss:3.853379 +step:1400 train loss:3.767064 +step:1401 train loss:3.859297 +step:1402 train loss:3.816484 +step:1403 train loss:3.823722 +step:1404 train loss:3.795485 +step:1405 train loss:3.865810 +step:1406 train loss:3.807834 +step:1407 train loss:3.788935 +step:1408 train loss:3.784156 +step:1409 train loss:3.829817 +step:1410 train loss:3.799109 +step:1411 train loss:3.799701 +step:1412 train loss:3.790440 +step:1413 train loss:3.921566 +step:1414 train loss:3.840489 +step:1415 train loss:3.798633 +step:1416 train loss:3.854392 
+step:1417 train loss:3.806175 +step:1418 train loss:3.817099 +step:1419 train loss:3.799220 +step:1420 train loss:3.813644 +step:1421 train loss:3.825052 +step:1422 train loss:3.799031 +step:1423 train loss:3.816352 +step:1424 train loss:3.813059 +step:1425 train loss:3.863986 +step:1426 train loss:3.824058 +step:1427 train loss:3.778325 +step:1428 train loss:3.852108 +step:1429 train loss:3.813329 +step:1430 train loss:3.839161 +step:1431 train loss:3.760957 +step:1432 train loss:3.772952 +step:1433 train loss:3.837837 +step:1434 train loss:3.765255 +step:1435 train loss:3.890040 +step:1436 train loss:3.914016 +step:1437 train loss:3.828488 +step:1438 train loss:3.817633 +step:1439 train loss:3.797071 +step:1440 train loss:3.784985 +step:1441 train loss:3.792536 +step:1442 train loss:3.840368 +step:1443 train loss:3.797314 +step:1444 train loss:3.754101 +step:1445 train loss:3.772077 +step:1446 train loss:3.789459 +step:1447 train loss:3.834640 +step:1448 train loss:3.841872 +step:1449 train loss:3.813757 +step:1450 train loss:3.833107 +step:1451 train loss:3.774759 +step:1452 train loss:3.987238 +step:1453 train loss:3.840689 +step:1454 train loss:3.781955 +step:1455 train loss:3.802136 +step:1456 train loss:3.837679 +step:1457 train loss:3.824748 +step:1458 train loss:3.800802 +step:1459 train loss:3.821262 +step:1460 train loss:3.802672 +step:1461 train loss:3.737964 +step:1462 train loss:3.759838 +step:1463 train loss:3.783055 +step:1464 train loss:3.834833 +step:1465 train loss:3.814449 +step:1466 train loss:3.814651 +step:1467 train loss:3.823462 +step:1468 train loss:3.851335 +step:1469 train loss:3.761120 +step:1470 train loss:3.842216 +step:1471 train loss:3.779659 +step:1472 train loss:3.844201 +step:1473 train loss:3.783112 +step:1474 train loss:3.871860 +step:1475 train loss:3.813723 +step:1476 train loss:3.850163 +step:1477 train loss:3.754497 +step:1478 train loss:3.779656 +step:1479 train loss:3.805165 +step:1480 train loss:3.919661 +step:1481 train loss:3.799879 +step:1482 train loss:3.818553 +step:1483 train loss:3.779502 +step:1484 train loss:3.772661 +step:1485 train loss:3.794564 +step:1486 train loss:3.861452 +step:1487 train loss:3.832560 +step:1488 train loss:3.785865 +step:1489 train loss:3.841330 +step:1490 train loss:3.869178 +step:1491 train loss:3.771073 +step:1492 train loss:3.845670 +step:1493 train loss:3.785636 +step:1494 train loss:3.820246 +step:1495 train loss:3.848881 +step:1496 train loss:3.824553 +step:1497 train loss:3.839499 +step:1498 train loss:3.774577 +step:1499 train loss:3.772551 +step:1500 validation loss:3.746950 total_sharp:5.1788e-03 L1_sharp:6.2899e-03 L2_sharp:1.4197e-03 L3_sharp:1.3051e-03 L4_sharp:1.2110e-03 L5_sharp:1.1660e-03 L6_sharp:1.3956e-03 L7_sharp:1.7186e-03 L8_sharp:9.2202e-04 L9_sharp:5.6875e-04 L10_sharp:4.1394e-04 L11_sharp:4.3360e-04 L12_sharp:4.2684e-04 total_fnorm:2.4093e+00 total_l1_linf:2.0618e+04 total_spectral:2.4093e+00 L1_fnorm:5.9447e-01 L2_fnorm:5.2888e-01 L3_fnorm:5.2443e-01 L4_fnorm:5.4733e-01 L5_fnorm:5.7375e-01 L6_fnorm:5.8542e-01 L7_fnorm:5.9313e-01 L8_fnorm:5.9601e-01 L9_fnorm:6.0025e-01 L10_fnorm:6.0111e-01 L11_fnorm:6.0179e-01 L12_fnorm:6.0322e-01 L1_l1linf:4.2917e-01 L2_l1linf:4.1119e-01 L3_l1linf:4.0818e-01 L4_l1linf:3.9198e-01 L5_l1linf:4.0150e-01 L6_l1linf:4.1250e-01 L7_l1linf:4.1258e-01 L8_l1linf:4.1669e-01 L9_l1linf:4.1142e-01 L10_l1linf:4.1169e-01 L11_l1linf:4.0970e-01 L12_l1linf:3.9987e-01 L1_spectral:1.2048e-02 L2_spectral:1.2057e-02 L3_spectral:1.2057e-02 L4_spectral:1.2048e-02 
L5_spectral:1.2050e-02 L6_spectral:1.2066e-02 L7_spectral:1.2044e-02 L8_spectral:1.2046e-02 L9_spectral:1.2044e-02 L10_spectral:1.2043e-02 L11_spectral:1.2046e-02 L12_spectral:1.2043e-02 v_norm:2.4093e+00 cos_v_-g_hvp:4.4863e-02 g_hvp_norm:5.1603e-01 cos_v_-g_t:5.1280e-02 g_t_norm:4.5420e-01 hv_norm:5.9123e-01 cos_v_hv:2.1104e-02 hg_norm:1.3838e+01 cos_g_hg:6.3673e-01 v_par:7.5168e-03 v_perp:2.4093e+00 L1_cos_v_neg_g:3.1459e-02 L1_v_norm:5.9447e-01 L2_cos_v_neg_g:3.6180e-02 L2_v_norm:5.2888e-01 L3_cos_v_neg_g:3.3325e-02 L3_v_norm:5.2443e-01 L4_cos_v_neg_g:3.5884e-02 L4_v_norm:5.4733e-01 L5_cos_v_neg_g:4.3375e-02 L5_v_norm:5.7375e-01 L6_cos_v_neg_g:4.5771e-02 L6_v_norm:5.8542e-01 L7_cos_v_neg_g:4.8320e-02 L7_v_norm:5.9313e-01 L8_cos_v_neg_g:4.6774e-02 L8_v_norm:5.9601e-01 L9_cos_v_neg_g:5.2719e-02 L9_v_norm:6.0025e-01 L10_cos_v_neg_g:5.7730e-02 L10_v_norm:6.0111e-01 L11_cos_v_neg_g:6.5855e-02 L11_v_norm:6.0179e-01 L12_cos_v_neg_g:8.5751e-02 L12_v_norm:6.0322e-01 +step:1500 train loss:3.801377 +step:1501 train loss:3.738930 +step:1502 train loss:3.785012 +step:1503 train loss:3.875830 +step:1504 train loss:3.732470 +step:1505 train loss:3.796480 +step:1506 train loss:3.798114 +step:1507 train loss:3.761775 +step:1508 train loss:3.699984 +step:1509 train loss:3.863765 +step:1510 train loss:3.848955 +step:1511 train loss:3.796262 +step:1512 train loss:3.797251 +step:1513 train loss:3.857128 +step:1514 train loss:3.833427 +step:1515 train loss:3.914265 +step:1516 train loss:3.792579 +step:1517 train loss:3.846510 +step:1518 train loss:3.846774 +step:1519 train loss:3.758246 +step:1520 train loss:3.774146 +step:1521 train loss:3.820030 +step:1522 train loss:3.708775 +step:1523 train loss:3.830549 +step:1524 train loss:3.831821 +step:1525 train loss:3.769813 +step:1526 train loss:3.866770 +step:1527 train loss:3.737870 +step:1528 train loss:3.803584 +step:1529 train loss:3.787833 +step:1530 train loss:3.822568 +step:1531 train loss:3.699583 +step:1532 train loss:3.745095 +step:1533 train loss:3.766438 +step:1534 train loss:3.765297 +step:1535 train loss:3.782655 +step:1536 train loss:3.725207 +step:1537 train loss:3.837321 +step:1538 train loss:3.813614 +step:1539 train loss:3.820094 +step:1540 train loss:3.801139 +step:1541 train loss:3.788892 +step:1542 train loss:3.777030 +step:1543 train loss:3.754170 +step:1544 train loss:3.700548 +step:1545 train loss:3.718642 +step:1546 train loss:3.756701 +step:1547 train loss:3.778188 +step:1548 train loss:3.760367 +step:1549 train loss:3.732909 +step:1550 train loss:3.782097 +step:1551 train loss:3.841229 +step:1552 train loss:3.836078 +step:1553 train loss:3.765466 +step:1554 train loss:3.766104 +step:1555 train loss:3.786898 +step:1556 train loss:3.733100 +step:1557 train loss:3.752980 +step:1558 train loss:3.771227 +step:1559 train loss:3.760427 +step:1560 train loss:3.822297 +step:1561 train loss:3.806575 +step:1562 train loss:3.800054 +step:1563 train loss:3.750434 +step:1564 train loss:3.710143 +step:1565 train loss:3.780218 +step:1566 train loss:3.818093 +step:1567 train loss:3.759758 +step:1568 train loss:3.814653 +step:1569 train loss:3.762945 +step:1570 train loss:3.743062 +step:1571 train loss:3.726346 +step:1572 train loss:3.757313 +step:1573 train loss:3.743053 +step:1574 train loss:3.770461 +step:1575 train loss:3.868920 +step:1576 train loss:3.758551 +step:1577 train loss:3.806749 +step:1578 train loss:3.741328 +step:1579 train loss:3.792000 +step:1580 train loss:3.746681 +step:1581 train loss:3.760010 +step:1582 train loss:3.719048 
+step:1583 train loss:3.752078 +step:1584 train loss:3.697441 +step:1585 train loss:3.699449 +step:1586 train loss:3.710250 +step:1587 train loss:3.749869 +step:1588 train loss:3.731609 +step:1589 train loss:3.703152 +step:1590 train loss:3.748760 +step:1591 train loss:3.792173 +step:1592 train loss:3.744740 +step:1593 train loss:3.757196 +step:1594 train loss:3.748830 +step:1595 train loss:3.730754 +step:1596 train loss:3.837640 +step:1597 train loss:3.766760 +step:1598 train loss:3.779373 +step:1599 train loss:3.790866 +step:1600 train loss:3.789989 +step:1601 train loss:3.725295 +step:1602 train loss:3.786428 +step:1603 train loss:3.718426 +step:1604 train loss:3.772029 +step:1605 train loss:3.775900 +step:1606 train loss:3.692517 +step:1607 train loss:3.784550 +step:1608 train loss:3.733901 +step:1609 train loss:3.727167 +step:1610 train loss:3.754344 +step:1611 train loss:3.741179 +step:1612 train loss:3.793044 +step:1613 train loss:3.767894 +step:1614 train loss:3.733021 +step:1615 train loss:3.764502 +step:1616 train loss:3.711367 +step:1617 train loss:3.800550 +step:1618 train loss:3.792466 +step:1619 train loss:3.719709 +step:1620 train loss:3.799824 +step:1621 train loss:3.771854 +step:1622 train loss:3.730420 +step:1623 train loss:3.913291 +step:1624 train loss:3.706921 +step:1625 train loss:3.767115 +step:1626 train loss:3.772649 +step:1627 train loss:3.709792 +step:1628 train loss:3.733418 +step:1629 train loss:3.741176 +step:1630 train loss:3.724329 +step:1631 train loss:3.756995 +step:1632 train loss:3.753625 +step:1633 train loss:3.811446 +step:1634 train loss:3.776695 +step:1635 train loss:3.714796 +step:1636 train loss:3.790564 +step:1637 train loss:3.763335 +step:1638 train loss:3.636280 +step:1639 train loss:3.822235 +step:1640 train loss:3.700176 +step:1641 train loss:3.731943 +step:1642 train loss:3.797489 +step:1643 train loss:3.685579 +step:1644 train loss:3.736082 +step:1645 train loss:3.728922 +step:1646 train loss:3.787445 +step:1647 train loss:3.753732 +step:1648 train loss:3.686738 +step:1649 train loss:3.741086 +step:1650 train loss:3.771616 +step:1651 train loss:3.715218 +step:1652 train loss:3.819744 +step:1653 train loss:3.737937 +step:1654 train loss:3.668067 +step:1655 train loss:3.778256 +step:1656 train loss:3.706706 +step:1657 train loss:3.721596 +step:1658 train loss:3.775913 +step:1659 train loss:3.691626 +step:1660 train loss:3.747667 +step:1661 train loss:3.745203 +step:1662 train loss:3.655045 +step:1663 train loss:3.780698 +step:1664 train loss:3.720115 +step:1665 train loss:3.681990 +step:1666 train loss:3.852564 +step:1667 train loss:3.706661 +step:1668 train loss:3.757671 +step:1669 train loss:3.712160 +step:1670 train loss:3.715355 +step:1671 train loss:3.750009 +step:1672 train loss:3.768499 +step:1673 train loss:3.755915 +step:1674 train loss:3.766204 +step:1675 train loss:3.708886 +step:1676 train loss:3.696398 +step:1677 train loss:3.759571 +step:1678 train loss:3.720786 +step:1679 train loss:3.727852 +step:1680 train loss:3.684490 +step:1681 train loss:3.773225 +step:1682 train loss:3.665984 +step:1683 train loss:3.729713 +step:1684 train loss:3.697394 +step:1685 train loss:3.776021 +step:1686 train loss:3.789679 +step:1687 train loss:3.794186 +step:1688 train loss:3.814939 +step:1689 train loss:3.717007 +step:1690 train loss:3.722101 +step:1691 train loss:3.773560 +step:1692 train loss:3.723371 +step:1693 train loss:3.727053 +step:1694 train loss:3.793268 +step:1695 train loss:3.744402 +step:1696 train loss:3.737626 +step:1697 train 
loss:3.705781 +step:1698 train loss:3.746488 +step:1699 train loss:3.725843 +step:1700 train loss:3.808468 +step:1701 train loss:3.715279 +step:1702 train loss:3.693674 +step:1703 train loss:3.743808 +step:1704 train loss:3.717436 +step:1705 train loss:3.729945 +step:1706 train loss:3.802785 +step:1707 train loss:3.701683 +step:1708 train loss:3.693980 +step:1709 train loss:3.739042 +step:1710 train loss:3.777668 +step:1711 train loss:3.721620 +step:1712 train loss:3.747728 +step:1713 train loss:3.741876 +step:1714 train loss:3.725954 +step:1715 train loss:3.745219 +step:1716 train loss:3.751251 +step:1717 train loss:3.791641 +step:1718 train loss:3.737384 +step:1719 train loss:3.710637 +step:1720 train loss:3.815144 +step:1721 train loss:3.715067 +step:1722 train loss:3.720037 +step:1723 train loss:3.750591 +step:1724 train loss:3.726445 +step:1725 train loss:3.805088 +step:1726 train loss:3.722236 +step:1727 train loss:3.760763 +step:1728 train loss:3.793739 +step:1729 train loss:3.940164 +step:1730 train loss:3.772889 +step:1731 train loss:3.771207 +step:1732 train loss:3.741005 +step:1733 train loss:3.731781 +step:1734 train loss:3.724715 +step:1735 train loss:3.734086 +step:1736 train loss:3.781809 +step:1737 train loss:3.744848 +step:1738 train loss:3.689341 +step:1739 train loss:3.739772 +step:1740 train loss:3.719104 +step:1741 train loss:3.743338 +step:1742 train loss:3.731935 +step:1743 train loss:3.765565 +step:1744 train loss:3.731429 +step:1745 train loss:3.793867 +step:1746 train loss:3.783014 +step:1747 train loss:3.712699 +step:1748 train loss:3.701664 +step:1749 train loss:3.780169 +step:1750 validation loss:3.695146 +step:1750 train loss:3.791107 +step:1751 train loss:3.686938 +step:1752 train loss:3.725694 +step:1753 train loss:3.791415 +step:1754 train loss:3.743453 +step:1755 train loss:3.705350 +step:1756 train loss:3.721982 +step:1757 train loss:3.783819 +step:1758 train loss:3.744164 +step:1759 train loss:3.734637 +step:1760 train loss:3.811100 +step:1761 train loss:3.774087 +step:1762 train loss:3.745919 +step:1763 train loss:3.769334 +step:1764 train loss:3.773540 +step:1765 train loss:3.733784 +step:1766 train loss:3.733154 +step:1767 train loss:3.744217 +step:1768 train loss:3.656818 +step:1769 train loss:3.736092 +step:1770 train loss:3.718298 +step:1771 train loss:3.747729 +step:1772 train loss:3.788399 +step:1773 train loss:3.738399 +step:1774 train loss:3.789401 +step:1775 train loss:3.768880 +step:1776 train loss:3.785976 +step:1777 train loss:3.868155 +step:1778 train loss:3.722930 +step:1779 train loss:3.876852 +step:1780 train loss:3.790567 +step:1781 train loss:3.776144 +step:1782 train loss:3.835870 +step:1783 train loss:3.790890 +step:1784 train loss:3.750895 +step:1785 train loss:3.790369 +step:1786 train loss:3.748664 +step:1787 train loss:3.759188 +step:1788 train loss:3.764307 +step:1789 train loss:3.725159 +step:1790 train loss:3.750315 +step:1791 train loss:3.765327 +step:1792 train loss:3.741743 +step:1793 train loss:3.750780 +step:1794 train loss:3.773602 +step:1795 train loss:3.792160 +step:1796 train loss:3.717022 +step:1797 train loss:3.792752 +step:1798 train loss:3.751428 +step:1799 train loss:3.783593 +step:1800 train loss:3.753859 +step:1801 train loss:3.662672 +step:1802 train loss:3.757495 +step:1803 train loss:3.766748 +step:1804 train loss:3.757509 +step:1805 train loss:3.733736 +step:1806 train loss:3.717496 +step:1807 train loss:3.702578 +step:1808 train loss:3.768551 +step:1809 train loss:3.755523 +step:1810 train loss:3.765310 
+step:1811 train loss:3.714062 +step:1812 train loss:3.717757 +step:1813 train loss:3.688633 +step:1814 train loss:3.742294 +step:1815 train loss:3.744782 +step:1816 train loss:3.710604 +step:1817 train loss:3.784669 +step:1818 train loss:3.782428 +step:1819 train loss:3.719880 +step:1820 train loss:3.706935 +step:1821 train loss:3.658062 +step:1822 train loss:3.753082 +step:1823 train loss:3.722903 +step:1824 train loss:3.757241 +step:1825 train loss:3.808871 +step:1826 train loss:3.706924 +step:1827 train loss:3.698500 +step:1828 train loss:3.742675 +step:1829 train loss:3.728727 +step:1830 train loss:3.800039 +step:1831 train loss:3.715590 +step:1832 train loss:3.732440 +step:1833 train loss:3.813188 +step:1834 train loss:3.756338 +step:1835 train loss:3.758934 +step:1836 train loss:3.713089 +step:1837 train loss:3.714119 +step:1838 train loss:3.744020 +step:1839 train loss:3.732661 +step:1840 train loss:3.745545 +step:1841 train loss:3.716247 +step:1842 train loss:3.696594 +step:1843 train loss:3.756111 +step:1844 train loss:3.752537 +step:1845 train loss:3.746042 +step:1846 train loss:3.868347 +step:1847 train loss:3.682893 +step:1848 train loss:3.834395 +step:1849 train loss:3.751469 +step:1850 train loss:3.658275 +step:1851 train loss:3.900100 +step:1852 train loss:3.759434 +step:1853 train loss:3.747972 +step:1854 train loss:3.737190 +step:1855 train loss:3.737120 +step:1856 train loss:3.898983 +step:1857 train loss:3.711968 +step:1858 train loss:3.798143 +step:1859 train loss:3.739150 +step:1860 train loss:3.732928 +step:1861 train loss:3.784794 +step:1862 train loss:3.721218 +step:1863 train loss:3.717796 +step:1864 train loss:3.761623 +step:1865 train loss:3.653660 +step:1866 train loss:3.754337 +step:1867 train loss:3.726375 +step:1868 train loss:3.804401 +step:1869 train loss:3.717289 +step:1870 train loss:3.685276 +step:1871 train loss:3.723762 +step:1872 train loss:3.692986 +step:1873 train loss:3.697836 +step:1874 train loss:3.818587 +step:1875 train loss:3.680043 +step:1876 train loss:3.801237 +step:1877 train loss:3.751307 +step:1878 train loss:3.802979 +step:1879 train loss:3.761564 +step:1880 train loss:3.696405 +step:1881 train loss:3.791359 +step:1882 train loss:3.698827 +step:1883 train loss:3.780112 +step:1884 train loss:3.757237 +step:1885 train loss:3.779091 +step:1886 train loss:3.794037 +step:1887 train loss:3.755629 +step:1888 train loss:3.733176 +step:1889 train loss:3.916007 +step:1890 train loss:3.687026 +step:1891 train loss:3.719869 +step:1892 train loss:3.772435 +step:1893 train loss:3.782691 +step:1894 train loss:3.740072 +step:1895 train loss:3.797849 +step:1896 train loss:3.773233 +step:1897 train loss:3.756047 +step:1898 train loss:3.730113 +step:1899 train loss:3.714407 +step:1900 train loss:3.725069 +step:1901 train loss:3.808951 +step:1902 train loss:3.711129 +step:1903 train loss:3.698866 +step:1904 train loss:3.730952 +step:1905 train loss:3.821277 +step:1906 train loss:3.798031 +step:1907 train loss:3.773449 +step:1908 train loss:3.695161 +step:1909 train loss:3.719745 +step:1910 train loss:3.778528 +step:1911 train loss:3.684098 +step:1912 train loss:3.719375 +step:1913 train loss:3.655391 +step:1914 train loss:3.728192 +step:1915 train loss:3.673563 +step:1916 train loss:3.643849 +step:1917 train loss:3.781817 +step:1918 train loss:3.803554 +step:1919 train loss:3.722569 +step:1920 train loss:3.681017 +step:1921 train loss:3.748320 +step:1922 train loss:3.709298 +step:1923 train loss:3.729243 +step:1924 train loss:3.670151 +step:1925 train 
loss:3.743641 +step:1926 train loss:3.702057 +step:1927 train loss:3.750996 +step:1928 train loss:3.730415 +step:1929 train loss:3.734906 +step:1930 train loss:3.722657 +step:1931 train loss:3.676844 +step:1932 train loss:3.730105 +step:1933 train loss:3.715130 +step:1934 train loss:3.807134 +step:1935 train loss:3.710531 +step:1936 train loss:3.718399 +step:1937 train loss:3.711563 +step:1938 train loss:3.731520 +step:1939 train loss:3.705723 +step:1940 train loss:3.679275 +step:1941 train loss:3.723570 +step:1942 train loss:3.768325 +step:1943 train loss:3.710322 +step:1944 train loss:3.764452 +step:1945 train loss:3.928021 +step:1946 train loss:3.724898 +step:1947 train loss:3.666437 +step:1948 train loss:3.707726 +step:1949 train loss:3.678621 +step:1950 train loss:3.676124 +step:1951 train loss:3.626611 +step:1952 train loss:3.714884 +step:1953 train loss:3.734551 +step:1954 train loss:3.703250 +step:1955 train loss:3.748287 +step:1956 train loss:3.701961 +step:1957 train loss:3.725637 +step:1958 train loss:3.739423 +step:1959 train loss:3.728003 +step:1960 train loss:3.717933 +step:1961 train loss:3.847585 +step:1962 train loss:3.740162 +step:1963 train loss:3.707777 +step:1964 train loss:3.740032 +step:1965 train loss:3.725900 +step:1966 train loss:3.746753 +step:1967 train loss:3.793776 +step:1968 train loss:3.671676 +step:1969 train loss:3.864546 +step:1970 train loss:3.691340 +step:1971 train loss:3.747134 +step:1972 train loss:3.709083 +step:1973 train loss:3.656608 +step:1974 train loss:3.692117 +step:1975 train loss:3.675068 +step:1976 train loss:3.704038 +step:1977 train loss:3.740490 +step:1978 train loss:3.743981 +step:1979 train loss:3.715512 +step:1980 train loss:3.737320 +step:1981 train loss:3.704699 +step:1982 train loss:3.733079 +step:1983 train loss:3.707892 +step:1984 train loss:3.730565 +step:1985 train loss:3.694627 +step:1986 train loss:3.723381 +step:1987 train loss:3.694415 +step:1988 train loss:3.722678 +step:1989 train loss:3.793240 +step:1990 train loss:3.709369 +step:1991 train loss:3.675019 +step:1992 train loss:3.766998 +step:1993 train loss:3.684398 +step:1994 train loss:3.719703 +step:1995 train loss:3.809812 +step:1996 train loss:3.723482 +step:1997 train loss:3.734150 +step:1998 train loss:3.745901 +step:1999 train loss:3.700160 +step:2000 validation loss:3.657363 total_sharp:8.6804e-03 L1_sharp:9.1697e-03 L2_sharp:2.4755e-03 L3_sharp:2.2205e-03 L4_sharp:2.1193e-03 L5_sharp:1.6244e-03 L6_sharp:1.9332e-03 L7_sharp:1.8213e-03 L8_sharp:1.7988e-03 L9_sharp:9.1633e-04 L10_sharp:5.9893e-04 L11_sharp:6.0997e-04 L12_sharp:7.7507e-04 total_fnorm:2.4009e+00 total_l1_linf:2.0543e+04 total_spectral:2.4009e+00 L1_fnorm:5.8770e-01 L2_fnorm:5.0535e-01 L3_fnorm:5.1248e-01 L4_fnorm:5.5147e-01 L5_fnorm:5.7756e-01 L6_fnorm:5.8779e-01 L7_fnorm:5.9739e-01 L8_fnorm:5.9454e-01 L9_fnorm:5.9825e-01 L10_fnorm:5.9680e-01 L11_fnorm:5.9869e-01 L12_fnorm:6.0267e-01 L1_l1linf:4.3969e-01 L2_l1linf:4.0834e-01 L3_l1linf:3.9899e-01 L4_l1linf:3.9490e-01 L5_l1linf:4.0594e-01 L6_l1linf:4.1145e-01 L7_l1linf:4.1441e-01 L8_l1linf:4.1105e-01 L9_l1linf:4.2086e-01 L10_l1linf:4.1188e-01 L11_l1linf:4.0844e-01 L12_l1linf:3.9720e-01 L1_spectral:1.2056e-02 L2_spectral:1.2054e-02 L3_spectral:1.2382e-02 L4_spectral:1.2048e-02 L5_spectral:1.2049e-02 L6_spectral:1.2059e-02 L7_spectral:1.2064e-02 L8_spectral:1.2047e-02 L9_spectral:1.2041e-02 L10_spectral:1.2039e-02 L11_spectral:1.2048e-02 L12_spectral:1.2045e-02 v_norm:2.4009e+00 cos_v_-g_hvp:3.9945e-02 g_hvp_norm:6.6242e-01 cos_v_-g_t:4.5451e-02 
g_t_norm:5.8455e-01 hv_norm:9.3760e-01 cos_v_hv:2.2227e-02 hg_norm:5.9821e+01 cos_g_hg:4.4774e-01 v_par:5.9702e-03 v_perp:2.4009e+00 L1_cos_v_neg_g:2.6951e-02 L1_v_norm:5.8770e-01 L2_cos_v_neg_g:3.3472e-02 L2_v_norm:5.0535e-01 L3_cos_v_neg_g:3.7427e-02 L3_v_norm:5.1248e-01 L4_cos_v_neg_g:4.1308e-02 L4_v_norm:5.5147e-01 L5_cos_v_neg_g:4.5585e-02 L5_v_norm:5.7756e-01 L6_cos_v_neg_g:4.4855e-02 L6_v_norm:5.8779e-01 L7_cos_v_neg_g:4.6938e-02 L7_v_norm:5.9739e-01 L8_cos_v_neg_g:4.6622e-02 L8_v_norm:5.9454e-01 L9_cos_v_neg_g:4.9490e-02 L9_v_norm:5.9825e-01 L10_cos_v_neg_g:5.2061e-02 L10_v_norm:5.9680e-01 L11_cos_v_neg_g:6.2956e-02 L11_v_norm:5.9869e-01 L12_cos_v_neg_g:8.0592e-02 L12_v_norm:6.0267e-01 +step:2000 train loss:3.712387 +step:2001 train loss:3.683746 +step:2002 train loss:3.718647 +step:2003 train loss:3.755657 +step:2004 train loss:3.671530 +step:2005 train loss:3.740865 +step:2006 train loss:3.742655 +step:2007 train loss:3.620769 +step:2008 train loss:3.684218 +step:2009 train loss:3.717956 +step:2010 train loss:3.737804 +step:2011 train loss:3.678580 +step:2012 train loss:3.735212 +step:2013 train loss:3.681237 +step:2014 train loss:3.769140 +step:2015 train loss:3.762102 +step:2016 train loss:3.739594 +step:2017 train loss:3.720202 +step:2018 train loss:3.761450 +step:2019 train loss:3.694650 +step:2020 train loss:3.725699 +step:2021 train loss:3.749140 +step:2022 train loss:3.807092 +step:2023 train loss:3.721821 +step:2024 train loss:3.758967 +step:2025 train loss:3.731914 +step:2026 train loss:3.633876 +step:2027 train loss:3.623286 +step:2028 train loss:3.722398 +step:2029 train loss:3.677577 +step:2030 train loss:3.743795 +step:2031 train loss:3.781255 +step:2032 train loss:3.685851 +step:2033 train loss:3.719761 +step:2034 train loss:3.723943 +step:2035 train loss:3.725221 +step:2036 train loss:3.704606 +step:2037 train loss:3.724202 +step:2038 train loss:3.726593 +step:2039 train loss:3.751299 +step:2040 train loss:3.625129 +step:2041 train loss:3.627824 +step:2042 train loss:3.694111 +step:2043 train loss:3.703923 +step:2044 train loss:3.769024 +step:2045 train loss:3.703357 +step:2046 train loss:3.730657 +step:2047 train loss:3.669230 +step:2048 train loss:3.710063 +step:2049 train loss:3.713167 +step:2050 train loss:3.722255 +step:2051 train loss:3.746866 +step:2052 train loss:3.714028 +step:2053 train loss:3.826209 +step:2054 train loss:3.723179 +step:2055 train loss:3.733650 +step:2056 train loss:3.708344 +step:2057 train loss:3.727217 +step:2058 train loss:3.733507 +step:2059 train loss:3.693838 +step:2060 train loss:3.663811 +step:2061 train loss:3.702959 +step:2062 train loss:3.695203 +step:2063 train loss:3.759504 +step:2064 train loss:3.714343 +step:2065 train loss:3.752632 +step:2066 train loss:3.712675 +step:2067 train loss:3.837272 +step:2068 train loss:3.685273 +step:2069 train loss:3.635464 +step:2070 train loss:3.665812 +step:2071 train loss:3.667345 +step:2072 train loss:3.704075 +step:2073 train loss:3.750190 +step:2074 train loss:3.661268 +step:2075 train loss:3.723593 +step:2076 train loss:3.702747 +step:2077 train loss:3.675171 +step:2078 train loss:3.714983 +step:2079 train loss:3.726572 +step:2080 train loss:3.674978 +step:2081 train loss:3.728976 +step:2082 train loss:3.709747 +step:2083 train loss:3.739865 +step:2084 train loss:3.690063 +step:2085 train loss:3.689851 +step:2086 train loss:3.746018 +step:2087 train loss:3.740836 +step:2088 train loss:3.739992 +step:2089 train loss:3.753791 +step:2090 train loss:3.688966 +step:2091 train loss:3.718397 
+step:2092 train loss:3.723500 +step:2093 train loss:3.678687 +step:2094 train loss:3.767221 +step:2095 train loss:3.646704 +step:2096 train loss:3.755760 +step:2097 train loss:3.689225 +step:2098 train loss:3.672062 +step:2099 train loss:3.719790 +step:2100 train loss:3.712615 +step:2101 train loss:3.739007 +step:2102 train loss:3.710147 +step:2103 train loss:3.720759 +step:2104 train loss:3.720898 +step:2105 train loss:3.692481 +step:2106 train loss:3.748688 +step:2107 train loss:3.663516 +step:2108 train loss:3.688185 +step:2109 train loss:3.678757 +step:2110 train loss:3.700908 +step:2111 train loss:3.787450 +step:2112 train loss:3.658488 +step:2113 train loss:3.664121 +step:2114 train loss:3.716988 +step:2115 train loss:3.686627 +step:2116 train loss:3.726948 +step:2117 train loss:3.663383 +step:2118 train loss:3.657582 +step:2119 train loss:3.661637 +step:2120 train loss:3.662996 +step:2121 train loss:3.689129 +step:2122 train loss:3.675779 +step:2123 train loss:3.730432 +step:2124 train loss:3.735989 +step:2125 train loss:3.691315 +step:2126 train loss:3.706174 +step:2127 train loss:3.711031 +step:2128 train loss:3.694535 +step:2129 train loss:3.758990 +step:2130 train loss:3.746202 +step:2131 train loss:3.642544 +step:2132 train loss:3.674532 +step:2133 train loss:3.676217 +step:2134 train loss:3.733941 +step:2135 train loss:3.756520 +step:2136 train loss:3.872008 +step:2137 train loss:3.700929 +step:2138 train loss:3.665745 +step:2139 train loss:3.725325 +step:2140 train loss:3.658702 +step:2141 train loss:3.684900 +step:2142 train loss:3.649670 +step:2143 train loss:3.679412 +step:2144 train loss:3.756778 +step:2145 train loss:3.706655 +step:2146 train loss:3.642493 +step:2147 train loss:3.670927 +step:2148 train loss:3.716280 +step:2149 train loss:3.667791 +step:2150 train loss:3.737145 +step:2151 train loss:3.727799 +step:2152 train loss:3.670300 +step:2153 train loss:3.679940 +step:2154 train loss:3.647397 +step:2155 train loss:3.656096 +step:2156 train loss:3.795582 +step:2157 train loss:3.692764 +step:2158 train loss:3.713480 +step:2159 train loss:3.824607 +step:2160 train loss:3.712257 +step:2161 train loss:3.709640 +step:2162 train loss:3.763515 +step:2163 train loss:3.743830 +step:2164 train loss:3.768094 +step:2165 train loss:3.696404 +step:2166 train loss:3.669921 +step:2167 train loss:3.692847 +step:2168 train loss:3.678780 +step:2169 train loss:3.704104 +step:2170 train loss:3.735871 +step:2171 train loss:3.716577 +step:2172 train loss:3.740129 +step:2173 train loss:3.655180 +step:2174 train loss:3.714876 +step:2175 train loss:3.693615 +step:2176 train loss:3.650416 +step:2177 train loss:3.751898 +step:2178 train loss:3.673374 +step:2179 train loss:3.710598 +step:2180 train loss:3.704377 +step:2181 train loss:3.656993 +step:2182 train loss:3.738296 +step:2183 train loss:3.737131 +step:2184 train loss:3.737948 +step:2185 train loss:3.723205 +step:2186 train loss:3.667639 +step:2187 train loss:3.669444 +step:2188 train loss:3.727106 +step:2189 train loss:3.636597 +step:2190 train loss:3.681892 +step:2191 train loss:3.714883 +step:2192 train loss:3.778044 +step:2193 train loss:3.705728 +step:2194 train loss:3.701794 +step:2195 train loss:3.710807 +step:2196 train loss:3.687008 +step:2197 train loss:3.671454 +step:2198 train loss:3.690727 +step:2199 train loss:3.683402 +step:2200 train loss:3.718102 +step:2201 train loss:3.772151 +step:2202 train loss:3.713410 +step:2203 train loss:3.687897 +step:2204 train loss:3.716365 +step:2205 train loss:3.698214 +step:2206 train 
loss:3.729962 +step:2207 train loss:3.716666 +step:2208 train loss:3.663605 +step:2209 train loss:3.849877 +step:2210 train loss:3.684385 +step:2211 train loss:3.659402 +step:2212 train loss:3.763222 +step:2213 train loss:3.805569 +step:2214 train loss:3.730128 +step:2215 train loss:3.652188 +step:2216 train loss:3.674358 +step:2217 train loss:3.721282 +step:2218 train loss:3.664212 +step:2219 train loss:3.660008 +step:2220 train loss:3.707814 +step:2221 train loss:3.685085 +step:2222 train loss:3.732097 +step:2223 train loss:3.707183 +step:2224 train loss:3.681427 +step:2225 train loss:3.774451 +step:2226 train loss:3.717040 +step:2227 train loss:3.722530 +step:2228 train loss:3.748725 +step:2229 train loss:3.677607 +step:2230 train loss:3.624418 +step:2231 train loss:3.755554 +step:2232 train loss:3.738995 +step:2233 train loss:3.821200 +step:2234 train loss:3.676582 +step:2235 train loss:3.632645 +step:2236 train loss:3.539271 +step:2237 train loss:3.660066 +step:2238 train loss:3.695543 +step:2239 train loss:3.706547 +step:2240 train loss:3.664409 +step:2241 train loss:3.760506 +step:2242 train loss:3.681937 +step:2243 train loss:3.726968 +step:2244 train loss:3.693327 +step:2245 train loss:3.694201 +step:2246 train loss:3.693928 +step:2247 train loss:3.668461 +step:2248 train loss:3.645936 +step:2249 train loss:3.691702 +step:2250 validation loss:3.629376 +step:2250 train loss:3.679664 +step:2251 train loss:3.694929 +step:2252 train loss:3.733463 +step:2253 train loss:3.653408 +step:2254 train loss:3.735461 +step:2255 train loss:3.654561 +step:2256 train loss:3.719547 +step:2257 train loss:3.641533 +step:2258 train loss:3.713300 +step:2259 train loss:3.691482 +step:2260 train loss:3.683625 +step:2261 train loss:3.712940 +step:2262 train loss:3.673395 +step:2263 train loss:3.655690 +step:2264 train loss:3.728078 +step:2265 train loss:3.679341 +step:2266 train loss:3.679585 +step:2267 train loss:3.681253 +step:2268 train loss:3.677171 +step:2269 train loss:3.700294 +step:2270 train loss:3.700317 +step:2271 train loss:3.680573 +step:2272 train loss:3.662345 +step:2273 train loss:3.727422 +step:2274 train loss:3.684193 +step:2275 train loss:3.732633 +step:2276 train loss:3.676192 +step:2277 train loss:3.691676 +step:2278 train loss:3.705985 +step:2279 train loss:3.678665 +step:2280 train loss:3.635178 +step:2281 train loss:3.705018 +step:2282 train loss:3.611461 +step:2283 train loss:3.707105 +step:2284 train loss:3.631065 +step:2285 train loss:3.715977 +step:2286 train loss:3.683787 +step:2287 train loss:3.710971 +step:2288 train loss:3.667447 +step:2289 train loss:3.857152 +step:2290 train loss:4.016744 +step:2291 train loss:3.685476 +step:2292 train loss:3.695192 +step:2293 train loss:3.711479 +step:2294 train loss:3.652764 +step:2295 train loss:3.680887 +step:2296 train loss:3.652351 +step:2297 train loss:3.613384 +step:2298 train loss:3.710341 +step:2299 train loss:3.622171 +step:2300 train loss:3.623646 +step:2301 train loss:3.653417 +step:2302 train loss:3.720039 +step:2303 train loss:3.685858 +step:2304 train loss:3.690622 +step:2305 train loss:3.647851 +step:2306 train loss:3.665975 +step:2307 train loss:3.630337 +step:2308 train loss:3.648216 +step:2309 train loss:3.694109 +step:2310 train loss:3.717333 +step:2311 train loss:3.659964 +step:2312 train loss:3.641789 +step:2313 train loss:3.737465 +step:2314 train loss:3.652991 +step:2315 train loss:3.610880 +step:2316 train loss:3.607560 +step:2317 train loss:3.625713 +step:2318 train loss:3.734202 +step:2319 train loss:3.664006 
+step:2320 train loss:3.647804 +step:2321 train loss:3.689423 +step:2322 train loss:3.712046 +step:2323 train loss:3.657529 +step:2324 train loss:3.707901 +step:2325 train loss:3.646227 +step:2326 train loss:3.656493 +step:2327 train loss:3.654802 +step:2328 train loss:3.655883 +step:2329 train loss:3.704531 +step:2330 train loss:3.689506 +step:2331 train loss:3.704664 +step:2332 train loss:3.631643 +step:2333 train loss:3.703811 +step:2334 train loss:3.872653 +step:2335 train loss:3.744986 +step:2336 train loss:3.644735 +step:2337 train loss:3.694031 +step:2338 train loss:3.645964 +step:2339 train loss:3.635073 +step:2340 train loss:3.628509 +step:2341 train loss:3.668569 +step:2342 train loss:3.622515 +step:2343 train loss:3.684615 +step:2344 train loss:3.690453 +step:2345 train loss:3.650217 +step:2346 train loss:3.607337 +step:2347 train loss:3.628920 +step:2348 train loss:3.677814 +step:2349 train loss:3.708183 +step:2350 train loss:3.659292 +step:2351 train loss:3.680155 +step:2352 train loss:3.732330 +step:2353 train loss:3.655660 +step:2354 train loss:3.633976 +step:2355 train loss:3.678625 +step:2356 train loss:3.661017 +step:2357 train loss:3.667459 +step:2358 train loss:3.655575 +step:2359 train loss:3.653733 +step:2360 train loss:3.631009 +step:2361 train loss:3.638187 +step:2362 train loss:3.608483 +step:2363 train loss:3.623906 +step:2364 train loss:3.702610 +step:2365 train loss:3.652653 +step:2366 train loss:3.722235 +step:2367 train loss:3.641879 +step:2368 train loss:3.642054 +step:2369 train loss:3.654791 +step:2370 train loss:3.687539 +step:2371 train loss:3.673607 +step:2372 train loss:3.689342 +step:2373 train loss:3.662717 +step:2374 train loss:3.649411 +step:2375 train loss:3.658634 +step:2376 train loss:3.652707 +step:2377 train loss:3.646709 +step:2378 train loss:3.611830 +step:2379 train loss:3.636108 +step:2380 train loss:3.647364 +step:2381 train loss:3.638600 +step:2382 train loss:3.644951 +step:2383 train loss:3.687755 +step:2384 train loss:3.588193 +step:2385 train loss:3.672359 +step:2386 train loss:3.675534 +step:2387 train loss:3.669647 +step:2388 train loss:3.648016 +step:2389 train loss:3.644908 +step:2390 train loss:3.676203 +step:2391 train loss:3.664755 +step:2392 train loss:3.657412 +step:2393 train loss:3.598283 +step:2394 train loss:3.702393 +step:2395 train loss:3.677894 +step:2396 train loss:3.672220 +step:2397 train loss:3.646366 +step:2398 train loss:3.645421 +step:2399 train loss:3.586734 +step:2400 train loss:3.605795 +step:2401 train loss:3.604173 +step:2402 train loss:3.615781 +step:2403 train loss:3.652978 +step:2404 train loss:3.660123 +step:2405 train loss:3.612313 +step:2406 train loss:3.612371 +step:2407 train loss:3.644120 +step:2408 train loss:3.719818 +step:2409 train loss:3.683880 +step:2410 train loss:3.709319 +step:2411 train loss:3.651545 +step:2412 train loss:3.662302 +step:2413 train loss:3.698602 +step:2414 train loss:3.584339 +step:2415 train loss:3.686720 +step:2416 train loss:3.673407 +step:2417 train loss:3.681260 +step:2418 train loss:3.713666 +step:2419 train loss:3.709240 +step:2420 train loss:3.578463 +step:2421 train loss:3.651985 +step:2422 train loss:3.728374 +step:2423 train loss:3.680633 +step:2424 train loss:3.664325 +step:2425 train loss:3.624106 +step:2426 train loss:3.641410 +step:2427 train loss:3.640069 +step:2428 train loss:3.626463 +step:2429 train loss:3.712046 +step:2430 train loss:3.640187 +step:2431 train loss:3.677592 +step:2432 train loss:3.623531 +step:2433 train loss:3.610528 +step:2434 train 
loss:3.645766 +step:2435 train loss:3.644488 +step:2436 train loss:3.708973 +step:2437 train loss:3.659793 +step:2438 train loss:3.688048 +step:2439 train loss:3.703998 +step:2440 train loss:3.643095 +step:2441 train loss:3.619995 +step:2442 train loss:3.647784 +step:2443 train loss:3.608581 +step:2444 train loss:3.597201 +step:2445 train loss:3.697270 +step:2446 train loss:3.690388 +step:2447 train loss:3.681730 +step:2448 train loss:3.696736 +step:2449 train loss:3.668977 +step:2450 train loss:3.601318 +step:2451 train loss:3.764810 +step:2452 train loss:3.655078 +step:2453 train loss:3.640497 +step:2454 train loss:3.618160 +step:2455 train loss:3.693069 +step:2456 train loss:3.644468 +step:2457 train loss:3.676709 +step:2458 train loss:3.719737 +step:2459 train loss:3.566020 +step:2460 train loss:3.628553 +step:2461 train loss:3.620270 +step:2462 train loss:3.631468 +step:2463 train loss:3.697510 +step:2464 train loss:3.645852 +step:2465 train loss:3.668610 +step:2466 train loss:3.657183 +step:2467 train loss:3.617749 +step:2468 train loss:3.630460 +step:2469 train loss:3.695856 +step:2470 train loss:3.647044 +step:2471 train loss:3.664808 +step:2472 train loss:3.623236 +step:2473 train loss:3.671909 +step:2474 train loss:3.648576 +step:2475 train loss:3.657661 +step:2476 train loss:3.631972 +step:2477 train loss:3.643299 +step:2478 train loss:3.628023 +step:2479 train loss:3.705724 +step:2480 train loss:3.650916 +step:2481 train loss:3.630926 +step:2482 train loss:3.614748 +step:2483 train loss:3.628311 +step:2484 train loss:3.683518 +step:2485 train loss:3.674182 +step:2486 train loss:3.634790 +step:2487 train loss:3.705207 +step:2488 train loss:3.650155 +step:2489 train loss:3.657196 +step:2490 train loss:3.591441 +step:2491 train loss:3.677648 +step:2492 train loss:3.666847 +step:2493 train loss:3.647481 +step:2494 train loss:3.740691 +step:2495 train loss:3.605719 +step:2496 train loss:3.617162 +step:2497 train loss:3.648173 +step:2498 train loss:3.632392 +step:2499 train loss:3.723318 +step:2500 validation loss:3.602940 total_sharp:6.4297e-03 L1_sharp:6.4801e-03 L2_sharp:8.7190e-04 L3_sharp:1.6933e-03 L4_sharp:1.2989e-03 L5_sharp:1.1555e-03 L6_sharp:1.4586e-03 L7_sharp:1.6905e-03 L8_sharp:1.5703e-03 L9_sharp:8.7612e-04 L10_sharp:5.7771e-04 L11_sharp:5.6673e-04 L12_sharp:4.6357e-04 total_fnorm:2.3935e+00 total_l1_linf:2.0476e+04 total_spectral:2.3935e+00 L1_fnorm:5.8719e-01 L2_fnorm:5.0244e-01 L3_fnorm:4.8048e-01 L4_fnorm:5.5107e-01 L5_fnorm:5.8027e-01 L6_fnorm:5.9081e-01 L7_fnorm:5.9609e-01 L8_fnorm:5.9523e-01 L9_fnorm:5.9747e-01 L10_fnorm:5.9704e-01 L11_fnorm:5.9582e-01 L12_fnorm:6.0313e-01 L1_l1linf:4.4595e-01 L2_l1linf:4.1336e-01 L3_l1linf:3.9695e-01 L4_l1linf:3.9785e-01 L5_l1linf:4.1291e-01 L6_l1linf:4.1211e-01 L7_l1linf:4.1260e-01 L8_l1linf:4.1563e-01 L9_l1linf:4.1183e-01 L10_l1linf:4.1348e-01 L11_l1linf:4.0729e-01 L12_l1linf:3.9591e-01 L1_spectral:1.2052e-02 L2_spectral:1.2062e-02 L3_spectral:1.3147e-02 L4_spectral:1.2044e-02 L5_spectral:1.2045e-02 L6_spectral:1.2048e-02 L7_spectral:1.2062e-02 L8_spectral:1.2063e-02 L9_spectral:1.2046e-02 L10_spectral:1.2045e-02 L11_spectral:1.2044e-02 L12_spectral:1.2046e-02 v_norm:2.3935e+00 cos_v_-g_hvp:3.9688e-02 g_hvp_norm:6.3247e-01 cos_v_-g_t:4.4568e-02 g_t_norm:5.6846e-01 hv_norm:7.6853e-01 cos_v_hv:2.0024e-02 hg_norm:1.8345e+02 cos_g_hg:2.5047e-03 v_par:6.0835e-03 v_perp:2.3934e+00 L1_cos_v_neg_g:2.9519e-02 L1_v_norm:5.8719e-01 L2_cos_v_neg_g:2.9169e-02 L2_v_norm:5.0244e-01 L3_cos_v_neg_g:3.2290e-02 L3_v_norm:4.8048e-01 
L4_cos_v_neg_g:3.5814e-02 L4_v_norm:5.5107e-01 L5_cos_v_neg_g:4.1952e-02 L5_v_norm:5.8027e-01 L6_cos_v_neg_g:4.1733e-02 L6_v_norm:5.9081e-01 L7_cos_v_neg_g:4.1583e-02 L7_v_norm:5.9609e-01 L8_cos_v_neg_g:3.9766e-02 L8_v_norm:5.9523e-01 L9_cos_v_neg_g:4.3006e-02 L9_v_norm:5.9747e-01 L10_cos_v_neg_g:4.5248e-02 L10_v_norm:5.9704e-01 L11_cos_v_neg_g:5.5842e-02 L11_v_norm:5.9582e-01 L12_cos_v_neg_g:9.0655e-02 L12_v_norm:6.0313e-01 +step:2500 train loss:3.638523 +step:2501 train loss:3.674690 +step:2502 train loss:3.694574 +step:2503 train loss:3.629875 +step:2504 train loss:3.638120 +step:2505 train loss:3.730935 +step:2506 train loss:3.698273 +step:2507 train loss:3.604421 +step:2508 train loss:3.659787 +step:2509 train loss:3.659917 +step:2510 train loss:3.635866 +step:2511 train loss:3.604890 +step:2512 train loss:3.680091 +step:2513 train loss:3.658204 +step:2514 train loss:3.628381 +step:2515 train loss:3.641824 +step:2516 train loss:3.647663 +step:2517 train loss:3.632071 +step:2518 train loss:3.620651 +step:2519 train loss:3.660318 +step:2520 train loss:3.617889 +step:2521 train loss:3.662004 +step:2522 train loss:3.658303 +step:2523 train loss:3.603226 +step:2524 train loss:3.645390 +step:2525 train loss:3.706590 +step:2526 train loss:3.617537 +step:2527 train loss:3.689312 +step:2528 train loss:3.756687 +step:2529 train loss:3.601857 +step:2530 train loss:3.633727 +step:2531 train loss:3.625865 +step:2532 train loss:3.712078 +step:2533 train loss:3.584200 +step:2534 train loss:3.589852 +step:2535 train loss:3.630180 +step:2536 train loss:3.600233 +step:2537 train loss:3.626183 +step:2538 train loss:3.666010 +step:2539 train loss:3.566126 +step:2540 train loss:3.653489 +step:2541 train loss:3.656942 +step:2542 train loss:3.690103 +step:2543 train loss:3.601421 +step:2544 train loss:3.697948 +step:2545 train loss:3.623038 +step:2546 train loss:3.652142 +step:2547 train loss:3.640436 +step:2548 train loss:3.662123 +step:2549 train loss:3.642910 +step:2550 train loss:3.654664 +step:2551 train loss:3.645308 +step:2552 train loss:3.693402 +step:2553 train loss:3.705807 +step:2554 train loss:3.655751 +step:2555 train loss:3.634677 +step:2556 train loss:3.636890 +step:2557 train loss:3.630138 +step:2558 train loss:3.659719 +step:2559 train loss:3.655872 +step:2560 train loss:3.693753 +step:2561 train loss:3.602396 +step:2562 train loss:3.652253 +step:2563 train loss:3.636747 +step:2564 train loss:3.595336 +step:2565 train loss:3.681095 +step:2566 train loss:3.649362 +step:2567 train loss:3.576323 +step:2568 train loss:3.664970 +step:2569 train loss:3.673101 +step:2570 train loss:3.639106 +step:2571 train loss:3.631677 +step:2572 train loss:3.694173 +step:2573 train loss:3.659294 +step:2574 train loss:3.580850 +step:2575 train loss:3.649040 +step:2576 train loss:3.632874 +step:2577 train loss:3.593710 +step:2578 train loss:3.780719 +step:2579 train loss:3.618045 +step:2580 train loss:3.634181 +step:2581 train loss:3.657809 +step:2582 train loss:3.631202 +step:2583 train loss:3.719184 +step:2584 train loss:3.658897 +step:2585 train loss:3.617879 +step:2586 train loss:3.608573 +step:2587 train loss:3.635563 +step:2588 train loss:3.633710 +step:2589 train loss:3.699186 +step:2590 train loss:3.567302 +step:2591 train loss:3.646366 +step:2592 train loss:3.625857 +step:2593 train loss:3.598697 +step:2594 train loss:3.638787 +step:2595 train loss:3.690956 +step:2596 train loss:3.598660 +step:2597 train loss:3.684129 +step:2598 train loss:3.651528 +step:2599 train loss:3.693834 +step:2600 train 
loss:3.583177 +step:2601 train loss:3.630730 +step:2602 train loss:3.620857 +step:2603 train loss:3.656438 +step:2604 train loss:3.718167 +step:2605 train loss:3.674402 +step:2606 train loss:3.678834 +step:2607 train loss:3.675975 +step:2608 train loss:3.580225 +step:2609 train loss:3.644316 +step:2610 train loss:3.611825 +step:2611 train loss:3.664077 +step:2612 train loss:3.709466 +step:2613 train loss:3.649771 +step:2614 train loss:3.670492 +step:2615 train loss:3.625478 +step:2616 train loss:3.537874 +step:2617 train loss:3.607254 +step:2618 train loss:3.582193 +step:2619 train loss:3.625504 +step:2620 train loss:3.651104 +step:2621 train loss:3.768404 +step:2622 train loss:3.716210 +step:2623 train loss:3.737176 +step:2624 train loss:3.660734 +step:2625 train loss:3.685222 +step:2626 train loss:3.619908 +step:2627 train loss:3.599318 +step:2628 train loss:3.629424 +step:2629 train loss:3.646738 +step:2630 train loss:3.609751 +step:2631 train loss:3.640383 +step:2632 train loss:3.651618 +step:2633 train loss:3.600825 +step:2634 train loss:3.637769 +step:2635 train loss:3.676105 +step:2636 train loss:3.649664 +step:2637 train loss:3.642353 +step:2638 train loss:3.622425 +step:2639 train loss:3.680807 +step:2640 train loss:3.611537 +step:2641 train loss:3.704073 +step:2642 train loss:3.612236 +step:2643 train loss:3.633281 +step:2644 train loss:3.618978 +step:2645 train loss:3.651723 +step:2646 train loss:3.623237 +step:2647 train loss:3.638035 +step:2648 train loss:3.630705 +step:2649 train loss:3.646919 +step:2650 train loss:3.607859 +step:2651 train loss:3.644016 +step:2652 train loss:3.567064 +step:2653 train loss:3.685806 +step:2654 train loss:3.616264 +step:2655 train loss:3.707891 +step:2656 train loss:3.639023 +step:2657 train loss:3.773504 +step:2658 train loss:3.774652 +step:2659 train loss:3.640622 +step:2660 train loss:3.681506 +step:2661 train loss:3.593343 +step:2662 train loss:3.609235 +step:2663 train loss:3.671477 +step:2664 train loss:3.601129 +step:2665 train loss:3.663865 +step:2666 train loss:3.640488 +step:2667 train loss:3.642567 +step:2668 train loss:3.606144 +step:2669 train loss:3.676461 +step:2670 train loss:3.631369 +step:2671 train loss:3.607451 +step:2672 train loss:3.644062 +step:2673 train loss:3.655838 +step:2674 train loss:3.677165 +step:2675 train loss:3.651305 +step:2676 train loss:3.644351 +step:2677 train loss:3.640765 +step:2678 train loss:3.609396 +step:2679 train loss:3.714744 +step:2680 train loss:3.659662 +step:2681 train loss:3.560549 +step:2682 train loss:3.646956 +step:2683 train loss:3.622167 +step:2684 train loss:3.651844 +step:2685 train loss:3.655728 +step:2686 train loss:3.601733 +step:2687 train loss:3.655600 +step:2688 train loss:3.625360 +step:2689 train loss:3.638794 +step:2690 train loss:3.690311 +step:2691 train loss:3.683449 +step:2692 train loss:3.657523 +step:2693 train loss:3.637120 +step:2694 train loss:3.626057 +step:2695 train loss:3.656082 +step:2696 train loss:3.641935 +step:2697 train loss:3.674000 +step:2698 train loss:3.656656 +step:2699 train loss:3.631013 +step:2700 train loss:3.687005 +step:2701 train loss:3.641570 +step:2702 train loss:3.643517 +step:2703 train loss:3.610590 +step:2704 train loss:3.627001 +step:2705 train loss:3.668111 +step:2706 train loss:3.646914 +step:2707 train loss:3.672678 +step:2708 train loss:3.573933 +step:2709 train loss:3.668021 +step:2710 train loss:3.632949 +step:2711 train loss:3.686722 +step:2712 train loss:3.637664 +step:2713 train loss:3.602836 +step:2714 train loss:3.723689 
+step:2715 train loss:3.660794 +step:2716 train loss:3.652034 +step:2717 train loss:3.650567 +step:2718 train loss:3.697055 +step:2719 train loss:3.691679 +step:2720 train loss:3.703802 +step:2721 train loss:3.627337 +step:2722 train loss:3.616198 +step:2723 train loss:3.654883 +step:2724 train loss:3.667385 +step:2725 train loss:3.666748 +step:2726 train loss:3.646745 +step:2727 train loss:3.670915 +step:2728 train loss:3.619801 +step:2729 train loss:3.644957 +step:2730 train loss:3.643552 +step:2731 train loss:3.653483 +step:2732 train loss:3.633303 +step:2733 train loss:3.701568 +step:2734 train loss:3.635292 +step:2735 train loss:3.647942 +step:2736 train loss:3.649485 +step:2737 train loss:3.643629 +step:2738 train loss:3.614735 +step:2739 train loss:3.644289 +step:2740 train loss:3.682665 +step:2741 train loss:3.764274 +step:2742 train loss:3.820357 +step:2743 train loss:3.681522 +step:2744 train loss:3.658341 +step:2745 train loss:3.712399 +step:2746 train loss:3.640271 +step:2747 train loss:3.646895 +step:2748 train loss:3.647410 +step:2749 train loss:3.665028 +step:2750 validation loss:3.579879 +step:2750 train loss:3.761361 +step:2751 train loss:3.616629 +step:2752 train loss:3.654661 +step:2753 train loss:3.639331 +step:2754 train loss:3.637074 +step:2755 train loss:3.631714 +step:2756 train loss:3.637153 +step:2757 train loss:3.661778 +step:2758 train loss:3.600841 +step:2759 train loss:3.613737 +step:2760 train loss:3.621641 +step:2761 train loss:3.619144 +step:2762 train loss:3.612120 +step:2763 train loss:3.605650 +step:2764 train loss:3.631718 +step:2765 train loss:3.615864 +step:2766 train loss:3.717135 +step:2767 train loss:3.663362 +step:2768 train loss:3.673059 +step:2769 train loss:3.690225 +step:2770 train loss:3.646699 +step:2771 train loss:3.656256 +step:2772 train loss:3.668328 +step:2773 train loss:3.630485 +step:2774 train loss:3.700167 +step:2775 train loss:3.603549 +step:2776 train loss:3.597222 +step:2777 train loss:3.617702 +step:2778 train loss:3.562902 +step:2779 train loss:3.639529 +step:2780 train loss:3.600768 +step:2781 train loss:3.654930 +step:2782 train loss:3.663420 +step:2783 train loss:3.685639 +step:2784 train loss:3.575503 +step:2785 train loss:3.616566 +step:2786 train loss:3.636379 +step:2787 train loss:3.705799 +step:2788 train loss:3.664264 +step:2789 train loss:3.767504 +step:2790 train loss:3.615650 +step:2791 train loss:3.659940 +step:2792 train loss:3.630221 +step:2793 train loss:3.699487 +step:2794 train loss:3.596557 +step:2795 train loss:3.619620 +step:2796 train loss:3.656014 +step:2797 train loss:3.748510 +step:2798 train loss:3.638562 +step:2799 train loss:3.627942 +step:2800 train loss:3.641419 +step:2801 train loss:3.659924 +step:2802 train loss:3.769783 +step:2803 train loss:3.671665 +step:2804 train loss:3.638467 +step:2805 train loss:3.635030 +step:2806 train loss:3.620274 +step:2807 train loss:3.621083 +step:2808 train loss:3.641623 +step:2809 train loss:3.643946 +step:2810 train loss:3.634574 +step:2811 train loss:3.648367 +step:2812 train loss:3.712822 +step:2813 train loss:3.630811 +step:2814 train loss:3.649875 +step:2815 train loss:3.626607 +step:2816 train loss:3.692693 +step:2817 train loss:3.680529 +step:2818 train loss:3.642697 +step:2819 train loss:3.643870 +step:2820 train loss:3.633140 +step:2821 train loss:3.650106 +step:2822 train loss:3.634125 +step:2823 train loss:3.711193 +step:2824 train loss:3.639991 +step:2825 train loss:3.604668 +step:2826 train loss:3.650705 +step:2827 train loss:3.612200 +step:2828 
train loss:3.611370 +step:2829 train loss:3.810199 +step:2830 train loss:3.603795 +step:2831 train loss:3.635983 +step:2832 train loss:3.608841 +step:2833 train loss:3.625264 +step:2834 train loss:3.610882 +step:2835 train loss:3.677737 +step:2836 train loss:3.603354 +step:2837 train loss:3.654124 +step:2838 train loss:3.620886 +step:2839 train loss:3.621433 +step:2840 train loss:3.601632 +step:2841 train loss:3.672786 +step:2842 train loss:3.577625 +step:2843 train loss:3.643219 +step:2844 train loss:3.568401 +step:2845 train loss:3.580081 +step:2846 train loss:3.644803 +step:2847 train loss:3.875871 +step:2848 train loss:3.633824 +step:2849 train loss:3.708565 +step:2850 train loss:3.602311 +step:2851 train loss:3.624225 +step:2852 train loss:3.638644 +step:2853 train loss:3.656202 +step:2854 train loss:3.650265 +step:2855 train loss:3.755269 +step:2856 train loss:3.723016 +step:2857 train loss:3.589853 +step:2858 train loss:3.620689 +step:2859 train loss:3.627433 +step:2860 train loss:3.593709 +step:2861 train loss:3.655810 +step:2862 train loss:3.613305 +step:2863 train loss:3.660263 +step:2864 train loss:3.633452 +step:2865 train loss:3.599829 +step:2866 train loss:3.569876 +step:2867 train loss:3.583333 +step:2868 train loss:3.666394 +step:2869 train loss:3.616716 +step:2870 train loss:3.639091 +step:2871 train loss:3.628768 +step:2872 train loss:3.703279 +step:2873 train loss:3.613246 +step:2874 train loss:3.614429 +step:2875 train loss:3.556693 +step:2876 train loss:3.649901 +step:2877 train loss:3.585027 +step:2878 train loss:3.598882 +step:2879 train loss:3.621973 +step:2880 train loss:3.626183 +step:2881 train loss:3.654284 +step:2882 train loss:3.649948 +step:2883 train loss:3.608911 +step:2884 train loss:3.654272 +step:2885 train loss:3.617575 +step:2886 train loss:3.605182 +step:2887 train loss:3.614691 +step:2888 train loss:3.647319 +step:2889 train loss:3.672788 +step:2890 train loss:3.717382 +step:2891 train loss:3.658420 +step:2892 train loss:3.541862 +step:2893 train loss:3.622686 +step:2894 train loss:3.633082 +step:2895 train loss:3.642954 +step:2896 train loss:3.617409 +step:2897 train loss:3.617584 +step:2898 train loss:3.580853 +step:2899 train loss:3.627966 +step:2900 train loss:3.662281 +step:2901 train loss:3.660041 +step:2902 train loss:3.624597 +step:2903 train loss:3.586545 +step:2904 train loss:3.608346 +step:2905 train loss:3.644297 +step:2906 train loss:3.560984 +step:2907 train loss:3.596489 +step:2908 train loss:3.691670 +step:2909 train loss:3.670009 +step:2910 train loss:3.671550 +step:2911 train loss:3.618441 +step:2912 train loss:3.595201 +step:2913 train loss:3.621432 +step:2914 train loss:3.623261 +step:2915 train loss:3.618492 +step:2916 train loss:3.622328 +step:2917 train loss:3.592684 +step:2918 train loss:3.661711 +step:2919 train loss:3.574732 +step:2920 train loss:3.638676 +step:2921 train loss:3.609784 +step:2922 train loss:3.647155 +step:2923 train loss:3.780873 +step:2924 train loss:3.654747 +step:2925 train loss:3.611275 +step:2926 train loss:3.610651 +step:2927 train loss:3.631558 +step:2928 train loss:3.562003 +step:2929 train loss:3.700285 +step:2930 train loss:3.628783 +step:2931 train loss:3.655844 +step:2932 train loss:3.660954 +step:2933 train loss:3.673120 +step:2934 train loss:3.641454 +step:2935 train loss:3.621109 +step:2936 train loss:3.581201 +step:2937 train loss:3.619742 +step:2938 train loss:3.626263 +step:2939 train loss:3.576326 +step:2940 train loss:3.640820 +step:2941 train loss:3.615924 +step:2942 train loss:3.617151 
+step:2943 train loss:3.631634 +step:2944 train loss:3.590302 +step:2945 train loss:3.611632 +step:2946 train loss:3.641033 +step:2947 train loss:3.632557 +step:2948 train loss:3.599425 +step:2949 train loss:3.616993 +step:2950 train loss:3.618705 +step:2951 train loss:3.646060 +step:2952 train loss:3.651445 +step:2953 train loss:3.679532 +step:2954 train loss:3.640734 +step:2955 train loss:3.621334 +step:2956 train loss:3.614761 +step:2957 train loss:3.611936 +step:2958 train loss:3.565990 +step:2959 train loss:3.648508 +step:2960 train loss:3.614706 +step:2961 train loss:3.637642 +step:2962 train loss:3.622217 +step:2963 train loss:3.649554 +step:2964 train loss:3.580419 +step:2965 train loss:3.651230 +step:2966 train loss:3.639111 +step:2967 train loss:3.623007 +step:2968 train loss:3.572679 +step:2969 train loss:3.631327 +step:2970 train loss:3.597315 +step:2971 train loss:3.615254 +step:2972 train loss:3.562011 +step:2973 train loss:3.618988 +step:2974 train loss:3.582642 +step:2975 train loss:3.572889 +step:2976 train loss:3.568102 +step:2977 train loss:3.615624 +step:2978 train loss:3.601338 +step:2979 train loss:3.588316 +step:2980 train loss:3.648926 +step:2981 train loss:3.617120 +step:2982 train loss:3.654282 +step:2983 train loss:3.629019 +step:2984 train loss:3.618926 +step:2985 train loss:3.612035 +step:2986 train loss:3.625566 +step:2987 train loss:3.616302 +step:2988 train loss:3.591141 +step:2989 train loss:3.664847 +step:2990 train loss:3.619922 +step:2991 train loss:3.619551 +step:2992 train loss:3.633792 +step:2993 train loss:3.631516 +step:2994 train loss:3.592471 +step:2995 train loss:3.608865 +step:2996 train loss:3.636189 +step:2997 train loss:3.626305 +step:2998 train loss:3.602136 +step:2999 train loss:3.621929 +step:3000 validation loss:3.561556 total_sharp:5.2714e-03 L1_sharp:7.9104e-03 L2_sharp:3.9838e-03 L3_sharp:7.8043e-04 L4_sharp:7.5828e-04 L5_sharp:8.3714e-04 L6_sharp:1.2936e-03 L7_sharp:1.1715e-03 L8_sharp:1.1232e-03 L9_sharp:7.2609e-04 L10_sharp:5.2915e-04 L11_sharp:5.0040e-04 L12_sharp:3.9320e-04 total_fnorm:2.3827e+00 total_l1_linf:2.0360e+04 total_spectral:2.3827e+00 L1_fnorm:5.8498e-01 L2_fnorm:4.7470e-01 L3_fnorm:4.4750e-01 L4_fnorm:5.5198e-01 L5_fnorm:5.8186e-01 L6_fnorm:5.9152e-01 L7_fnorm:5.9831e-01 L8_fnorm:5.9781e-01 L9_fnorm:5.9737e-01 L10_fnorm:5.9618e-01 L11_fnorm:5.9361e-01 L12_fnorm:6.0307e-01 L1_l1linf:4.4344e-01 L2_l1linf:4.1705e-01 L3_l1linf:4.1408e-01 L4_l1linf:3.9956e-01 L5_l1linf:4.0661e-01 L6_l1linf:4.1167e-01 L7_l1linf:4.0955e-01 L8_l1linf:4.1115e-01 L9_l1linf:4.1559e-01 L10_l1linf:4.1207e-01 L11_l1linf:4.0827e-01 L12_l1linf:3.9886e-01 L1_spectral:1.2050e-02 L2_spectral:1.2067e-02 L3_spectral:1.3282e-02 L4_spectral:1.2044e-02 L5_spectral:1.2046e-02 L6_spectral:1.2051e-02 L7_spectral:1.2047e-02 L8_spectral:1.2065e-02 L9_spectral:1.2046e-02 L10_spectral:1.2045e-02 L11_spectral:1.2042e-02 L12_spectral:1.2043e-02 v_norm:2.3827e+00 cos_v_-g_hvp:3.5837e-02 g_hvp_norm:6.2816e-01 cos_v_-g_t:4.1851e-02 g_t_norm:5.4264e-01 hv_norm:9.0050e-01 cos_v_hv:1.3948e-02 hg_norm:5.9664e+01 cos_g_hg:3.1473e-01 v_par:5.8134e-03 v_perp:2.3826e+00 L1_cos_v_neg_g:2.7292e-02 L1_v_norm:5.8498e-01 L2_cos_v_neg_g:3.2520e-02 L2_v_norm:4.7470e-01 L3_cos_v_neg_g:3.0784e-02 L3_v_norm:4.4750e-01 L4_cos_v_neg_g:3.3370e-02 L4_v_norm:5.5198e-01 L5_cos_v_neg_g:3.7562e-02 L5_v_norm:5.8186e-01 L6_cos_v_neg_g:3.5981e-02 L6_v_norm:5.9152e-01 L7_cos_v_neg_g:3.6668e-02 L7_v_norm:5.9831e-01 L8_cos_v_neg_g:3.7195e-02 L8_v_norm:5.9781e-01 L9_cos_v_neg_g:4.0507e-02 
L9_v_norm:5.9737e-01 L10_cos_v_neg_g:4.3611e-02 L10_v_norm:5.9618e-01 L11_cos_v_neg_g:5.3175e-02 L11_v_norm:5.9361e-01 L12_cos_v_neg_g:8.7471e-02 L12_v_norm:6.0307e-01 +step:3000 train loss:3.590948 +step:3001 train loss:3.662578 +step:3002 train loss:3.641973 +step:3003 train loss:3.670077 +step:3004 train loss:3.611857 +step:3005 train loss:3.562692 +step:3006 train loss:3.610374 +step:3007 train loss:3.575727 +step:3008 train loss:3.661136 +step:3009 train loss:3.639467 +step:3010 train loss:3.643322 +step:3011 train loss:3.660422 +step:3012 train loss:3.655915 +step:3013 train loss:3.643430 +step:3014 train loss:3.574497 +step:3015 train loss:3.645455 +step:3016 train loss:3.570106 +step:3017 train loss:3.569861 +step:3018 train loss:3.605243 +step:3019 train loss:3.644928 +step:3020 train loss:3.591979 +step:3021 train loss:3.638186 +step:3022 train loss:3.608974 +step:3023 train loss:3.657625 +step:3024 train loss:3.612231 +step:3025 train loss:3.584231 +step:3026 train loss:3.629736 +step:3027 train loss:3.625456 +step:3028 train loss:3.635428 +step:3029 train loss:3.671497 +step:3030 train loss:3.605464 +step:3031 train loss:3.632625 +step:3032 train loss:3.619993 +step:3033 train loss:3.534416 +step:3034 train loss:3.622298 +step:3035 train loss:3.598526 +step:3036 train loss:3.561263 +step:3037 train loss:3.647393 +step:3038 train loss:3.612611 +step:3039 train loss:3.640338 +step:3040 train loss:3.606016 +step:3041 train loss:3.620506 +step:3042 train loss:3.611691 +step:3043 train loss:3.639953 +step:3044 train loss:3.648011 +step:3045 train loss:3.620527 +step:3046 train loss:3.554124 +step:3047 train loss:3.687399 +step:3048 train loss:3.658057 +step:3049 train loss:3.604634 +step:3050 train loss:3.659029 +step:3051 train loss:3.604042 +step:3052 train loss:3.489742 +step:3053 train loss:3.626220 +step:3054 train loss:3.592533 +step:3055 train loss:3.559543 +step:3056 train loss:3.605159 +step:3057 train loss:3.556033 +step:3058 train loss:3.633372 +step:3059 train loss:3.529343 +step:3060 train loss:3.671626 +step:3061 train loss:3.689593 +step:3062 train loss:3.589281 +step:3063 train loss:3.553535 +step:3064 train loss:3.571500 +step:3065 train loss:3.535929 +step:3066 train loss:3.725070 +step:3067 train loss:3.585576 +step:3068 train loss:3.573665 +step:3069 train loss:3.611726 +step:3070 train loss:3.612782 +step:3071 train loss:3.562603 +step:3072 train loss:3.576187 +step:3073 train loss:3.583807 +step:3074 train loss:3.577113 +step:3075 train loss:3.581534 +step:3076 train loss:3.534162 +step:3077 train loss:3.538090 +step:3078 train loss:3.623934 +step:3079 train loss:3.534781 +step:3080 train loss:3.608957 +step:3081 train loss:3.568419 +step:3082 train loss:3.512007 +step:3083 train loss:3.567720 +step:3084 train loss:3.573507 +step:3085 train loss:3.617743 +step:3086 train loss:3.653095 +step:3087 train loss:3.554116 +step:3088 train loss:3.581105 +step:3089 train loss:3.575159 +step:3090 train loss:3.586965 +step:3091 train loss:3.593254 +step:3092 train loss:3.669926 +step:3093 train loss:3.557633 +step:3094 train loss:3.576568 +step:3095 train loss:3.649359 +step:3096 train loss:3.597132 +step:3097 train loss:3.610021 +step:3098 train loss:3.581244 +step:3099 train loss:3.573324 +step:3100 train loss:3.630248 +step:3101 train loss:3.566607 +step:3102 train loss:3.569496 +step:3103 train loss:3.571978 +step:3104 train loss:3.580989 +step:3105 train loss:3.601523 +step:3106 train loss:3.571861 +step:3107 train loss:3.624253 +step:3108 train loss:3.522766 
+step:3109 train loss:3.600832 +step:3110 train loss:3.619376 +step:3111 train loss:3.619878 +step:3112 train loss:3.647970 +step:3113 train loss:3.596605 +step:3114 train loss:3.592496 +step:3115 train loss:3.633685 +step:3116 train loss:3.604671 +step:3117 train loss:3.554093 +step:3118 train loss:3.535814 +step:3119 train loss:3.535058 +step:3120 train loss:3.681673 +step:3121 train loss:3.597949 +step:3122 train loss:3.571349 +step:3123 train loss:3.580089 +step:3124 train loss:3.490876 +step:3125 train loss:3.557575 +step:3126 train loss:3.557658 +step:3127 train loss:3.579770 +step:3128 train loss:3.577486 +step:3129 train loss:3.566183 +step:3130 train loss:3.538956 +step:3131 train loss:3.634348 +step:3132 train loss:3.632912 +step:3133 train loss:3.575320 +step:3134 train loss:3.575640 +step:3135 train loss:3.540520 +step:3136 train loss:3.593762 +step:3137 train loss:3.523040 +step:3138 train loss:3.614938 +step:3139 train loss:3.623642 +step:3140 train loss:3.613750 +step:3141 train loss:3.617601 +step:3142 train loss:3.577877 +step:3143 train loss:3.569715 +step:3144 train loss:3.189580 +step:3145 train loss:3.308544 +step:3146 train loss:3.547861 +step:3147 train loss:3.597746 +step:3148 train loss:3.572498 +step:3149 train loss:3.580932 +step:3150 train loss:3.576954 +step:3151 train loss:3.574314 +step:3152 train loss:3.546334 +step:3153 train loss:3.564716 +step:3154 train loss:3.571129 +step:3155 train loss:3.552021 +step:3156 train loss:3.585944 +step:3157 train loss:3.540051 +step:3158 train loss:3.627112 +step:3159 train loss:3.600726 +step:3160 train loss:3.557858 +step:3161 train loss:3.603953 +step:3162 train loss:3.596126 +step:3163 train loss:3.558347 +step:3164 train loss:3.613004 +step:3165 train loss:3.641065 +step:3166 train loss:3.560797 +step:3167 train loss:3.662217 +step:3168 train loss:3.534715 +step:3169 train loss:3.556810 +step:3170 train loss:3.688292 +step:3171 train loss:3.537303 +step:3172 train loss:3.622366 +step:3173 train loss:3.615913 +step:3174 train loss:3.534586 +step:3175 train loss:3.538424 +step:3176 train loss:3.654799 +step:3177 train loss:3.537377 +step:3178 train loss:3.581753 +step:3179 train loss:3.549633 +step:3180 train loss:3.559788 +step:3181 train loss:3.594519 +step:3182 train loss:3.658301 +step:3183 train loss:3.543079 +step:3184 train loss:3.571113 +step:3185 train loss:3.570036 +step:3186 train loss:3.595909 +step:3187 train loss:3.553699 +step:3188 train loss:3.524242 +step:3189 train loss:3.553043 +step:3190 train loss:3.544333 +step:3191 train loss:3.591222 +step:3192 train loss:3.570961 +step:3193 train loss:3.566114 +step:3194 train loss:3.575686 +step:3195 train loss:3.636697 +step:3196 train loss:3.581506 +step:3197 train loss:3.590882 +step:3198 train loss:3.574819 +step:3199 train loss:3.526950 +step:3200 train loss:3.622481 +step:3201 train loss:3.560523 +step:3202 train loss:3.578906 +step:3203 train loss:3.511710 +step:3204 train loss:3.566291 +step:3205 train loss:3.588575 +step:3206 train loss:3.568103 +step:3207 train loss:3.522512 +step:3208 train loss:3.582203 +step:3209 train loss:3.544024 +step:3210 train loss:3.560074 +step:3211 train loss:3.596772 +step:3212 train loss:3.549316 +step:3213 train loss:3.525390 +step:3214 train loss:3.657220 +step:3215 train loss:3.544455 +step:3216 train loss:3.559614 +step:3217 train loss:3.625469 +step:3218 train loss:3.571748 +step:3219 train loss:3.560245 +step:3220 train loss:3.562037 +step:3221 train loss:3.560330 +step:3222 train loss:3.563880 +step:3223 train 
loss:3.555398 +step:3224 train loss:3.614687 +step:3225 train loss:3.594890 +step:3226 train loss:3.564355 +step:3227 train loss:3.630897 +step:3228 train loss:3.650218 +step:3229 train loss:3.578680 +step:3230 train loss:3.556224 +step:3231 train loss:3.572930 +step:3232 train loss:3.552721 +step:3233 train loss:3.627895 +step:3234 train loss:3.569009 +step:3235 train loss:3.575056 +step:3236 train loss:3.654473 +step:3237 train loss:3.540257 +step:3238 train loss:3.642525 +step:3239 train loss:3.558321 +step:3240 train loss:3.638746 +step:3241 train loss:3.783997 +step:3242 train loss:3.584244 +step:3243 train loss:3.687659 +step:3244 train loss:3.586874 +step:3245 train loss:3.571831 +step:3246 train loss:3.565600 +step:3247 train loss:3.558551 +step:3248 train loss:3.604096 +step:3249 train loss:3.575047 +step:3250 validation loss:3.556724 +step:3250 train loss:3.577950 +step:3251 train loss:3.608695 +step:3252 train loss:3.584672 +step:3253 train loss:3.568184 +step:3254 train loss:3.613929 +step:3255 train loss:3.579181 +step:3256 train loss:3.638960 +step:3257 train loss:3.542131 +step:3258 train loss:3.553895 +step:3259 train loss:3.561029 +step:3260 train loss:3.550186 +step:3261 train loss:3.583519 +step:3262 train loss:3.610089 +step:3263 train loss:3.612782 +step:3264 train loss:3.580013 +step:3265 train loss:3.594409 +step:3266 train loss:3.572418 +step:3267 train loss:3.597309 +step:3268 train loss:3.569885 +step:3269 train loss:3.591087 +step:3270 train loss:3.608388 +step:3271 train loss:3.556504 +step:3272 train loss:3.575953 +step:3273 train loss:3.565011 +step:3274 train loss:3.624657 +step:3275 train loss:3.584451 +step:3276 train loss:3.587597 +step:3277 train loss:3.632027 +step:3278 train loss:3.618998 +step:3279 train loss:3.540075 +step:3280 train loss:3.596836 +step:3281 train loss:3.566112 +step:3282 train loss:3.550040 +step:3283 train loss:3.570735 +step:3284 train loss:3.634304 +step:3285 train loss:3.597512 +step:3286 train loss:3.547060 +step:3287 train loss:3.568431 +step:3288 train loss:3.557419 +step:3289 train loss:3.644370 +step:3290 train loss:3.561196 +step:3291 train loss:3.543540 +step:3292 train loss:3.617065 +step:3293 train loss:3.556699 +step:3294 train loss:3.593684 +step:3295 train loss:3.761590 +step:3296 train loss:3.620008 +step:3297 train loss:3.600252 +step:3298 train loss:3.619843 +step:3299 train loss:3.581433 +step:3300 train loss:3.547047 +step:3301 train loss:3.602731 +step:3302 train loss:3.530287 +step:3303 train loss:3.546066 +step:3304 train loss:3.631508 +step:3305 train loss:3.544228 +step:3306 train loss:3.527907 +step:3307 train loss:3.553655 +step:3308 train loss:3.507291 +step:3309 train loss:3.664830 +step:3310 train loss:3.616765 +step:3311 train loss:3.645326 +step:3312 train loss:3.584115 +step:3313 train loss:3.587964 +step:3314 train loss:3.569199 +step:3315 train loss:3.551124 +step:3316 train loss:3.600791 +step:3317 train loss:3.539313 +step:3318 train loss:3.502347 +step:3319 train loss:3.619837 +step:3320 train loss:3.577974 +step:3321 train loss:3.539326 +step:3322 train loss:3.651111 +step:3323 train loss:3.567426 +step:3324 train loss:3.578021 +step:3325 train loss:3.618376 +step:3326 train loss:3.584918 +step:3327 train loss:3.656812 +step:3328 train loss:3.517977 +step:3329 train loss:3.604924 +step:3330 train loss:3.545703 +step:3331 train loss:3.652162 +step:3332 train loss:3.584002 +step:3333 train loss:3.596559 +step:3334 train loss:3.587591 +step:3335 train loss:3.570884 +step:3336 train loss:3.584841 
+step:3337 train loss:3.610960 +step:3338 train loss:3.533448 +step:3339 train loss:3.612443 +step:3340 train loss:3.525862 +step:3341 train loss:3.551284 +step:3342 train loss:3.608317 +step:3343 train loss:3.573756 +step:3344 train loss:3.581719 +step:3345 train loss:3.607685 +step:3346 train loss:3.544756 +step:3347 train loss:3.577248 +step:3348 train loss:3.616437 +step:3349 train loss:3.544943 +step:3350 train loss:3.551067 +step:3351 train loss:3.567703 +step:3352 train loss:3.639071 +step:3353 train loss:3.590809 +step:3354 train loss:3.554304 +step:3355 train loss:3.548666 +step:3356 train loss:3.555511 +step:3357 train loss:3.562250 +step:3358 train loss:3.601204 +step:3359 train loss:3.574163 +step:3360 train loss:3.625612 +step:3361 train loss:3.576529 +step:3362 train loss:3.552630 +step:3363 train loss:3.678013 +step:3364 train loss:3.607585 +step:3365 train loss:3.539359 +step:3366 train loss:3.626662 +step:3367 train loss:3.564354 +step:3368 train loss:3.559419 +step:3369 train loss:3.568502 +step:3370 train loss:3.597331 +step:3371 train loss:3.586487 +step:3372 train loss:3.588486 +step:3373 train loss:3.570344 +step:3374 train loss:3.632144 +step:3375 train loss:3.733002 +step:3376 train loss:3.614677 +step:3377 train loss:3.693496 +step:3378 train loss:3.577538 +step:3379 train loss:3.569515 +step:3380 train loss:3.601803 +step:3381 train loss:3.623268 +step:3382 train loss:3.598510 +step:3383 train loss:3.538212 +step:3384 train loss:3.566738 +step:3385 train loss:3.581258 +step:3386 train loss:3.527375 +step:3387 train loss:3.599643 +step:3388 train loss:3.588182 +step:3389 train loss:3.543527 +step:3390 train loss:3.578284 +step:3391 train loss:3.632500 +step:3392 train loss:3.577161 +step:3393 train loss:3.595952 +step:3394 train loss:3.576488 +step:3395 train loss:3.599359 +step:3396 train loss:3.639689 +step:3397 train loss:3.605372 +step:3398 train loss:3.616022 +step:3399 train loss:3.597441 +step:3400 train loss:3.638381 +step:3401 train loss:3.521323 +step:3402 train loss:3.550091 +step:3403 train loss:3.562590 +step:3404 train loss:3.593482 +step:3405 train loss:3.555417 +step:3406 train loss:3.612746 +step:3407 train loss:3.727397 +step:3408 train loss:3.576186 +step:3409 train loss:3.568027 +step:3410 train loss:3.621733 +step:3411 train loss:3.592932 +step:3412 train loss:3.602055 +step:3413 train loss:3.515722 +step:3414 train loss:3.531459 +step:3415 train loss:3.606612 +step:3416 train loss:3.596634 +step:3417 train loss:3.603544 +step:3418 train loss:3.531676 +step:3419 train loss:3.473393 +step:3420 train loss:3.558270 +step:3421 train loss:3.601521 +step:3422 train loss:3.571661 +step:3423 train loss:3.623156 +step:3424 train loss:3.682561 +step:3425 train loss:3.619365 +step:3426 train loss:3.568156 +step:3427 train loss:3.586504 +step:3428 train loss:3.635158 +step:3429 train loss:3.537062 +step:3430 train loss:3.594181 +step:3431 train loss:3.576702 +step:3432 train loss:3.545965 +step:3433 train loss:3.526218 +step:3434 train loss:3.588125 +step:3435 train loss:3.555951 +step:3436 train loss:3.604198 +step:3437 train loss:3.513865 +step:3438 train loss:3.574680 +step:3439 train loss:3.587217 +step:3440 train loss:3.514190 +step:3441 train loss:3.609078 +step:3442 train loss:3.587993 +step:3443 train loss:3.554926 +step:3444 train loss:3.551381 +step:3445 train loss:3.557326 +step:3446 train loss:3.512549 +step:3447 train loss:3.565718 +step:3448 train loss:3.655501 +step:3449 train loss:3.577634 +step:3450 train loss:3.568725 +step:3451 train 
loss:3.628061 +step:3452 train loss:3.639418 +step:3453 train loss:3.529899 +step:3454 train loss:3.561340 +step:3455 train loss:3.584379 +step:3456 train loss:3.581019 +step:3457 train loss:3.616311 +step:3458 train loss:3.600249 +step:3459 train loss:3.567046 +step:3460 train loss:3.622018 +step:3461 train loss:3.539625 +step:3462 train loss:3.566900 +step:3463 train loss:3.565053 +step:3464 train loss:3.587306 +step:3465 train loss:3.552596 +step:3466 train loss:3.656389 +step:3467 train loss:3.523212 +step:3468 train loss:3.667207 +step:3469 train loss:3.581780 +step:3470 train loss:3.590308 +step:3471 train loss:3.540833 +step:3472 train loss:3.600366 +step:3473 train loss:3.594682 +step:3474 train loss:3.545906 +step:3475 train loss:3.632322 +step:3476 train loss:3.561505 +step:3477 train loss:3.584556 +step:3478 train loss:3.511073 +step:3479 train loss:3.551915 +step:3480 train loss:3.505913 +step:3481 train loss:3.520375 +step:3482 train loss:3.522330 +step:3483 train loss:3.573374 +step:3484 train loss:3.549811 +step:3485 train loss:3.643065 +step:3486 train loss:3.779140 +step:3487 train loss:3.584939 +step:3488 train loss:3.600091 +step:3489 train loss:3.513565 +step:3490 train loss:3.543414 +step:3491 train loss:3.561643 +step:3492 train loss:3.617944 +step:3493 train loss:3.531962 +step:3494 train loss:3.596891 +step:3495 train loss:3.514516 +step:3496 train loss:3.675854 +step:3497 train loss:3.564138 +step:3498 train loss:3.585813 +step:3499 train loss:3.554135 +step:3500 validation loss:3.521747 total_sharp:4.2636e-03 L1_sharp:4.9105e-03 L2_sharp:1.1645e-03 L3_sharp:1.6831e-03 L4_sharp:1.0839e-03 L5_sharp:1.0667e-03 L6_sharp:1.1402e-03 L7_sharp:1.0716e-03 L8_sharp:9.4513e-04 L9_sharp:5.8771e-04 L10_sharp:4.1091e-04 L11_sharp:4.7574e-04 L12_sharp:3.5180e-04 total_fnorm:2.3835e+00 total_l1_linf:2.0370e+04 total_spectral:2.3835e+00 L1_fnorm:5.8155e-01 L2_fnorm:4.7426e-01 L3_fnorm:4.2735e-01 L4_fnorm:5.5330e-01 L5_fnorm:5.8326e-01 L6_fnorm:5.9405e-01 L7_fnorm:6.0024e-01 L8_fnorm:5.9892e-01 L9_fnorm:5.9968e-01 L10_fnorm:5.9691e-01 L11_fnorm:5.9287e-01 L12_fnorm:6.0251e-01 L1_l1linf:4.4337e-01 L2_l1linf:4.2663e-01 L3_l1linf:4.7820e-01 L4_l1linf:4.1101e-01 L5_l1linf:4.0955e-01 L6_l1linf:4.1526e-01 L7_l1linf:4.0890e-01 L8_l1linf:4.1056e-01 L9_l1linf:4.1191e-01 L10_l1linf:4.1780e-01 L11_l1linf:4.0685e-01 L12_l1linf:3.9822e-01 L1_spectral:1.2051e-02 L2_spectral:1.2054e-02 L3_spectral:1.8544e-02 L4_spectral:1.2043e-02 L5_spectral:1.2052e-02 L6_spectral:1.2046e-02 L7_spectral:1.2049e-02 L8_spectral:1.2050e-02 L9_spectral:1.2065e-02 L10_spectral:1.2043e-02 L11_spectral:1.2046e-02 L12_spectral:1.2045e-02 v_norm:2.3835e+00 cos_v_-g_hvp:3.1089e-02 g_hvp_norm:6.2604e-01 cos_v_-g_t:3.6821e-02 g_t_norm:5.3545e-01 hv_norm:6.3654e-01 cos_v_hv:1.5965e-02 hg_norm:3.6718e+01 cos_g_hg:6.8468e-01 v_par:6.2025e-03 v_perp:2.3835e+00 L1_cos_v_neg_g:1.8060e-02 L1_v_norm:5.8155e-01 L2_cos_v_neg_g:2.2833e-02 L2_v_norm:4.7426e-01 L3_cos_v_neg_g:2.7699e-02 L3_v_norm:4.2735e-01 L4_cos_v_neg_g:2.8451e-02 L4_v_norm:5.5330e-01 L5_cos_v_neg_g:3.4946e-02 L5_v_norm:5.8326e-01 L6_cos_v_neg_g:3.4352e-02 L6_v_norm:5.9405e-01 L7_cos_v_neg_g:3.4821e-02 L7_v_norm:6.0024e-01 L8_cos_v_neg_g:3.3152e-02 L8_v_norm:5.9892e-01 L9_cos_v_neg_g:3.5280e-02 L9_v_norm:5.9968e-01 L10_cos_v_neg_g:3.6049e-02 L10_v_norm:5.9691e-01 L11_cos_v_neg_g:4.6447e-02 L11_v_norm:5.9287e-01 L12_cos_v_neg_g:7.7367e-02 L12_v_norm:6.0251e-01 +step:3500 train loss:3.526814 +step:3501 train loss:3.622166 +step:3502 train loss:3.564592 +step:3503 train 
loss:3.589919 +step:3504 train loss:3.563109 +step:3505 train loss:3.573572 +step:3506 train loss:3.519181 +step:3507 train loss:3.601375 +step:3508 train loss:3.553961 +step:3509 train loss:3.585313 +step:3510 train loss:3.599814 +step:3511 train loss:3.568459 +step:3512 train loss:3.523074 +step:3513 train loss:3.550104 +step:3514 train loss:3.538092 +step:3515 train loss:3.558237 +step:3516 train loss:3.498054 +step:3517 train loss:3.606382 +step:3518 train loss:3.534410 +step:3519 train loss:3.585517 +step:3520 train loss:3.607216 +step:3521 train loss:3.569016 +step:3522 train loss:3.576356 +step:3523 train loss:3.654063 +step:3524 train loss:3.599027 +step:3525 train loss:3.573464 +step:3526 train loss:3.575363 +step:3527 train loss:3.601580 +step:3528 train loss:3.590599 +step:3529 train loss:3.555753 +step:3530 train loss:3.531580 +step:3531 train loss:3.582145 +step:3532 train loss:3.551295 +step:3533 train loss:3.566308 +step:3534 train loss:3.559493 +step:3535 train loss:3.537348 +step:3536 train loss:3.644079 +step:3537 train loss:3.612953 +step:3538 train loss:3.600909 +step:3539 train loss:3.553312 +step:3540 train loss:3.533789 +step:3541 train loss:3.516253 +step:3542 train loss:3.541559 +step:3543 train loss:3.525942 +step:3544 train loss:3.553645 +step:3545 train loss:3.581141 +step:3546 train loss:3.511354 +step:3547 train loss:3.579241 +step:3548 train loss:3.695592 +step:3549 train loss:3.505751 +step:3550 train loss:3.549410 +step:3551 train loss:3.509663 +step:3552 train loss:3.626890 +step:3553 train loss:3.564145 +step:3554 train loss:3.549667 +step:3555 train loss:3.564073 +step:3556 train loss:3.569477 +step:3557 train loss:3.532812 +step:3558 train loss:3.578585 +step:3559 train loss:3.545899 +step:3560 train loss:3.575536 +step:3561 train loss:3.484516 +step:3562 train loss:3.615502 +step:3563 train loss:3.564972 +step:3564 train loss:3.547888 +step:3565 train loss:3.549582 +step:3566 train loss:3.586993 +step:3567 train loss:3.533996 +step:3568 train loss:3.563442 +step:3569 train loss:3.534851 +step:3570 train loss:3.585457 +step:3571 train loss:3.546305 +step:3572 train loss:3.645831 +step:3573 train loss:3.529666 +step:3574 train loss:3.591632 +step:3575 train loss:3.535619 +step:3576 train loss:3.597946 +step:3577 train loss:3.602809 +step:3578 train loss:3.570632 +step:3579 train loss:3.571601 +step:3580 train loss:3.572165 +step:3581 train loss:3.525071 +step:3582 train loss:3.506917 +step:3583 train loss:3.551117 +step:3584 train loss:3.529922 +step:3585 train loss:3.510826 +step:3586 train loss:3.609314 +step:3587 train loss:3.662856 +step:3588 train loss:3.620909 +step:3589 train loss:3.576182 +step:3590 train loss:3.597706 +step:3591 train loss:3.545385 +step:3592 train loss:3.552891 +step:3593 train loss:3.575460 +step:3594 train loss:3.568663 +step:3595 train loss:3.553916 +step:3596 train loss:3.549714 +step:3597 train loss:3.540536 +step:3598 train loss:3.538385 +step:3599 train loss:3.576509 +step:3600 train loss:3.589956 +step:3601 train loss:3.571702 +step:3602 train loss:3.578286 +step:3603 train loss:3.655466 +step:3604 train loss:3.626344 +step:3605 train loss:3.561605 +step:3606 train loss:3.534773 +step:3607 train loss:3.507313 +step:3608 train loss:3.531953 +step:3609 train loss:3.561795 +step:3610 train loss:3.538767 +step:3611 train loss:3.632352 +step:3612 train loss:3.511285 +step:3613 train loss:3.590636 +step:3614 train loss:3.526482 +step:3615 train loss:3.507337 +step:3616 train loss:3.600983 +step:3617 train loss:3.584497 
+step:3618 train loss:3.588424 +step:3619 train loss:3.533482 +step:3620 train loss:3.543591 +step:3621 train loss:3.617165 +step:3622 train loss:3.559241 +step:3623 train loss:3.555601 +step:3624 train loss:3.572171 +step:3625 train loss:3.670907 +step:3626 train loss:3.545450 +step:3627 train loss:3.523052 +step:3628 train loss:3.557018 +step:3629 train loss:3.571805 +step:3630 train loss:3.565861 +step:3631 train loss:3.588223 +step:3632 train loss:3.571922 +step:3633 train loss:3.554725 +step:3634 train loss:3.545019 +step:3635 train loss:3.595840 +step:3636 train loss:3.529671 +step:3637 train loss:3.533506 +step:3638 train loss:3.521765 +step:3639 train loss:3.567587 +step:3640 train loss:3.551249 +step:3641 train loss:3.537061 +step:3642 train loss:3.556797 +step:3643 train loss:3.519840 +step:3644 train loss:3.562996 +step:3645 train loss:3.588443 +step:3646 train loss:3.607993 +step:3647 train loss:3.599208 +step:3648 train loss:3.674174 +step:3649 train loss:3.565526 +step:3650 train loss:3.576002 +step:3651 train loss:3.570836 +step:3652 train loss:3.583650 +step:3653 train loss:3.591699 +step:3654 train loss:3.609803 +step:3655 train loss:3.590738 +step:3656 train loss:3.598375 +step:3657 train loss:3.522581 +step:3658 train loss:3.582722 +step:3659 train loss:3.550896 +step:3660 train loss:3.542585 +step:3661 train loss:3.552934 +step:3662 train loss:3.573640 +step:3663 train loss:3.576555 +step:3664 train loss:3.554050 +step:3665 train loss:3.553982 +step:3666 train loss:3.598006 +step:3667 train loss:3.556818 +step:3668 train loss:3.603821 +step:3669 train loss:3.603727 +step:3670 train loss:3.603114 +step:3671 train loss:3.562320 +step:3672 train loss:3.621905 +step:3673 train loss:3.606301 +step:3674 train loss:3.631821 +step:3675 train loss:3.614820 +step:3676 train loss:3.570119 +step:3677 train loss:3.548397 +step:3678 train loss:3.598236 +step:3679 train loss:3.572056 +step:3680 train loss:3.556672 +step:3681 train loss:3.635368 +step:3682 train loss:3.587960 +step:3683 train loss:3.583005 +step:3684 train loss:3.566153 +step:3685 train loss:3.556098 +step:3686 train loss:3.581750 +step:3687 train loss:3.589540 +step:3688 train loss:3.627614 +step:3689 train loss:3.521744 +step:3690 train loss:3.525917 +step:3691 train loss:3.605775 +step:3692 train loss:3.539041 +step:3693 train loss:3.551794 +step:3694 train loss:3.662110 +step:3695 train loss:3.513224 +step:3696 train loss:3.598071 +step:3697 train loss:3.638849 +step:3698 train loss:3.644367 +step:3699 train loss:3.567705 +step:3700 train loss:3.546071 +step:3701 train loss:3.567512 +step:3702 train loss:3.533326 +step:3703 train loss:3.588209 +step:3704 train loss:3.744156 +step:3705 train loss:3.567995 +step:3706 train loss:3.583594 +step:3707 train loss:3.568924 +step:3708 train loss:3.580888 +step:3709 train loss:3.585054 +step:3710 train loss:3.638596 +step:3711 train loss:3.507055 +step:3712 train loss:3.569690 +step:3713 train loss:3.594953 +step:3714 train loss:3.539119 +step:3715 train loss:3.558588 +step:3716 train loss:3.559105 +step:3717 train loss:3.550530 +step:3718 train loss:3.572564 +step:3719 train loss:3.595248 +step:3720 train loss:3.550539 +step:3721 train loss:3.558901 +step:3722 train loss:3.731764 +step:3723 train loss:3.560339 +step:3724 train loss:3.595706 +step:3725 train loss:3.570099 +step:3726 train loss:3.573876 +step:3727 train loss:3.550762 +step:3728 train loss:3.605199 +step:3729 train loss:3.567565 +step:3730 train loss:3.613991 +step:3731 train loss:3.591911 +step:3732 train 
loss:3.557645 +step:3733 train loss:3.522114 +step:3734 train loss:3.524451 +step:3735 train loss:3.524134 +step:3736 train loss:3.558689 +step:3737 train loss:3.610731 +step:3738 train loss:3.600549 +step:3739 train loss:3.548210 +step:3740 train loss:3.540864 +step:3741 train loss:3.596973 +step:3742 train loss:3.645542 +step:3743 train loss:3.589917 +step:3744 train loss:3.626724 +step:3745 train loss:3.570331 +step:3746 train loss:3.587652 +step:3747 train loss:3.583292 +step:3748 train loss:3.636509 +step:3749 train loss:3.576403 +step:3750 validation loss:3.511273 +step:3750 train loss:3.562146 +step:3751 train loss:3.607696 +step:3752 train loss:3.586007 +step:3753 train loss:3.572268 +step:3754 train loss:3.571614 +step:3755 train loss:3.578437 +step:3756 train loss:3.523669 +step:3757 train loss:3.561591 +step:3758 train loss:3.577039 +step:3759 train loss:3.552775 +step:3760 train loss:3.555332 +step:3761 train loss:3.511931 +step:3762 train loss:3.630341 +step:3763 train loss:3.535422 +step:3764 train loss:3.537555 +step:3765 train loss:3.572653 +step:3766 train loss:3.561690 +step:3767 train loss:3.553267 +step:3768 train loss:3.627791 +step:3769 train loss:3.608460 +step:3770 train loss:3.566453 +step:3771 train loss:3.611798 +step:3772 train loss:3.606609 +step:3773 train loss:3.599769 +step:3774 train loss:3.592461 +step:3775 train loss:3.597148 +step:3776 train loss:3.532171 +step:3777 train loss:3.584243 +step:3778 train loss:3.502734 +step:3779 train loss:3.593097 +step:3780 train loss:3.575755 +step:3781 train loss:3.492313 +step:3782 train loss:3.551670 +step:3783 train loss:3.542311 +step:3784 train loss:3.545767 +step:3785 train loss:3.569786 +step:3786 train loss:3.574180 +step:3787 train loss:3.586147 +step:3788 train loss:3.612651 +step:3789 train loss:3.728355 +step:3790 train loss:3.607375 +step:3791 train loss:3.576220 +step:3792 train loss:3.582257 +step:3793 train loss:3.554521 +step:3794 train loss:3.663926 +step:3795 train loss:3.581983 +step:3796 train loss:3.588306 +step:3797 train loss:3.565502 +step:3798 train loss:3.516737 +step:3799 train loss:3.538602 +step:3800 train loss:3.554699 +step:3801 train loss:3.543161 +step:3802 train loss:3.510242 +step:3803 train loss:3.529936 +step:3804 train loss:3.655808 +step:3805 train loss:3.638090 +step:3806 train loss:3.573326 +step:3807 train loss:3.623853 +step:3808 train loss:3.551828 +step:3809 train loss:3.569603 +step:3810 train loss:3.562333 +step:3811 train loss:3.587374 +step:3812 train loss:3.586740 +step:3813 train loss:3.523690 +step:3814 train loss:3.529588 +step:3815 train loss:3.574130 +step:3816 train loss:3.568116 +step:3817 train loss:3.574634 +step:3818 train loss:3.592376 +step:3819 train loss:3.529408 +step:3820 train loss:3.598762 +step:3821 train loss:3.542380 +step:3822 train loss:3.614356 +step:3823 train loss:3.541775 +step:3824 train loss:3.576838 +step:3825 train loss:3.566430 +step:3826 train loss:3.578407 +step:3827 train loss:3.609319 +step:3828 train loss:3.545043 +step:3829 train loss:3.582383 +step:3830 train loss:3.558189 +step:3831 train loss:3.633662 +step:3832 train loss:3.609325 +step:3833 train loss:3.619238 +step:3834 train loss:3.615310 +step:3835 train loss:3.618770 +step:3836 train loss:3.620019 +step:3837 train loss:3.595433 +step:3838 train loss:3.574946 +step:3839 train loss:3.617321 +step:3840 train loss:3.580060 +step:3841 train loss:3.536660 +step:3842 train loss:3.600722 +step:3843 train loss:3.600448 +step:3844 train loss:3.539404 +step:3845 train loss:3.544803 
+step:3846 train loss:3.544422 +step:3847 train loss:3.562415 +step:3848 train loss:3.608426 +step:3849 train loss:3.576989 +step:3850 train loss:3.626263 +step:3851 train loss:3.535738 +step:3852 train loss:3.610426 +step:3853 train loss:3.530654 +step:3854 train loss:3.566621 +step:3855 train loss:3.567000 +step:3856 train loss:3.609571 +step:3857 train loss:3.523923 +step:3858 train loss:3.513122 +step:3859 train loss:3.550681 +step:3860 train loss:3.540823 +step:3861 train loss:3.592214 +step:3862 train loss:3.531444 +step:3863 train loss:3.548202 +step:3864 train loss:3.540550 +step:3865 train loss:3.539023 +step:3866 train loss:3.600629 +step:3867 train loss:3.526677 +step:3868 train loss:3.572037 +step:3869 train loss:3.507106 +step:3870 train loss:3.553595 +step:3871 train loss:3.558117 +step:3872 train loss:3.522887 +step:3873 train loss:3.551682 +step:3874 train loss:3.547525 +step:3875 train loss:3.579513 +step:3876 train loss:3.551325 +step:3877 train loss:3.529247 +step:3878 train loss:3.587730 +step:3879 train loss:3.557671 +step:3880 train loss:3.573456 +step:3881 train loss:3.604893 +step:3882 train loss:3.543683 +step:3883 train loss:3.524725 +step:3884 train loss:3.520803 +step:3885 train loss:3.543948 +step:3886 train loss:3.576519 +step:3887 train loss:3.568849 +step:3888 train loss:3.538906 +step:3889 train loss:3.507330 +step:3890 train loss:3.564713 +step:3891 train loss:3.601405 +step:3892 train loss:3.551455 +step:3893 train loss:3.501318 +step:3894 train loss:3.508391 +step:3895 train loss:3.610044 +step:3896 train loss:3.663564 +step:3897 train loss:3.504444 +step:3898 train loss:3.543781 +step:3899 train loss:3.587415 +step:3900 train loss:3.611575 +step:3901 train loss:3.590109 +step:3902 train loss:3.601301 +step:3903 train loss:3.565566 +step:3904 train loss:3.553299 +step:3905 train loss:3.562832 +step:3906 train loss:3.507365 +step:3907 train loss:3.585532 +step:3908 train loss:3.573172 +step:3909 train loss:3.526556 +step:3910 train loss:3.529017 +step:3911 train loss:3.622963 +step:3912 train loss:3.557541 +step:3913 train loss:3.568721 +step:3914 train loss:3.436287 +step:3915 train loss:3.568064 +step:3916 train loss:3.555089 +step:3917 train loss:3.714610 +step:3918 train loss:3.547799 +step:3919 train loss:3.602377 +step:3920 train loss:3.551632 +step:3921 train loss:3.528535 +step:3922 train loss:3.562736 +step:3923 train loss:3.582806 +step:3924 train loss:3.588569 +step:3925 train loss:3.575339 +step:3926 train loss:3.591928 +step:3927 train loss:3.645576 +step:3928 train loss:3.601488 +step:3929 train loss:3.600147 +step:3930 train loss:3.568874 +step:3931 train loss:3.578815 +step:3932 train loss:3.528203 +step:3933 train loss:3.554373 +step:3934 train loss:3.538039 +step:3935 train loss:3.543125 +step:3936 train loss:3.532257 +step:3937 train loss:3.544372 +step:3938 train loss:3.591118 +step:3939 train loss:3.585627 +step:3940 train loss:3.591236 +step:3941 train loss:3.573538 +step:3942 train loss:3.620564 +step:3943 train loss:3.504609 +step:3944 train loss:3.543141 +step:3945 train loss:3.523691 +step:3946 train loss:3.574203 +step:3947 train loss:3.573018 +step:3948 train loss:3.554372 +step:3949 train loss:3.536014 +step:3950 train loss:3.581198 +step:3951 train loss:3.558086 +step:3952 train loss:3.596933 +step:3953 train loss:3.608394 +step:3954 train loss:3.576005 +step:3955 train loss:3.613906 +step:3956 train loss:3.593821 +step:3957 train loss:3.592865 +step:3958 train loss:3.538582 +step:3959 train loss:3.536168 +step:3960 train 
loss:3.541535 +step:3961 train loss:3.553451 +step:3962 train loss:3.545272 +step:3963 train loss:3.638035 +step:3964 train loss:3.546031 +step:3965 train loss:3.497367 +step:3966 train loss:3.522824 +step:3967 train loss:3.524381 +step:3968 train loss:3.581723 +step:3969 train loss:3.595460 +step:3970 train loss:3.582261 +step:3971 train loss:3.541239 +step:3972 train loss:3.564199 +step:3973 train loss:3.555775 +step:3974 train loss:3.591617 +step:3975 train loss:3.564737 +step:3976 train loss:3.567562 +step:3977 train loss:3.554315 +step:3978 train loss:3.540706 +step:3979 train loss:3.600759 +step:3980 train loss:3.485268 +step:3981 train loss:3.537551 +step:3982 train loss:3.512873 +step:3983 train loss:3.580430 +step:3984 train loss:3.480712 +step:3985 train loss:3.585680 +step:3986 train loss:3.585524 +step:3987 train loss:3.559103 +step:3988 train loss:3.550639 +step:3989 train loss:3.531441 +step:3990 train loss:3.558052 +step:3991 train loss:3.547445 +step:3992 train loss:3.575303 +step:3993 train loss:3.568799 +step:3994 train loss:3.532586 +step:3995 train loss:3.541131 +step:3996 train loss:3.532295 +step:3997 train loss:3.546611 +step:3998 train loss:3.547603 +step:3999 train loss:3.549742 +step:4000 validation loss:3.507454 total_sharp:7.5768e-03 L1_sharp:1.6086e-02 L2_sharp:1.1589e-02 L3_sharp:5.5978e-03 L4_sharp:1.7625e-03 L5_sharp:1.3749e-03 L6_sharp:1.2969e-03 L7_sharp:1.2482e-03 L8_sharp:1.1335e-03 L9_sharp:7.2341e-04 L10_sharp:4.6427e-04 L11_sharp:5.1508e-04 L12_sharp:5.3901e-04 total_fnorm:2.3616e+00 total_l1_linf:2.0130e+04 total_spectral:2.3616e+00 L1_fnorm:5.6190e-01 L2_fnorm:4.2343e-01 L3_fnorm:4.1179e-01 L4_fnorm:5.4561e-01 L5_fnorm:5.8188e-01 L6_fnorm:5.9480e-01 L7_fnorm:6.0078e-01 L8_fnorm:5.9989e-01 L9_fnorm:5.9865e-01 L10_fnorm:5.9586e-01 L11_fnorm:5.8901e-01 L12_fnorm:6.0080e-01 L1_l1linf:4.2875e-01 L2_l1linf:4.1428e-01 L3_l1linf:6.0859e-01 L4_l1linf:3.9931e-01 L5_l1linf:4.1197e-01 L6_l1linf:4.1193e-01 L7_l1linf:4.1034e-01 L8_l1linf:4.1026e-01 L9_l1linf:4.1118e-01 L10_l1linf:4.1183e-01 L11_l1linf:4.0785e-01 L12_l1linf:3.9781e-01 L1_spectral:1.2044e-02 L2_spectral:1.3707e-02 L3_spectral:2.2605e-02 L4_spectral:1.2042e-02 L5_spectral:1.2049e-02 L6_spectral:1.2055e-02 L7_spectral:1.2048e-02 L8_spectral:1.2052e-02 L9_spectral:1.2070e-02 L10_spectral:1.2053e-02 L11_spectral:1.2044e-02 L12_spectral:1.2044e-02 v_norm:2.3616e+00 cos_v_-g_hvp:3.1345e-02 g_hvp_norm:7.4317e-01 cos_v_-g_t:3.5647e-02 g_t_norm:7.0226e-01 hv_norm:1.0602e+00 cos_v_hv:1.6878e-02 hg_norm:4.6435e+01 cos_g_hg:6.3004e-01 v_par:5.4584e-03 v_perp:2.3616e+00 L1_cos_v_neg_g:2.6977e-02 L1_v_norm:5.6190e-01 L2_cos_v_neg_g:4.0757e-02 L2_v_norm:4.2343e-01 L3_cos_v_neg_g:3.5206e-02 L3_v_norm:4.1179e-01 L4_cos_v_neg_g:3.1343e-02 L4_v_norm:5.4561e-01 L5_cos_v_neg_g:3.3905e-02 L5_v_norm:5.8188e-01 L6_cos_v_neg_g:3.1929e-02 L6_v_norm:5.9480e-01 L7_cos_v_neg_g:3.2610e-02 L7_v_norm:6.0078e-01 L8_cos_v_neg_g:3.0596e-02 L8_v_norm:5.9989e-01 L9_cos_v_neg_g:3.4465e-02 L9_v_norm:5.9865e-01 L10_cos_v_neg_g:3.5893e-02 L10_v_norm:5.9586e-01 L11_cos_v_neg_g:4.6853e-02 L11_v_norm:5.8901e-01 L12_cos_v_neg_g:8.2211e-02 L12_v_norm:6.0080e-01 +step:4000 train loss:3.557024 +step:4001 train loss:3.577695 +step:4002 train loss:3.524297 +step:4003 train loss:3.550997 +step:4004 train loss:3.572024 +step:4005 train loss:3.511522 +step:4006 train loss:3.517400 +step:4007 train loss:3.508086 +step:4008 train loss:3.550270 +step:4009 train loss:3.552579 +step:4010 train loss:3.480325 +step:4011 train loss:3.584960 +step:4012 train 
loss:3.518065 +step:4013 train loss:3.547018 +step:4014 train loss:3.534450 +step:4015 train loss:3.494284 +step:4016 train loss:3.537571 +step:4017 train loss:3.601721 +step:4018 train loss:3.540904 +step:4019 train loss:3.550039 +step:4020 train loss:3.499100 +step:4021 train loss:3.527997 +step:4022 train loss:3.526222 +step:4023 train loss:3.586227 +step:4024 train loss:3.512645 +step:4025 train loss:3.495224 +step:4026 train loss:3.595896 +step:4027 train loss:3.504511 +step:4028 train loss:3.569911 +step:4029 train loss:3.443820 +step:4030 train loss:3.513848 +step:4031 train loss:3.522452 +step:4032 train loss:3.561452 +step:4033 train loss:3.532433 +step:4034 train loss:3.510635 +step:4035 train loss:3.536593 +step:4036 train loss:3.544785 +step:4037 train loss:3.497462 +step:4038 train loss:3.478589 +step:4039 train loss:3.534711 +step:4040 train loss:3.570370 +step:4041 train loss:3.624133 +step:4042 train loss:3.550260 +step:4043 train loss:3.575748 +step:4044 train loss:3.528666 +step:4045 train loss:3.527110 +step:4046 train loss:3.492326 +step:4047 train loss:3.514266 +step:4048 train loss:3.530708 +step:4049 train loss:3.594289 +step:4050 train loss:3.527348 +step:4051 train loss:3.522647 +step:4052 train loss:3.517730 +step:4053 train loss:3.556962 +step:4054 train loss:3.516333 +step:4055 train loss:3.510886 +step:4056 train loss:3.587035 +step:4057 train loss:3.530915 +step:4058 train loss:3.540070 +step:4059 train loss:3.507067 +step:4060 train loss:3.565818 +step:4061 train loss:3.501471 +step:4062 train loss:3.523009 +step:4063 train loss:3.560994 +step:4064 train loss:3.529479 +step:4065 train loss:3.512014 +step:4066 train loss:3.574937 +step:4067 train loss:3.604374 +step:4068 train loss:3.587831 +step:4069 train loss:3.594794 +step:4070 train loss:3.561983 +step:4071 train loss:3.558016 +step:4072 train loss:3.495281 +step:4073 train loss:3.534555 +step:4074 train loss:3.448179 +step:4075 train loss:3.585654 +step:4076 train loss:3.496321 +step:4077 train loss:3.523357 +step:4078 train loss:3.477483 +step:4079 train loss:3.562003 +step:4080 train loss:3.534098 +step:4081 train loss:3.556994 +step:4082 train loss:3.513780 +step:4083 train loss:3.497528 +step:4084 train loss:3.561827 +step:4085 train loss:3.567019 +step:4086 train loss:3.612506 +step:4087 train loss:3.598254 +step:4088 train loss:3.500230 +step:4089 train loss:3.555997 +step:4090 train loss:3.489940 +step:4091 train loss:3.536132 +step:4092 train loss:3.495427 +step:4093 train loss:3.736593 +step:4094 train loss:3.579103 +step:4095 train loss:3.491832 +step:4096 train loss:3.609490 +step:4097 train loss:3.469772 +step:4098 train loss:3.529027 +step:4099 train loss:3.502861 +step:4100 train loss:3.556744 +step:4101 train loss:3.519931 +step:4102 train loss:3.522703 +step:4103 train loss:3.503023 +step:4104 train loss:3.506686 +step:4105 train loss:3.513176 +step:4106 train loss:3.471327 +step:4107 train loss:3.480392 +step:4108 train loss:3.495317 +step:4109 train loss:3.558594 +step:4110 train loss:3.534976 +step:4111 train loss:3.450884 +step:4112 train loss:3.495727 +step:4113 train loss:3.511142 +step:4114 train loss:3.568177 +step:4115 train loss:3.540523 +step:4116 train loss:3.579116 +step:4117 train loss:3.494589 +step:4118 train loss:3.506147 +step:4119 train loss:3.509779 +step:4120 train loss:3.487239 +step:4121 train loss:3.539991 +step:4122 train loss:3.529435 +step:4123 train loss:3.464203 +step:4124 train loss:3.519100 +step:4125 train loss:3.490547 +step:4126 train loss:3.508639 
+step:4127 train loss:3.531188 +step:4128 train loss:3.512775 +step:4129 train loss:3.602499 +step:4130 train loss:3.526506 +step:4131 train loss:3.481946 +step:4132 train loss:3.483994 +step:4133 train loss:3.522562 +step:4134 train loss:3.509170 +step:4135 train loss:3.492066 +step:4136 train loss:3.514404 +step:4137 train loss:3.532066 +step:4138 train loss:3.589526 +step:4139 train loss:3.472342 +step:4140 train loss:3.558641 +step:4141 train loss:3.484299 +step:4142 train loss:3.527819 +step:4143 train loss:3.529411 +step:4144 train loss:3.542705 +step:4145 train loss:3.584691 +step:4146 train loss:3.464140 +step:4147 train loss:3.502379 +step:4148 train loss:3.522157 +step:4149 train loss:3.531933 +step:4150 train loss:3.561037 +step:4151 train loss:3.521520 +step:4152 train loss:3.576914 +step:4153 train loss:3.529345 +step:4154 train loss:3.551094 +step:4155 train loss:3.510660 +step:4156 train loss:3.584424 +step:4157 train loss:3.544280 +step:4158 train loss:3.549309 +step:4159 train loss:3.502946 +step:4160 train loss:3.467481 +step:4161 train loss:3.544332 +step:4162 train loss:3.495970 +step:4163 train loss:3.504519 +step:4164 train loss:3.516671 +step:4165 train loss:3.502828 +step:4166 train loss:3.499029 +step:4167 train loss:3.532044 +step:4168 train loss:3.543284 +step:4169 train loss:3.495183 +step:4170 train loss:3.554126 +step:4171 train loss:3.524117 +step:4172 train loss:3.532537 +step:4173 train loss:3.555301 +step:4174 train loss:3.524269 +step:4175 train loss:3.564178 +step:4176 train loss:3.526343 +step:4177 train loss:3.565740 +step:4178 train loss:3.550607 +step:4179 train loss:3.604719 +step:4180 train loss:3.605310 +step:4181 train loss:3.416292 +step:4182 train loss:3.561548 +step:4183 train loss:3.498023 +step:4184 train loss:3.541067 +step:4185 train loss:3.550828 +step:4186 train loss:3.512583 +step:4187 train loss:3.526784 +step:4188 train loss:3.520711 +step:4189 train loss:3.536690 +step:4190 train loss:3.487973 +step:4191 train loss:3.476999 +step:4192 train loss:3.506344 +step:4193 train loss:3.435569 +step:4194 train loss:3.527681 +step:4195 train loss:3.475502 +step:4196 train loss:3.515805 +step:4197 train loss:3.572859 +step:4198 train loss:3.508630 +step:4199 train loss:3.515971 +step:4200 train loss:3.540528 +step:4201 train loss:3.550622 +step:4202 train loss:3.560387 +step:4203 train loss:3.506129 +step:4204 train loss:3.538339 +step:4205 train loss:3.576789 +step:4206 train loss:3.553745 +step:4207 train loss:3.538804 +step:4208 train loss:3.510826 +step:4209 train loss:3.558141 +step:4210 train loss:3.521582 +step:4211 train loss:3.581748 +step:4212 train loss:3.472631 +step:4213 train loss:3.531808 +step:4214 train loss:3.556236 +step:4215 train loss:3.578328 +step:4216 train loss:3.565734 +step:4217 train loss:3.571491 +step:4218 train loss:3.519415 +step:4219 train loss:3.556448 +step:4220 train loss:3.505035 +step:4221 train loss:3.519432 +step:4222 train loss:3.488870 +step:4223 train loss:3.594004 +step:4224 train loss:3.544623 +step:4225 train loss:3.554300 +step:4226 train loss:3.588364 +step:4227 train loss:3.499733 +step:4228 train loss:3.564276 +step:4229 train loss:3.531157 +step:4230 train loss:3.610298 +step:4231 train loss:3.630556 +step:4232 train loss:3.561748 +step:4233 train loss:3.543281 +step:4234 train loss:3.586890 +step:4235 train loss:3.523113 +step:4236 train loss:3.554947 +step:4237 train loss:3.494820 +step:4238 train loss:3.492475 +step:4239 train loss:3.527603 +step:4240 train loss:3.514138 +step:4241 train 
loss:3.536938 +step:4242 train loss:3.542912 +step:4243 train loss:3.559786 +step:4244 train loss:3.609251 +step:4245 train loss:3.539045 +step:4246 train loss:3.616217 +step:4247 train loss:3.649849 +step:4248 train loss:3.657474 +step:4249 train loss:3.563160 +step:4250 validation loss:3.505758 +step:4250 train loss:3.589798 +step:4251 train loss:3.609015 +step:4252 train loss:3.547488 +step:4253 train loss:3.502429 +step:4254 train loss:3.531981 +step:4255 train loss:3.532923 +step:4256 train loss:3.555890 +step:4257 train loss:3.557544 +step:4258 train loss:3.590488 +step:4259 train loss:3.511833 +step:4260 train loss:3.587459 +step:4261 train loss:3.542458 +step:4262 train loss:3.594087 +step:4263 train loss:3.584088 +step:4264 train loss:3.592716 +step:4265 train loss:3.641143 +step:4266 train loss:3.751412 +step:4267 train loss:3.533640 +step:4268 train loss:3.574489 +step:4269 train loss:3.513397 +step:4270 train loss:3.587822 +step:4271 train loss:3.534650 +step:4272 train loss:3.532433 +step:4273 train loss:3.559576 +step:4274 train loss:3.492474 +step:4275 train loss:3.509017 +step:4276 train loss:3.595577 +step:4277 train loss:3.598769 +step:4278 train loss:3.570314 +step:4279 train loss:3.549589 +step:4280 train loss:3.551471 +step:4281 train loss:3.527033 +step:4282 train loss:3.548497 +step:4283 train loss:3.508981 +step:4284 train loss:3.536829 +step:4285 train loss:3.571292 +step:4286 train loss:3.511913 +step:4287 train loss:3.547121 +step:4288 train loss:3.517947 +step:4289 train loss:3.520531 +step:4290 train loss:3.560140 +step:4291 train loss:3.541280 +step:4292 train loss:3.554659 +step:4293 train loss:3.565447 +step:4294 train loss:3.564720 +step:4295 train loss:3.548172 +step:4296 train loss:3.608806 +step:4297 train loss:3.556448 +step:4298 train loss:3.544264 +step:4299 train loss:3.549706 +step:4300 train loss:3.467284 +step:4301 train loss:3.525871 +step:4302 train loss:3.605047 +step:4303 train loss:3.568069 +step:4304 train loss:3.580043 +step:4305 train loss:3.559536 +step:4306 train loss:3.520405 +step:4307 train loss:3.672243 +step:4308 train loss:3.627382 +step:4309 train loss:3.564198 +step:4310 train loss:3.576019 +step:4311 train loss:3.511665 +step:4312 train loss:3.539212 +step:4313 train loss:3.504823 +step:4314 train loss:3.597283 +step:4315 train loss:3.541826 +step:4316 train loss:3.540495 +step:4317 train loss:3.562371 +step:4318 train loss:3.585496 +step:4319 train loss:3.573439 +step:4320 train loss:3.573509 +step:4321 train loss:3.576717 +step:4322 train loss:3.538277 +step:4323 train loss:3.543804 +step:4324 train loss:3.597797 +step:4325 train loss:3.607414 +step:4326 train loss:3.540891 +step:4327 train loss:3.519550 +step:4328 train loss:3.533290 +step:4329 train loss:3.569692 +step:4330 train loss:3.540415 +step:4331 train loss:3.535209 +step:4332 train loss:3.572491 +step:4333 train loss:3.523042 +step:4334 train loss:3.584787 +step:4335 train loss:3.602189 +step:4336 train loss:3.585602 +step:4337 train loss:3.526573 +step:4338 train loss:3.537215 +step:4339 train loss:3.518155 +step:4340 train loss:3.519685 +step:4341 train loss:3.476514 +step:4342 train loss:3.521660 +step:4343 train loss:3.581977 +step:4344 train loss:3.591260 +step:4345 train loss:3.565238 +step:4346 train loss:3.587523 +step:4347 train loss:3.519001 +step:4348 train loss:3.521303 +step:4349 train loss:3.498347 +step:4350 train loss:3.533536 +step:4351 train loss:3.456766 +step:4352 train loss:3.512841 +step:4353 train loss:3.596712 +step:4354 train loss:3.546495 
+step:4355 train loss:3.479805 +step:4356 train loss:3.537672 +step:4357 train loss:3.527628 +step:4358 train loss:3.553104 +step:4359 train loss:3.543293 +step:4360 train loss:3.525623 +step:4361 train loss:3.581259 +step:4362 train loss:3.527936 +step:4363 train loss:3.489503 +step:4364 train loss:3.602334 +step:4365 train loss:3.643133 +step:4366 train loss:3.529446 +step:4367 train loss:3.568153 +step:4368 train loss:3.525199 +step:4369 train loss:3.557496 +step:4370 train loss:3.589168 +step:4371 train loss:3.561068 +step:4372 train loss:3.531372 +step:4373 train loss:3.594727 +step:4374 train loss:3.597554 +step:4375 train loss:3.583952 +step:4376 train loss:3.611074 +step:4377 train loss:3.629722 +step:4378 train loss:3.609082 +step:4379 train loss:3.523829 +step:4380 train loss:3.642456 +step:4381 train loss:3.554203 +step:4382 train loss:3.577064 +step:4383 train loss:3.566658 +step:4384 train loss:3.625084 +step:4385 train loss:3.529628 +step:4386 train loss:3.507038 +step:4387 train loss:3.545333 +step:4388 train loss:3.513760 +step:4389 train loss:3.494587 +step:4390 train loss:3.489961 +step:4391 train loss:3.551345 +step:4392 train loss:3.494642 +step:4393 train loss:3.601452 +step:4394 train loss:3.529478 +step:4395 train loss:3.491363 +step:4396 train loss:3.610861 +step:4397 train loss:3.549294 +step:4398 train loss:3.557789 +step:4399 train loss:3.541231 +step:4400 train loss:3.572453 +step:4401 train loss:3.509223 +step:4402 train loss:3.519286 +step:4403 train loss:3.547763 +step:4404 train loss:3.606167 +step:4405 train loss:3.507052 +step:4406 train loss:3.530293 +step:4407 train loss:3.581064 +step:4408 train loss:3.549685 +step:4409 train loss:3.485161 +step:4410 train loss:3.554464 +step:4411 train loss:3.543224 +step:4412 train loss:3.542068 +step:4413 train loss:3.572896 +step:4414 train loss:3.520124 +step:4415 train loss:3.520019 +step:4416 train loss:3.528747 +step:4417 train loss:3.538159 +step:4418 train loss:3.716049 +step:4419 train loss:3.562216 +step:4420 train loss:3.529279 +step:4421 train loss:3.533329 +step:4422 train loss:3.483144 +step:4423 train loss:3.525223 +step:4424 train loss:3.460120 +step:4425 train loss:3.496541 +step:4426 train loss:3.499739 +step:4427 train loss:3.489631 +step:4428 train loss:3.567780 +step:4429 train loss:3.558608 +step:4430 train loss:3.665363 +step:4431 train loss:3.487102 +step:4432 train loss:3.529785 +step:4433 train loss:3.610146 +step:4434 train loss:3.674617 +step:4435 train loss:3.592731 +step:4436 train loss:3.632327 +step:4437 train loss:3.547674 +step:4438 train loss:3.497194 +step:4439 train loss:3.550711 +step:4440 train loss:3.592669 +step:4441 train loss:3.527315 +step:4442 train loss:3.496653 +step:4443 train loss:3.546269 +step:4444 train loss:3.529454 +step:4445 train loss:3.578430 +step:4446 train loss:3.503883 +step:4447 train loss:3.512486 +step:4448 train loss:3.547013 +step:4449 train loss:3.588101 +step:4450 train loss:3.457792 +step:4451 train loss:3.511971 +step:4452 train loss:3.522169 +step:4453 train loss:3.512671 +step:4454 train loss:3.506131 +step:4455 train loss:3.535951 +step:4456 train loss:3.542557 +step:4457 train loss:3.516349 +step:4458 train loss:3.524365 +step:4459 train loss:3.542172 +step:4460 train loss:3.543947 +step:4461 train loss:3.545114 +step:4462 train loss:3.523035 +step:4463 train loss:3.564437 +step:4464 train loss:3.580534 +step:4465 train loss:3.555433 +step:4466 train loss:3.591660 +step:4467 train loss:3.487444 +step:4468 train loss:3.484293 +step:4469 train 
loss:3.577099 +step:4470 train loss:3.594140 +step:4471 train loss:3.530883 +step:4472 train loss:3.546014 +step:4473 train loss:3.487561 +step:4474 train loss:3.568620 +step:4475 train loss:3.550776 +step:4476 train loss:3.594102 +step:4477 train loss:3.555209 +step:4478 train loss:3.500237 +step:4479 train loss:3.565790 +step:4480 train loss:3.518190 +step:4481 train loss:3.558211 +step:4482 train loss:3.621862 +step:4483 train loss:3.512020 +step:4484 train loss:3.672476 +step:4485 train loss:3.559738 +step:4486 train loss:3.491645 +step:4487 train loss:3.556786 +step:4488 train loss:3.548749 +step:4489 train loss:3.581188 +step:4490 train loss:3.560583 +step:4491 train loss:3.533878 +step:4492 train loss:3.502127 +step:4493 train loss:3.548883 +step:4494 train loss:3.511074 +step:4495 train loss:3.531974 +step:4496 train loss:3.512521 +step:4497 train loss:3.489355 +step:4498 train loss:3.527106 +step:4499 train loss:3.570989 +step:4500 validation loss:3.499329 total_sharp:8.6881e-03 L1_sharp:1.3409e-02 L2_sharp:6.7160e-03 L3_sharp:3.5417e-03 L4_sharp:1.6234e-03 L5_sharp:1.3473e-03 L6_sharp:1.8098e-03 L7_sharp:1.8463e-03 L8_sharp:2.0216e-03 L9_sharp:9.8360e-04 L10_sharp:5.5874e-04 L11_sharp:5.7632e-04 L12_sharp:8.4024e-04 total_fnorm:2.3464e+00 total_l1_linf:1.9987e+04 total_spectral:2.3464e+00 L1_fnorm:5.6287e-01 L2_fnorm:3.9581e-01 L3_fnorm:4.1677e-01 L4_fnorm:5.3339e-01 L5_fnorm:5.8002e-01 L6_fnorm:5.9198e-01 L7_fnorm:5.9962e-01 L8_fnorm:5.9753e-01 L9_fnorm:5.9550e-01 L10_fnorm:5.9315e-01 L11_fnorm:5.8293e-01 L12_fnorm:6.0033e-01 L1_l1linf:4.3295e-01 L2_l1linf:4.2325e-01 L3_l1linf:5.0892e-01 L4_l1linf:3.9643e-01 L5_l1linf:4.0700e-01 L6_l1linf:4.1070e-01 L7_l1linf:4.0622e-01 L8_l1linf:4.0643e-01 L9_l1linf:4.1028e-01 L10_l1linf:4.1147e-01 L11_l1linf:4.0629e-01 L12_l1linf:3.9947e-01 L1_spectral:1.2044e-02 L2_spectral:1.6104e-02 L3_spectral:1.9534e-02 L4_spectral:1.2039e-02 L5_spectral:1.2046e-02 L6_spectral:1.2047e-02 L7_spectral:1.2053e-02 L8_spectral:1.2057e-02 L9_spectral:1.2051e-02 L10_spectral:1.2068e-02 L11_spectral:1.2043e-02 L12_spectral:1.2046e-02 v_norm:2.3464e+00 cos_v_-g_hvp:2.6824e-02 g_hvp_norm:8.4930e-01 cos_v_-g_t:3.0902e-02 g_t_norm:7.8458e-01 hv_norm:1.5115e+00 cos_v_hv:1.3487e-02 hg_norm:8.8903e+01 cos_g_hg:6.7440e-01 v_par:6.2981e-03 v_perp:2.3464e+00 L1_cos_v_neg_g:2.0733e-02 L1_v_norm:5.6287e-01 L2_cos_v_neg_g:3.3709e-02 L2_v_norm:3.9581e-01 L3_cos_v_neg_g:3.1503e-02 L3_v_norm:4.1677e-01 L4_cos_v_neg_g:2.6539e-02 L4_v_norm:5.3339e-01 L5_cos_v_neg_g:3.1525e-02 L5_v_norm:5.8002e-01 L6_cos_v_neg_g:3.2489e-02 L6_v_norm:5.9198e-01 L7_cos_v_neg_g:3.2779e-02 L7_v_norm:5.9962e-01 L8_cos_v_neg_g:3.1010e-02 L8_v_norm:5.9753e-01 L9_cos_v_neg_g:3.2034e-02 L9_v_norm:5.9550e-01 L10_cos_v_neg_g:3.2129e-02 L10_v_norm:5.9315e-01 L11_cos_v_neg_g:4.1054e-02 L11_v_norm:5.8293e-01 L12_cos_v_neg_g:6.6742e-02 L12_v_norm:6.0033e-01 +step:4500 train loss:3.510579 +step:4501 train loss:3.544361 +step:4502 train loss:3.561533 +step:4503 train loss:3.551188 +step:4504 train loss:3.535362 +step:4505 train loss:3.623516 +step:4506 train loss:3.547436 +step:4507 train loss:3.495310 +step:4508 train loss:3.505431 +step:4509 train loss:3.577378 +step:4510 train loss:3.478481 +step:4511 train loss:3.550482 +step:4512 train loss:3.495694 +step:4513 train loss:3.537956 +step:4514 train loss:3.502267 +step:4515 train loss:3.548825 +step:4516 train loss:3.568060 +step:4517 train loss:3.648222 +step:4518 train loss:3.560042 +step:4519 train loss:3.601600 +step:4520 train loss:3.501233 +step:4521 train 
loss:3.539899 +step:4522 train loss:3.487237 +step:4523 train loss:3.573025 +step:4524 train loss:3.607797 +step:4525 train loss:3.549007 +step:4526 train loss:3.505445 +step:4527 train loss:3.523458 +step:4528 train loss:3.473797 +step:4529 train loss:3.504391 +step:4530 train loss:3.500662 +step:4531 train loss:3.492506 +step:4532 train loss:3.519826 +step:4533 train loss:3.518044 +step:4534 train loss:3.473623 +step:4535 train loss:3.539696 +step:4536 train loss:3.562011 +step:4537 train loss:3.507299 +step:4538 train loss:3.532383 +step:4539 train loss:3.542036 +step:4540 train loss:3.514818 +step:4541 train loss:3.610863 +step:4542 train loss:3.535644 +step:4543 train loss:3.530711 +step:4544 train loss:3.477422 +step:4545 train loss:3.517212 +step:4546 train loss:3.554719 +step:4547 train loss:3.519598 +step:4548 train loss:3.524324 +step:4549 train loss:3.490804 +step:4550 train loss:3.516328 +step:4551 train loss:3.557906 +step:4552 train loss:3.539883 +step:4553 train loss:3.536270 +step:4554 train loss:3.547654 +step:4555 train loss:3.531516 +step:4556 train loss:3.539149 +step:4557 train loss:3.512130 +step:4558 train loss:3.537389 +step:4559 train loss:3.511971 +step:4560 train loss:3.507847 +step:4561 train loss:3.508697 +step:4562 train loss:3.533616 +step:4563 train loss:3.443482 +step:4564 train loss:3.544506 +step:4565 train loss:3.494320 +step:4566 train loss:3.514597 +step:4567 train loss:3.516911 +step:4568 train loss:3.534056 +step:4569 train loss:3.475469 +step:4570 train loss:3.573516 +step:4571 train loss:3.540061 +step:4572 train loss:3.512235 +step:4573 train loss:3.498238 +step:4574 train loss:3.511436 +step:4575 train loss:3.558834 +step:4576 train loss:3.600645 +step:4577 train loss:3.543921 +step:4578 train loss:3.585215 +step:4579 train loss:3.619589 +step:4580 train loss:3.529314 +step:4581 train loss:3.620863 +step:4582 train loss:3.545785 +step:4583 train loss:3.542820 +step:4584 train loss:3.515865 +step:4585 train loss:3.534376 +step:4586 train loss:3.508659 +step:4587 train loss:3.493716 +step:4588 train loss:3.517035 +step:4589 train loss:3.524202 +step:4590 train loss:3.482018 +step:4591 train loss:3.519486 +step:4592 train loss:3.516364 +step:4593 train loss:3.519380 +step:4594 train loss:3.541569 +step:4595 train loss:3.600485 +step:4596 train loss:3.541932 +step:4597 train loss:3.611854 +step:4598 train loss:3.524876 +step:4599 train loss:3.477357 +step:4600 train loss:3.493321 +step:4601 train loss:3.582292 +step:4602 train loss:3.569129 +step:4603 train loss:3.533115 +step:4604 train loss:3.535180 +step:4605 train loss:3.493217 +step:4606 train loss:3.490279 +step:4607 train loss:3.545221 +step:4608 train loss:3.598415 +step:4609 train loss:3.528210 +step:4610 train loss:3.504058 +step:4611 train loss:3.551658 +step:4612 train loss:3.524885 +step:4613 train loss:3.532171 +step:4614 train loss:3.516314 +step:4615 train loss:3.480602 +step:4616 train loss:3.517239 +step:4617 train loss:3.472111 +step:4618 train loss:3.570997 +step:4619 train loss:3.559703 +step:4620 train loss:3.528503 +step:4621 train loss:3.613121 +step:4622 train loss:3.520916 +step:4623 train loss:3.482441 +step:4624 train loss:3.484466 +step:4625 train loss:3.515289 +step:4626 train loss:3.590502 +step:4627 train loss:3.559262 +step:4628 train loss:3.609293 +step:4629 train loss:3.497843 +step:4630 train loss:3.500111 +step:4631 train loss:3.493464 +step:4632 train loss:3.492188 +step:4633 train loss:3.481044 +step:4634 train loss:3.487151 +step:4635 train loss:3.585624 
+step:4636 train loss:3.428688 +step:4637 train loss:3.542158 +step:4638 train loss:3.578607 +step:4639 train loss:3.518853 +step:4640 train loss:3.513354 +step:4641 train loss:3.506281 +step:4642 train loss:3.559450 +step:4643 train loss:3.487141 +step:4644 train loss:3.554918 +step:4645 train loss:3.490405 +step:4646 train loss:3.483491 +step:4647 train loss:3.558961 +step:4648 train loss:3.520028 +step:4649 train loss:3.536027 +step:4650 train loss:3.494802 +step:4651 train loss:3.573154 +step:4652 train loss:3.522727 +step:4653 train loss:3.631215 +step:4654 train loss:3.589067 +step:4655 train loss:3.494959 +step:4656 train loss:3.475288 +step:4657 train loss:3.542546 +step:4658 train loss:3.526722 +step:4659 train loss:3.494125 +step:4660 train loss:3.535384 +step:4661 train loss:3.546638 +step:4662 train loss:3.506354 +step:4663 train loss:3.469571 +step:4664 train loss:3.512210 +step:4665 train loss:3.501740 +step:4666 train loss:3.504317 +step:4667 train loss:3.525087 +step:4668 train loss:3.493132 +step:4669 train loss:3.494299 +step:4670 train loss:3.510888 +step:4671 train loss:3.511476 +step:4672 train loss:3.557069 +step:4673 train loss:3.493548 +step:4674 train loss:3.499332 +step:4675 train loss:3.511288 +step:4676 train loss:3.543933 +step:4677 train loss:3.503554 +step:4678 train loss:3.532533 +step:4679 train loss:3.500772 +step:4680 train loss:3.460891 +step:4681 train loss:3.505412 +step:4682 train loss:3.527190 +step:4683 train loss:3.502238 +step:4684 train loss:3.547122 +step:4685 train loss:3.553958 +step:4686 train loss:3.543136 +step:4687 train loss:3.532375 +step:4688 train loss:3.500529 +step:4689 train loss:3.535766 +step:4690 train loss:3.523461 +step:4691 train loss:3.486680 +step:4692 train loss:3.532696 +step:4693 train loss:3.507618 +step:4694 train loss:3.560644 +step:4695 train loss:3.492092 +step:4696 train loss:3.623374 +step:4697 train loss:3.563270 +step:4698 train loss:3.542799 +step:4699 train loss:3.556725 +step:4700 train loss:3.538260 +step:4701 train loss:3.565318 +step:4702 train loss:3.561153 +step:4703 train loss:3.408872 +step:4704 train loss:3.534685 +step:4705 train loss:3.524803 +step:4706 train loss:3.507806 +step:4707 train loss:3.574899 +step:4708 train loss:3.567439 +step:4709 train loss:3.498777 +step:4710 train loss:3.546720 +step:4711 train loss:3.517477 +step:4712 train loss:3.507592 +step:4713 train loss:3.582208 +step:4714 train loss:3.590306 +step:4715 train loss:3.563902 +step:4716 train loss:3.716006 +step:4717 train loss:3.623888 +step:4718 train loss:3.590057 +step:4719 train loss:3.503820 +step:4720 train loss:3.537589 +step:4721 train loss:3.581123 +step:4722 train loss:3.590498 +step:4723 train loss:3.585666 +step:4724 train loss:3.619121 +step:4725 train loss:3.502370 +step:4726 train loss:3.609662 +step:4727 train loss:3.514795 +step:4728 train loss:3.519594 +step:4729 train loss:3.507726 +step:4730 train loss:3.507124 +step:4731 train loss:3.571280 +step:4732 train loss:3.584143 +step:4733 train loss:3.563145 +step:4734 train loss:3.504674 +step:4735 train loss:3.521024 +step:4736 train loss:3.500551 +step:4737 train loss:3.527114 +step:4738 train loss:3.520097 +step:4739 train loss:3.557199 +step:4740 train loss:3.510737 +step:4741 train loss:3.546311 +step:4742 train loss:3.503685 +step:4743 train loss:3.589627 +step:4744 train loss:3.498567 +step:4745 train loss:3.540935 +step:4746 train loss:3.513166 +step:4747 train loss:3.460886 +step:4748 train loss:3.525772 +step:4749 train loss:3.536537 +step:4750 
validation loss:3.493831 +step:4750 train loss:3.565601 +step:4751 train loss:3.560650 +step:4752 train loss:3.629374 +step:4753 train loss:3.570403 +step:4754 train loss:3.563446 +step:4755 train loss:3.576613 +step:4756 train loss:3.502872 +step:4757 train loss:3.543267 +step:4758 train loss:3.554191 +step:4759 train loss:3.516679 +step:4760 train loss:3.511539 +step:4761 train loss:3.530401 +step:4762 train loss:3.544017 +step:4763 train loss:3.535786 +step:4764 train loss:3.473878 +step:4765 train loss:3.557832 +step:4766 train loss:3.521487 +step:4767 train loss:3.469851 +step:4768 train loss:3.548432 +step:4769 train loss:3.504891 +step:4770 train loss:3.534049 +step:4771 train loss:3.510258 +step:4772 train loss:3.445332 +step:4773 train loss:3.538476 +step:4774 train loss:3.544268 +step:4775 train loss:3.500844 +step:4776 train loss:3.547543 +step:4777 train loss:3.500224 +step:4778 train loss:3.532381 +step:4779 train loss:3.505893 +step:4780 train loss:3.485648 +step:4781 train loss:3.544684 +step:4782 train loss:3.528156 +step:4783 train loss:3.587478 +step:4784 train loss:3.517617 +step:4785 train loss:3.473264 +step:4786 train loss:3.544621 +step:4787 train loss:3.508975 +step:4788 train loss:3.504333 +step:4789 train loss:3.488290 +step:4790 train loss:3.549402 +step:4791 train loss:3.499820 +step:4792 train loss:3.552984 +step:4793 train loss:3.564259 +step:4794 train loss:3.554435 +step:4795 train loss:3.565534 +step:4796 train loss:3.542999 +step:4797 train loss:3.535005 +step:4798 train loss:3.571744 +step:4799 train loss:3.484757 +step:4800 train loss:3.519145 +step:4801 train loss:3.497530 +step:4802 train loss:3.528610 +step:4803 train loss:3.555026 +step:4804 train loss:3.543097 +step:4805 train loss:3.532865 +step:4806 train loss:3.530760 +step:4807 train loss:3.521173 +step:4808 train loss:3.504178 +step:4809 train loss:3.459771 +step:4810 train loss:3.539732 +step:4811 train loss:3.576632 +step:4812 train loss:3.504616 +step:4813 train loss:3.509416 +step:4814 train loss:3.553946 +step:4815 train loss:3.493410 +step:4816 train loss:3.502284 +step:4817 train loss:3.536091 +step:4818 train loss:3.561958 +step:4819 train loss:3.483736 +step:4820 train loss:3.511204 +step:4821 train loss:3.508491 +step:4822 train loss:3.530380 +step:4823 train loss:3.524399 +step:4824 train loss:3.569247 +step:4825 train loss:3.512638 +step:4826 train loss:3.515311 +step:4827 train loss:3.528110 +step:4828 train loss:3.500175 +step:4829 train loss:3.565414 +step:4830 train loss:3.462606 +step:4831 train loss:3.508025 +step:4832 train loss:3.493512 +step:4833 train loss:3.472149 +step:4834 train loss:3.520667 +step:4835 train loss:3.556574 +step:4836 train loss:3.479479 +step:4837 train loss:3.548153 +step:4838 train loss:3.540080 +step:4839 train loss:3.575561 +step:4840 train loss:3.494442 +step:4841 train loss:3.489488 +step:4842 train loss:3.495153 +step:4843 train loss:3.564205 +step:4844 train loss:3.537525 +step:4845 train loss:3.523674 +step:4846 train loss:3.585736 +step:4847 train loss:3.500682 +step:4848 train loss:3.568044 +step:4849 train loss:3.551529 +step:4850 train loss:3.548269 +step:4851 train loss:3.557588 +step:4852 train loss:3.543566 +step:4853 train loss:3.570240 +step:4854 train loss:3.525472 +step:4855 train loss:3.552217 +step:4856 train loss:3.524592 +step:4857 train loss:3.523237 +step:4858 train loss:3.449818 +step:4859 train loss:3.494655 +step:4860 train loss:3.563345 +step:4861 train loss:3.519906 +step:4862 train loss:3.550506 +step:4863 train 
loss:3.528492 +step:4864 train loss:3.499489 +step:4865 train loss:3.545290 +step:4866 train loss:3.522188 +step:4867 train loss:3.551462 +step:4868 train loss:3.516995 +step:4869 train loss:3.506006 +step:4870 train loss:3.670893 +step:4871 train loss:3.569197 +step:4872 train loss:3.508327 +step:4873 train loss:3.565543 +step:4874 train loss:3.549814 +step:4875 train loss:3.534436 +step:4876 train loss:3.459271 +step:4877 train loss:3.496449 +step:4878 train loss:3.453224 +step:4879 train loss:3.450802 +step:4880 train loss:3.486804 +step:4881 train loss:3.481179 +step:4882 train loss:3.509233 +step:4883 train loss:3.537667 +step:4884 train loss:3.548428 +step:4885 train loss:3.522189 +step:4886 train loss:3.513460 +step:4887 train loss:3.543743 +step:4888 train loss:3.590668 +step:4889 train loss:3.521671 +step:4890 train loss:3.488216 +step:4891 train loss:3.503176 +step:4892 train loss:3.531226 +step:4893 train loss:3.507050 +step:4894 train loss:3.493092 +step:4895 train loss:3.560819 +step:4896 train loss:3.572842 +step:4897 train loss:3.568316 +step:4898 train loss:3.484947 +step:4899 train loss:3.470239 +step:4900 train loss:3.514662 +step:4901 train loss:3.525470 +step:4902 train loss:3.509335 +step:4903 train loss:3.463634 +step:4904 train loss:3.525071 +step:4905 train loss:3.510591 +step:4906 train loss:3.578262 +step:4907 train loss:3.511737 +step:4908 train loss:3.509655 +step:4909 train loss:3.544931 +step:4910 train loss:3.529851 +step:4911 train loss:3.506323 +step:4912 train loss:3.620022 +step:4913 train loss:3.479419 +step:4914 train loss:3.571563 +step:4915 train loss:3.524217 +step:4916 train loss:3.530214 +step:4917 train loss:3.529924 +step:4918 train loss:3.521121 +step:4919 train loss:3.439905 +step:4920 train loss:3.473456 +step:4921 train loss:3.531448 +step:4922 train loss:3.513994 +step:4923 train loss:3.529670 +step:4924 train loss:3.511241 +step:4925 train loss:3.518796 +step:4926 train loss:3.477592 +step:4927 train loss:3.532733 +step:4928 train loss:3.497960 +step:4929 train loss:3.507903 +step:4930 train loss:3.511395 +step:4931 train loss:3.447253 +step:4932 train loss:3.532407 +step:4933 train loss:3.524037 +step:4934 train loss:3.472546 +step:4935 train loss:3.524484 +step:4936 train loss:3.537521 +step:4937 train loss:3.481965 +step:4938 train loss:3.563214 +step:4939 train loss:3.470204 +step:4940 train loss:3.503711 +step:4941 train loss:3.563136 +step:4942 train loss:3.502729 +step:4943 train loss:3.506499 +step:4944 train loss:3.502808 +step:4945 train loss:3.558046 +step:4946 train loss:3.461231 +step:4947 train loss:3.602030 +step:4948 train loss:3.457107 +step:4949 train loss:3.495602 +step:4950 train loss:3.553274 +step:4951 train loss:3.507797 +step:4952 train loss:3.529031 +step:4953 train loss:3.521744 +step:4954 train loss:3.459339 +step:4955 train loss:3.501961 +step:4956 train loss:3.527875 +step:4957 train loss:3.506851 +step:4958 train loss:3.533644 +step:4959 train loss:3.578738 +step:4960 train loss:3.619063 +step:4961 train loss:3.509983 +step:4962 train loss:3.545164 +step:4963 train loss:3.524582 +step:4964 train loss:3.485182 +step:4965 train loss:3.559029 +step:4966 train loss:3.489653 +step:4967 train loss:3.587192 +step:4968 train loss:3.558440 +step:4969 train loss:3.496688 +step:4970 train loss:3.533985 +step:4971 train loss:3.512519 +step:4972 train loss:3.553859 +step:4973 train loss:3.755196 +step:4974 train loss:3.514335 +step:4975 train loss:3.604994 +step:4976 train loss:3.563645 +step:4977 train loss:3.602270 
+step:4978 train loss:3.500870 +step:4979 train loss:3.497812 +step:4980 train loss:3.520608 +step:4981 train loss:3.472650 +step:4982 train loss:3.542433 +step:4983 train loss:3.505881 +step:4984 train loss:3.520880 +step:4985 train loss:3.522739 +step:4986 train loss:3.459219 +step:4987 train loss:3.569425 +step:4988 train loss:3.477662 +step:4989 train loss:3.557404 +step:4990 train loss:3.507522 +step:4991 train loss:3.487651 +step:4992 train loss:3.496953 +step:4993 train loss:3.565860 +step:4994 train loss:3.541023 +step:4995 train loss:3.541219 +step:4996 train loss:3.573876 +step:4997 train loss:3.573072 +step:4998 train loss:3.540502 +step:4999 train loss:3.493396 +step:5000 validation loss:3.488152 total_sharp:5.2176e-03 L1_sharp:4.4714e-03 L2_sharp:2.7320e-03 L3_sharp:2.9885e-03 L4_sharp:1.5701e-03 L5_sharp:1.0737e-03 L6_sharp:1.5181e-03 L7_sharp:1.5340e-03 L8_sharp:1.4082e-03 L9_sharp:7.9080e-04 L10_sharp:4.4366e-04 L11_sharp:4.4336e-04 L12_sharp:3.4635e-04 total_fnorm:2.3486e+00 total_l1_linf:2.0019e+04 total_spectral:2.3486e+00 L1_fnorm:5.4963e-01 L2_fnorm:4.0305e-01 L3_fnorm:4.2078e-01 L4_fnorm:5.2373e-01 L5_fnorm:5.7607e-01 L6_fnorm:5.8974e-01 L7_fnorm:5.9981e-01 L8_fnorm:5.9897e-01 L9_fnorm:5.9780e-01 L10_fnorm:5.9793e-01 L11_fnorm:5.8511e-01 L12_fnorm:6.0063e-01 L1_l1linf:4.3639e-01 L2_l1linf:3.8063e-01 L3_l1linf:4.5352e-01 L4_l1linf:3.9697e-01 L5_l1linf:4.0277e-01 L6_l1linf:4.0523e-01 L7_l1linf:4.0749e-01 L8_l1linf:4.0652e-01 L9_l1linf:4.0949e-01 L10_l1linf:4.1416e-01 L11_l1linf:4.0426e-01 L12_l1linf:4.0039e-01 L1_spectral:1.2047e-02 L2_spectral:1.2932e-02 L3_spectral:1.7856e-02 L4_spectral:1.4366e-02 L5_spectral:1.2044e-02 L6_spectral:1.2046e-02 L7_spectral:1.2048e-02 L8_spectral:1.2055e-02 L9_spectral:1.2046e-02 L10_spectral:1.2063e-02 L11_spectral:1.2044e-02 L12_spectral:1.2046e-02 v_norm:2.3486e+00 cos_v_-g_hvp:2.7591e-02 g_hvp_norm:6.8017e-01 cos_v_-g_t:3.1357e-02 g_t_norm:6.3311e-01 hv_norm:1.0670e+00 cos_v_hv:1.1484e-02 hg_norm:2.3880e+02 cos_g_hg:2.3649e-02 v_par:4.6315e-03 v_perp:2.3486e+00 L1_cos_v_neg_g:1.7905e-02 L1_v_norm:5.4963e-01 L2_cos_v_neg_g:2.1357e-02 L2_v_norm:4.0305e-01 L3_cos_v_neg_g:2.7176e-02 L3_v_norm:4.2078e-01 L4_cos_v_neg_g:2.4046e-02 L4_v_norm:5.2373e-01 L5_cos_v_neg_g:2.8958e-02 L5_v_norm:5.7607e-01 L6_cos_v_neg_g:2.9173e-02 L6_v_norm:5.8974e-01 L7_cos_v_neg_g:2.9613e-02 L7_v_norm:5.9981e-01 L8_cos_v_neg_g:2.9100e-02 L8_v_norm:5.9897e-01 L9_cos_v_neg_g:3.0799e-02 L9_v_norm:5.9780e-01 L10_cos_v_neg_g:3.1817e-02 L10_v_norm:5.9793e-01 L11_cos_v_neg_g:4.0157e-02 L11_v_norm:5.8511e-01 L12_cos_v_neg_g:7.2439e-02 L12_v_norm:6.0063e-01 +step:5000 train loss:3.469208 +step:5001 train loss:3.502992 +step:5002 train loss:3.526908 +step:5003 train loss:3.507936 +step:5004 train loss:3.511385 +step:5005 train loss:3.439279 +step:5006 train loss:3.536694 +step:5007 train loss:3.500938 +step:5008 train loss:3.567303 +step:5009 train loss:3.538865 +step:5010 train loss:3.466369 +step:5011 train loss:3.514885 +step:5012 train loss:3.497128 +step:5013 train loss:3.719243 +step:5014 train loss:3.528672 +step:5015 train loss:3.546634 +step:5016 train loss:3.518779 +step:5017 train loss:3.458637 +step:5018 train loss:3.550912 +step:5019 train loss:3.512153 +step:5020 train loss:3.509469 +step:5021 train loss:3.504296 +step:5022 train loss:3.487724 +step:5023 train loss:3.551606 +step:5024 train loss:3.574292 +step:5025 train loss:3.501111 +step:5026 train loss:3.479232 +step:5027 train loss:3.505470 +step:5028 train loss:3.510818 +step:5029 train loss:3.548055 
+step:5030 train loss:3.474123 +step:5031 train loss:3.498152 +step:5032 train loss:3.503823 +step:5033 train loss:3.606779 +step:5034 train loss:3.521926 +step:5035 train loss:3.534382 +step:5036 train loss:3.524209 +step:5037 train loss:3.545172 +step:5038 train loss:3.531326 +step:5039 train loss:3.539375 +step:5040 train loss:3.488284 +step:5041 train loss:3.541677 +step:5042 train loss:3.498264 +step:5043 train loss:3.501986 +step:5044 train loss:3.575616 +step:5045 train loss:3.509658 +step:5046 train loss:3.505806 +step:5047 train loss:3.521385 +step:5048 train loss:3.492245 +step:5049 train loss:3.537528 +step:5050 train loss:3.507514 +step:5051 train loss:3.562696 +step:5052 train loss:3.497401 +step:5053 train loss:3.470153 +step:5054 train loss:3.453437 +step:5055 train loss:3.474750 +step:5056 train loss:3.559416 +step:5057 train loss:3.526802 +step:5058 train loss:3.500974 +step:5059 train loss:3.527724 +step:5060 train loss:3.484439 +step:5061 train loss:3.601526 +step:5062 train loss:3.481114 +step:5063 train loss:3.465232 +step:5064 train loss:3.572967 +step:5065 train loss:3.537895 +step:5066 train loss:3.554712 +step:5067 train loss:3.543567 +step:5068 train loss:3.530201 +step:5069 train loss:3.547000 +step:5070 train loss:3.478729 +step:5071 train loss:3.506522 +step:5072 train loss:3.587214 +step:5073 train loss:3.504047 +step:5074 train loss:3.569400 +step:5075 train loss:3.582749 +step:5076 train loss:3.568423 +step:5077 train loss:3.503294 +step:5078 train loss:3.499581 +step:5079 train loss:3.483088 +step:5080 train loss:3.504135 +step:5081 train loss:3.523074 +step:5082 train loss:3.495485 +step:5083 train loss:3.509159 +step:5084 train loss:3.490477 +step:5085 train loss:3.519584 +step:5086 train loss:3.507313 +step:5087 train loss:3.497344 +step:5088 train loss:3.476154 +step:5089 train loss:3.539667 +step:5090 train loss:3.521629 +step:5091 train loss:3.499830 +step:5092 train loss:3.518340 +step:5093 train loss:3.502945 +step:5094 train loss:3.503304 +step:5095 train loss:3.566832 +step:5096 train loss:3.698567 +step:5097 train loss:3.484406 +step:5098 train loss:3.534870 +step:5099 train loss:3.505996 +step:5100 train loss:3.496557 +step:5101 train loss:3.545525 +step:5102 train loss:3.507331 +step:5103 train loss:3.516346 +step:5104 train loss:3.552512 +step:5105 train loss:3.457246 +step:5106 train loss:3.546177 +step:5107 train loss:3.527975 +step:5108 train loss:3.486588 +step:5109 train loss:3.475981 +step:5110 train loss:3.524397 +step:5111 train loss:3.503647 +step:5112 train loss:3.471926 +step:5113 train loss:3.510611 +step:5114 train loss:3.502976 +step:5115 train loss:3.471687 +step:5116 train loss:3.494704 +step:5117 train loss:3.563145 +step:5118 train loss:3.441190 +step:5119 train loss:3.561035 +step:5120 train loss:3.468179 +step:5121 train loss:3.504899 +step:5122 train loss:3.556981 +step:5123 train loss:3.478801 +step:5124 train loss:3.522815 +step:5125 train loss:3.531162 +step:5126 train loss:3.472618 +step:5127 train loss:3.517805 +step:5128 train loss:3.509326 +step:5129 train loss:3.471221 +step:5130 train loss:3.529746 +step:5131 train loss:3.565028 +step:5132 train loss:3.573575 +step:5133 train loss:3.555845 +step:5134 train loss:3.589056 +step:5135 train loss:3.534169 +step:5136 train loss:3.483695 +step:5137 train loss:3.505578 +step:5138 train loss:3.515310 +step:5139 train loss:3.532976 +step:5140 train loss:3.573152 +step:5141 train loss:3.545174 +step:5142 train loss:3.607304 +step:5143 train loss:3.636728 +step:5144 train 
loss:3.604414 +step:5145 train loss:3.497416 +step:5146 train loss:3.576813 +step:5147 train loss:3.616462 +step:5148 train loss:3.533517 +step:5149 train loss:3.524838 +step:5150 train loss:3.506797 +step:5151 train loss:3.511993 +step:5152 train loss:3.506430 +step:5153 train loss:3.500733 +step:5154 train loss:3.485386 +step:5155 train loss:3.483932 +step:5156 train loss:3.513053 +step:5157 train loss:3.502707 +step:5158 train loss:3.539773 +step:5159 train loss:3.563553 +step:5160 train loss:3.527887 +step:5161 train loss:3.540145 +step:5162 train loss:3.453067 +step:5163 train loss:3.517351 +step:5164 train loss:3.528237 +step:5165 train loss:3.526785 +step:5166 train loss:3.484730 +step:5167 train loss:3.479072 +step:5168 train loss:3.540650 +step:5169 train loss:3.525754 +step:5170 train loss:3.553840 +step:5171 train loss:3.511189 +step:5172 train loss:3.504817 +step:5173 train loss:3.493949 +step:5174 train loss:3.561503 +step:5175 train loss:3.511579 +step:5176 train loss:3.548650 +step:5177 train loss:3.599195 +step:5178 train loss:3.725879 +step:5179 train loss:3.542365 +step:5180 train loss:3.495400 +step:5181 train loss:3.499627 +step:5182 train loss:3.519286 +step:5183 train loss:3.509672 +step:5184 train loss:3.471273 +step:5185 train loss:3.466869 +step:5186 train loss:3.513717 +step:5187 train loss:3.539869 +step:5188 train loss:3.521674 +step:5189 train loss:3.501811 +step:5190 train loss:3.442261 +step:5191 train loss:3.562917 +step:5192 train loss:3.468327 +step:5193 train loss:3.485744 +step:5194 train loss:3.577662 +step:5195 train loss:3.493202 +step:5196 train loss:3.527931 +step:5197 train loss:3.416589 +step:5198 train loss:3.489327 +step:5199 train loss:3.494007 +step:5200 train loss:3.495401 +step:5201 train loss:3.472837 +step:5202 train loss:3.513205 +step:5203 train loss:3.502190 +step:5204 train loss:3.510651 +step:5205 train loss:3.503982 +step:5206 train loss:3.537828 +step:5207 train loss:3.490554 +step:5208 train loss:3.557486 +step:5209 train loss:3.469942 +step:5210 train loss:3.498100 +step:5211 train loss:3.505167 +step:5212 train loss:3.528222 +step:5213 train loss:3.492064 +step:5214 train loss:3.539724 +step:5215 train loss:3.520367 +step:5216 train loss:3.485176 +step:5217 train loss:3.525643 +step:5218 train loss:3.546102 +step:5219 train loss:3.454078 +step:5220 train loss:3.489986 +step:5221 train loss:3.523118 +step:5222 train loss:3.594619 +step:5223 train loss:3.548644 +step:5224 train loss:3.531765 +step:5225 train loss:3.475618 +step:5226 train loss:3.469728 +step:5227 train loss:3.463583 +step:5228 train loss:3.545105 +step:5229 train loss:3.528423 +step:5230 train loss:3.516248 +step:5231 train loss:3.442743 +step:5232 train loss:3.497458 +step:5233 train loss:3.475729 +step:5234 train loss:3.525942 +step:5235 train loss:3.522034 +step:5236 train loss:3.542551 +step:5237 train loss:3.437805 +step:5238 train loss:3.502093 +step:5239 train loss:3.436202 +step:5240 train loss:3.520728 +step:5241 train loss:3.485802 +step:5242 train loss:3.506402 +step:5243 train loss:3.480780 +step:5244 train loss:3.506419 +step:5245 train loss:3.505678 +step:5246 train loss:3.631588 +step:5247 train loss:3.468913 +step:5248 train loss:3.613371 +step:5249 train loss:3.469403 +step:5250 validation loss:3.480626 +step:5250 train loss:3.538104 +step:5251 train loss:3.489577 +step:5252 train loss:3.517745 +step:5253 train loss:3.448001 +step:5254 train loss:3.518048 +step:5255 train loss:3.466919 +step:5256 train loss:3.522853 +step:5257 train loss:3.506398 
+step:5258 train loss:3.538917 +step:5259 train loss:3.481082 +step:5260 train loss:3.484831 +step:5261 train loss:3.491357 +step:5262 train loss:3.494750 +step:5263 train loss:3.495601 +step:5264 train loss:3.552267 +step:5265 train loss:3.445290 +step:5266 train loss:3.522522 +step:5267 train loss:3.483851 +step:5268 train loss:3.468417 +step:5269 train loss:3.530830 +step:5270 train loss:3.522305 +step:5271 train loss:3.455526 +step:5272 train loss:3.579093 +step:5273 train loss:3.473600 +step:5274 train loss:3.484995 +step:5275 train loss:3.489516 +step:5276 train loss:3.528666 +step:5277 train loss:3.483559 +step:5278 train loss:3.501661 +step:5279 train loss:3.458626 +step:5280 train loss:3.512900 +step:5281 train loss:3.469542 +step:5282 train loss:3.504385 +step:5283 train loss:3.513412 +step:5284 train loss:3.509363 +step:5285 train loss:3.476114 +step:5286 train loss:3.469622 +step:5287 train loss:3.546430 +step:5288 train loss:3.495948 +step:5289 train loss:3.507205 +step:5290 train loss:3.503684 +step:5291 train loss:3.510697 +step:5292 train loss:3.503031 +step:5293 train loss:3.503576 +step:5294 train loss:3.450286 +step:5295 train loss:3.569465 +step:5296 train loss:3.579084 +step:5297 train loss:3.532406 +step:5298 train loss:3.493110 +step:5299 train loss:3.474888 +step:5300 train loss:3.424391 +step:5301 train loss:3.488364 +step:5302 train loss:3.460406 +step:5303 train loss:3.503286 +step:5304 train loss:3.530547 +step:5305 train loss:3.524079 +step:5306 train loss:3.507334 +step:5307 train loss:3.541881 +step:5308 train loss:3.486776 +step:5309 train loss:3.472988 +step:5310 train loss:3.508744 +step:5311 train loss:3.476871 +step:5312 train loss:3.510211 +step:5313 train loss:3.509940 +step:5314 train loss:3.517282 +step:5315 train loss:3.487489 +step:5316 train loss:3.511124 +step:5317 train loss:3.527654 +step:5318 train loss:3.533291 +step:5319 train loss:3.584065 +step:5320 train loss:3.507634 +step:5321 train loss:3.498355 +step:5322 train loss:3.460747 +step:5323 train loss:3.545863 +step:5324 train loss:3.473702 +step:5325 train loss:3.527105 +step:5326 train loss:3.453987 +step:5327 train loss:3.480611 +step:5328 train loss:3.462758 +step:5329 train loss:3.567462 +step:5330 train loss:3.469818 +step:5331 train loss:3.640496 +step:5332 train loss:3.481026 +step:5333 train loss:3.523540 +step:5334 train loss:3.497361 +step:5335 train loss:3.474364 +step:5336 train loss:3.665688 +step:5337 train loss:3.502295 +step:5338 train loss:3.530405 +step:5339 train loss:3.469801 +step:5340 train loss:3.512357 +step:5341 train loss:3.507604 +step:5342 train loss:3.546742 +step:5343 train loss:3.484540 +step:5344 train loss:3.496600 +step:5345 train loss:3.557219 +step:5346 train loss:3.562896 +step:5347 train loss:3.507921 +step:5348 train loss:3.537322 +step:5349 train loss:3.499881 +step:5350 train loss:3.512280 +step:5351 train loss:3.508871 +step:5352 train loss:3.521604 +step:5353 train loss:3.491204 +step:5354 train loss:3.548012 +step:5355 train loss:3.547348 +step:5356 train loss:3.504775 +step:5357 train loss:3.457229 +step:5358 train loss:3.577048 +step:5359 train loss:3.499712 +step:5360 train loss:3.521646 +step:5361 train loss:3.541052 +step:5362 train loss:3.529578 +step:5363 train loss:3.526230 +step:5364 train loss:3.567964 +step:5365 train loss:3.554655 +step:5366 train loss:3.540908 +step:5367 train loss:3.559707 +step:5368 train loss:3.559741 +step:5369 train loss:3.506791 +step:5370 train loss:3.496279 +step:5371 train loss:3.555600 +step:5372 train 
loss:3.496517 +step:5373 train loss:3.462083 +step:5374 train loss:3.537411 +step:5375 train loss:3.538802 +step:5376 train loss:3.537986 +step:5377 train loss:3.540485 +step:5378 train loss:3.501223 +step:5379 train loss:3.502838 +step:5380 train loss:3.551250 +step:5381 train loss:3.509425 +step:5382 train loss:3.532823 +step:5383 train loss:3.551910 +step:5384 train loss:3.683544 +step:5385 train loss:3.528956 +step:5386 train loss:3.656646 +step:5387 train loss:3.491948 +step:5388 train loss:3.526205 +step:5389 train loss:3.523997 +step:5390 train loss:3.528270 +step:5391 train loss:3.494798 +step:5392 train loss:3.505061 +step:5393 train loss:3.469710 +step:5394 train loss:3.492426 +step:5395 train loss:3.541687 +step:5396 train loss:3.513394 +step:5397 train loss:3.479546 +step:5398 train loss:3.513260 +step:5399 train loss:3.491374 +step:5400 train loss:3.521133 +step:5401 train loss:3.512290 +step:5402 train loss:3.643198 +step:5403 train loss:3.506489 +step:5404 train loss:3.517540 +step:5405 train loss:3.511431 +step:5406 train loss:3.490099 +step:5407 train loss:3.558348 +step:5408 train loss:3.541488 +step:5409 train loss:3.724895 +step:5410 train loss:3.533314 +step:5411 train loss:3.510092 +step:5412 train loss:3.512292 +step:5413 train loss:3.533311 +step:5414 train loss:3.518151 +step:5415 train loss:3.493948 +step:5416 train loss:3.512678 +step:5417 train loss:3.474448 +step:5418 train loss:3.532993 +step:5419 train loss:3.554786 +step:5420 train loss:3.503543 +step:5421 train loss:3.541567 +step:5422 train loss:3.488381 +step:5423 train loss:3.522701 +step:5424 train loss:3.489115 +step:5425 train loss:3.539966 +step:5426 train loss:3.514371 +step:5427 train loss:3.491077 +step:5428 train loss:3.489609 +step:5429 train loss:3.488776 +step:5430 train loss:3.473779 +step:5431 train loss:3.566678 +step:5432 train loss:3.503961 +step:5433 train loss:3.507308 +step:5434 train loss:3.533761 +step:5435 train loss:3.723904 +step:5436 train loss:3.499646 +step:5437 train loss:3.513755 +step:5438 train loss:3.479414 +step:5439 train loss:3.487287 +step:5440 train loss:3.583460 +step:5441 train loss:3.646807 +step:5442 train loss:3.484560 +step:5443 train loss:3.535058 +step:5444 train loss:3.516324 +step:5445 train loss:3.505352 +step:5446 train loss:3.481609 +step:5447 train loss:3.524679 +step:5448 train loss:3.562634 +step:5449 train loss:3.514133 +step:5450 train loss:3.537821 +step:5451 train loss:3.504740 +step:5452 train loss:3.538267 +step:5453 train loss:3.511560 +step:5454 train loss:3.502586 +step:5455 train loss:3.489725 +step:5456 train loss:3.513174 +step:5457 train loss:3.514556 +step:5458 train loss:3.497612 +step:5459 train loss:3.531042 +step:5460 train loss:3.533387 +step:5461 train loss:3.441375 +step:5462 train loss:3.489776 +step:5463 train loss:3.533556 +step:5464 train loss:3.550436 +step:5465 train loss:3.487966 +step:5466 train loss:3.504716 +step:5467 train loss:3.519718 +step:5468 train loss:3.478882 +step:5469 train loss:3.475939 +step:5470 train loss:3.521620 +step:5471 train loss:3.532120 +step:5472 train loss:3.485584 +step:5473 train loss:3.516504 +step:5474 train loss:3.729999 +step:5475 train loss:3.544463 +step:5476 train loss:3.541385 +step:5477 train loss:3.641289 +step:5478 train loss:3.522995 +step:5479 train loss:3.556829 +step:5480 train loss:3.540490 +step:5481 train loss:3.562955 +step:5482 train loss:3.550724 +step:5483 train loss:3.592794 +step:5484 train loss:3.512225 +step:5485 train loss:3.507343 +step:5486 train loss:3.566639 
+step:5487 train loss:3.553418 +step:5488 train loss:3.572172 +step:5489 train loss:3.521665 +step:5490 train loss:3.481400 +step:5491 train loss:3.524436 +step:5492 train loss:3.494434 +step:5493 train loss:3.484316 +step:5494 train loss:3.541928 +step:5495 train loss:3.530396 +step:5496 train loss:3.514680 +step:5497 train loss:3.518764 +step:5498 train loss:3.506471 +step:5499 train loss:3.556309 +step:5500 validation loss:3.480972 total_sharp:9.3841e-03 L1_sharp:2.4113e-02 L2_sharp:1.0617e-02 L3_sharp:3.2826e-03 L4_sharp:2.1215e-03 L5_sharp:1.5675e-03 L6_sharp:1.5488e-03 L7_sharp:1.6966e-03 L8_sharp:1.3822e-03 L9_sharp:7.7697e-04 L10_sharp:4.8170e-04 L11_sharp:5.8857e-04 L12_sharp:3.7801e-04 total_fnorm:2.3352e+00 total_l1_linf:1.9889e+04 total_spectral:2.3352e+00 L1_fnorm:5.1321e-01 L2_fnorm:3.9658e-01 L3_fnorm:4.2578e-01 L4_fnorm:5.1643e-01 L5_fnorm:5.7664e-01 L6_fnorm:5.8791e-01 L7_fnorm:5.9767e-01 L8_fnorm:5.9833e-01 L9_fnorm:5.9650e-01 L10_fnorm:5.9518e-01 L11_fnorm:5.8012e-01 L12_fnorm:5.9916e-01 L1_l1linf:4.2565e-01 L2_l1linf:4.0507e-01 L3_l1linf:4.4957e-01 L4_l1linf:3.9352e-01 L5_l1linf:4.0395e-01 L6_l1linf:4.0574e-01 L7_l1linf:4.0248e-01 L8_l1linf:4.0476e-01 L9_l1linf:4.0756e-01 L10_l1linf:4.1298e-01 L11_l1linf:4.0515e-01 L12_l1linf:4.0497e-01 L1_spectral:1.2038e-02 L2_spectral:1.2980e-02 L3_spectral:1.7948e-02 L4_spectral:1.5743e-02 L5_spectral:1.2041e-02 L6_spectral:1.2044e-02 L7_spectral:1.2047e-02 L8_spectral:1.2051e-02 L9_spectral:1.2048e-02 L10_spectral:1.2049e-02 L11_spectral:1.2046e-02 L12_spectral:1.2046e-02 v_norm:2.3352e+00 cos_v_-g_hvp:2.5841e-02 g_hvp_norm:7.9641e-01 cos_v_-g_t:2.7579e-02 g_t_norm:8.2958e-01 hv_norm:2.3219e+00 cos_v_hv:9.4378e-03 hg_norm:1.4679e+03 cos_g_hg:2.1068e-01 v_par:6.0760e-03 v_perp:2.3352e+00 L1_cos_v_neg_g:2.1189e-02 L1_v_norm:5.1321e-01 L2_cos_v_neg_g:2.9529e-02 L2_v_norm:3.9658e-01 L3_cos_v_neg_g:2.9212e-02 L3_v_norm:4.2578e-01 L4_cos_v_neg_g:2.4820e-02 L4_v_norm:5.1643e-01 L5_cos_v_neg_g:2.8896e-02 L5_v_norm:5.7664e-01 L6_cos_v_neg_g:2.9774e-02 L6_v_norm:5.8791e-01 L7_cos_v_neg_g:3.0261e-02 L7_v_norm:5.9767e-01 L8_cos_v_neg_g:2.8100e-02 L8_v_norm:5.9833e-01 L9_cos_v_neg_g:3.0786e-02 L9_v_norm:5.9650e-01 L10_cos_v_neg_g:2.9736e-02 L10_v_norm:5.9518e-01 L11_cos_v_neg_g:3.9076e-02 L11_v_norm:5.8012e-01 L12_cos_v_neg_g:7.1440e-02 L12_v_norm:5.9916e-01 +step:5500 train loss:3.484946 +step:5501 train loss:3.513968 +step:5502 train loss:3.494742 +step:5503 train loss:3.486557 +step:5504 train loss:3.556962 +step:5505 train loss:3.551097 +step:5506 train loss:3.530950 +step:5507 train loss:3.507683 +step:5508 train loss:3.561506 +step:5509 train loss:3.476836 +step:5510 train loss:3.592921 +step:5511 train loss:3.616096 +step:5512 train loss:3.532722 +step:5513 train loss:3.528383 +step:5514 train loss:3.548840 +step:5515 train loss:3.503095 +step:5516 train loss:3.442477 +step:5517 train loss:3.514563 +step:5518 train loss:3.494340 +step:5519 train loss:3.500069 +step:5520 train loss:3.533595 +step:5521 train loss:3.474965 +step:5522 train loss:3.499135 +step:5523 train loss:3.523763 +step:5524 train loss:3.505387 +step:5525 train loss:3.451317 +step:5526 train loss:3.542488 +step:5527 train loss:3.547985 +step:5528 train loss:3.582410 +step:5529 train loss:3.462457 +step:5530 train loss:3.553277 +step:5531 train loss:3.524825 +step:5532 train loss:3.513784 +step:5533 train loss:3.505221 +step:5534 train loss:3.556315 +step:5535 train loss:3.551833 +step:5536 train loss:3.679338 +step:5537 train loss:3.492206 +step:5538 train loss:3.542770 
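A note on the sharpness checkpoint above (and the analogous ones at steps 6000, 6500 and 7000 below): if v is read as the logged update direction and H as the loss Hessian (an interpretation of the field names v_norm, hv_norm, cos_v_hv and g_hvp, not something the log states explicitly), then the reported total_sharp is numerically consistent with the directional curvature, i.e. the Hessian Rayleigh quotient along v:

\[
\text{total\_sharp} \;=\; \frac{v^{\top} H v}{\lVert v \rVert^{2}}
\;=\; \cos(v, Hv)\,\frac{\lVert Hv \rVert}{\lVert v \rVert}.
\]

For example, the step 5500 entry gives 9.4378e-03 * 2.3219 / 2.3352 ≈ 9.384e-03, matching total_sharp = 9.3841e-03; the same identity reproduces total_sharp at the step 6000, 6500 and 7000 checkpoints as well.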
+step:5539 train loss:3.513644 +step:5540 train loss:3.502182 +step:5541 train loss:3.526382 +step:5542 train loss:3.507930 +step:5543 train loss:3.488255 +step:5544 train loss:3.526572 +step:5545 train loss:3.570008 +step:5546 train loss:3.486225 +step:5547 train loss:3.568395 +step:5548 train loss:3.534421 +step:5549 train loss:3.573401 +step:5550 train loss:3.586109 +step:5551 train loss:3.538037 +step:5552 train loss:3.488765 +step:5553 train loss:3.542628 +step:5554 train loss:3.592635 +step:5555 train loss:3.543201 +step:5556 train loss:3.520320 +step:5557 train loss:3.538697 +step:5558 train loss:3.533496 +step:5559 train loss:3.516260 +step:5560 train loss:3.496280 +step:5561 train loss:3.525754 +step:5562 train loss:3.492332 +step:5563 train loss:3.531634 +step:5564 train loss:3.546725 +step:5565 train loss:3.549598 +step:5566 train loss:3.528453 +step:5567 train loss:3.533246 +step:5568 train loss:3.466154 +step:5569 train loss:3.491944 +step:5570 train loss:3.547371 +step:5571 train loss:3.510171 +step:5572 train loss:3.523752 +step:5573 train loss:3.559724 +step:5574 train loss:3.531967 +step:5575 train loss:3.523643 +step:5576 train loss:3.561141 +step:5577 train loss:3.490405 +step:5578 train loss:3.527733 +step:5579 train loss:3.536148 +step:5580 train loss:3.576034 +step:5581 train loss:3.485388 +step:5582 train loss:3.552264 +step:5583 train loss:3.488979 +step:5584 train loss:3.585629 +step:5585 train loss:3.673960 +step:5586 train loss:3.481608 +step:5587 train loss:3.504846 +step:5588 train loss:3.506820 +step:5589 train loss:3.498069 +step:5590 train loss:3.543563 +step:5591 train loss:3.517110 +step:5592 train loss:3.501217 +step:5593 train loss:3.492269 +step:5594 train loss:3.549083 +step:5595 train loss:3.515538 +step:5596 train loss:3.514654 +step:5597 train loss:3.481062 +step:5598 train loss:3.556427 +step:5599 train loss:3.575560 +step:5600 train loss:3.471122 +step:5601 train loss:3.475863 +step:5602 train loss:3.541108 +step:5603 train loss:3.537161 +step:5604 train loss:3.488233 +step:5605 train loss:3.632794 +step:5606 train loss:3.469341 +step:5607 train loss:3.506887 +step:5608 train loss:3.488485 +step:5609 train loss:3.547312 +step:5610 train loss:3.540039 +step:5611 train loss:3.539796 +step:5612 train loss:3.531220 +step:5613 train loss:3.474460 +step:5614 train loss:3.506851 +step:5615 train loss:3.603266 +step:5616 train loss:3.523193 +step:5617 train loss:3.563904 +step:5618 train loss:3.511441 +step:5619 train loss:3.472231 +step:5620 train loss:3.476538 +step:5621 train loss:3.532798 +step:5622 train loss:3.516782 +step:5623 train loss:3.468043 +step:5624 train loss:3.464262 +step:5625 train loss:3.475449 +step:5626 train loss:3.452959 +step:5627 train loss:3.523311 +step:5628 train loss:3.519265 +step:5629 train loss:3.500185 +step:5630 train loss:3.543269 +step:5631 train loss:3.527661 +step:5632 train loss:3.563091 +step:5633 train loss:3.532963 +step:5634 train loss:3.553068 +step:5635 train loss:3.514437 +step:5636 train loss:3.487975 +step:5637 train loss:3.487416 +step:5638 train loss:3.508554 +step:5639 train loss:3.417886 +step:5640 train loss:3.521137 +step:5641 train loss:3.476322 +step:5642 train loss:3.485291 +step:5643 train loss:3.526108 +step:5644 train loss:3.558380 +step:5645 train loss:3.527926 +step:5646 train loss:3.530926 +step:5647 train loss:3.527766 +step:5648 train loss:3.538002 +step:5649 train loss:3.535746 +step:5650 train loss:3.513876 +step:5651 train loss:3.495383 +step:5652 train loss:3.531201 +step:5653 train 
loss:3.533714 +step:5654 train loss:3.591630 +step:5655 train loss:3.497383 +step:5656 train loss:3.485094 +step:5657 train loss:3.569794 +step:5658 train loss:3.515213 +step:5659 train loss:3.570525 +step:5660 train loss:3.583423 +step:5661 train loss:3.500601 +step:5662 train loss:3.487289 +step:5663 train loss:3.531676 +step:5664 train loss:3.514584 +step:5665 train loss:3.526260 +step:5666 train loss:3.526295 +step:5667 train loss:3.638824 +step:5668 train loss:3.534284 +step:5669 train loss:3.533828 +step:5670 train loss:3.559034 +step:5671 train loss:3.566228 +step:5672 train loss:3.570055 +step:5673 train loss:3.543055 +step:5674 train loss:3.496547 +step:5675 train loss:3.532176 +step:5676 train loss:3.505621 +step:5677 train loss:3.526336 +step:5678 train loss:3.534745 +step:5679 train loss:3.619889 +step:5680 train loss:3.519672 +step:5681 train loss:3.539199 +step:5682 train loss:3.525474 +step:5683 train loss:3.535086 +step:5684 train loss:3.524498 +step:5685 train loss:3.543889 +step:5686 train loss:3.527757 +step:5687 train loss:3.485887 +step:5688 train loss:3.463941 +step:5689 train loss:3.463572 +step:5690 train loss:3.502150 +step:5691 train loss:3.586004 +step:5692 train loss:3.502821 +step:5693 train loss:3.453642 +step:5694 train loss:3.503850 +step:5695 train loss:3.658861 +step:5696 train loss:3.622594 +step:5697 train loss:3.511830 +step:5698 train loss:3.495813 +step:5699 train loss:3.563676 +step:5700 train loss:3.628104 +step:5701 train loss:3.530206 +step:5702 train loss:3.585529 +step:5703 train loss:3.634539 +step:5704 train loss:3.542686 +step:5705 train loss:3.572310 +step:5706 train loss:3.585702 +step:5707 train loss:3.506626 +step:5708 train loss:3.495286 +step:5709 train loss:3.515868 +step:5710 train loss:3.476866 +step:5711 train loss:3.539340 +step:5712 train loss:3.494116 +step:5713 train loss:3.515819 +step:5714 train loss:3.547650 +step:5715 train loss:3.590895 +step:5716 train loss:3.521780 +step:5717 train loss:3.596065 +step:5718 train loss:3.589826 +step:5719 train loss:3.523575 +step:5720 train loss:3.522630 +step:5721 train loss:3.495218 +step:5722 train loss:3.546143 +step:5723 train loss:3.495226 +step:5724 train loss:3.467168 +step:5725 train loss:3.522855 +step:5726 train loss:3.481638 +step:5727 train loss:3.600150 +step:5728 train loss:3.470419 +step:5729 train loss:3.529470 +step:5730 train loss:3.513806 +step:5731 train loss:3.414422 +step:5732 train loss:3.491165 +step:5733 train loss:3.466898 +step:5734 train loss:3.527545 +step:5735 train loss:3.425024 +step:5736 train loss:3.489757 +step:5737 train loss:3.476246 +step:5738 train loss:3.498939 +step:5739 train loss:3.533759 +step:5740 train loss:3.460749 +step:5741 train loss:3.456358 +step:5742 train loss:3.511870 +step:5743 train loss:3.465899 +step:5744 train loss:3.537129 +step:5745 train loss:3.515079 +step:5746 train loss:3.528811 +step:5747 train loss:3.597873 +step:5748 train loss:3.490401 +step:5749 train loss:3.497399 +step:5750 validation loss:3.479132 +step:5750 train loss:3.519657 +step:5751 train loss:3.500974 +step:5752 train loss:3.542631 +step:5753 train loss:3.535168 +step:5754 train loss:3.507418 +step:5755 train loss:3.491460 +step:5756 train loss:3.509876 +step:5757 train loss:3.556391 +step:5758 train loss:3.530416 +step:5759 train loss:3.548100 +step:5760 train loss:3.563747 +step:5761 train loss:3.557036 +step:5762 train loss:3.478882 +step:5763 train loss:3.452756 +step:5764 train loss:3.545011 +step:5765 train loss:3.507688 +step:5766 train loss:3.484565 
+step:5767 train loss:3.512079 +step:5768 train loss:3.492329 +step:5769 train loss:3.528148 +step:5770 train loss:3.576090 +step:5771 train loss:3.527029 +step:5772 train loss:3.516804 +step:5773 train loss:3.446742 +step:5774 train loss:3.490520 +step:5775 train loss:3.567595 +step:5776 train loss:3.574909 +step:5777 train loss:3.482929 +step:5778 train loss:3.496897 +step:5779 train loss:3.489463 +step:5780 train loss:3.524991 +step:5781 train loss:3.491999 +step:5782 train loss:3.554144 +step:5783 train loss:3.545505 +step:5784 train loss:3.532893 +step:5785 train loss:3.544937 +step:5786 train loss:3.507366 +step:5787 train loss:3.495181 +step:5788 train loss:3.483680 +step:5789 train loss:3.523854 +step:5790 train loss:3.468684 +step:5791 train loss:3.490176 +step:5792 train loss:3.535539 +step:5793 train loss:3.441887 +step:5794 train loss:3.531114 +step:5795 train loss:3.542585 +step:5796 train loss:3.486981 +step:5797 train loss:3.484598 +step:5798 train loss:3.495204 +step:5799 train loss:3.490341 +step:5800 train loss:3.548863 +step:5801 train loss:3.486215 +step:5802 train loss:3.553123 +step:5803 train loss:3.529074 +step:5804 train loss:3.446562 +step:5805 train loss:3.505774 +step:5806 train loss:3.498709 +step:5807 train loss:3.503643 +step:5808 train loss:3.495810 +step:5809 train loss:3.448037 +step:5810 train loss:3.491460 +step:5811 train loss:3.512615 +step:5812 train loss:3.524142 +step:5813 train loss:3.479603 +step:5814 train loss:3.450233 +step:5815 train loss:3.486480 +step:5816 train loss:3.498298 +step:5817 train loss:3.519160 +step:5818 train loss:3.497709 +step:5819 train loss:3.486672 +step:5820 train loss:3.523280 +step:5821 train loss:3.475786 +step:5822 train loss:3.543824 +step:5823 train loss:3.477970 +step:5824 train loss:3.476001 +step:5825 train loss:3.526736 +step:5826 train loss:3.542795 +step:5827 train loss:3.521968 +step:5828 train loss:3.480079 +step:5829 train loss:3.510610 +step:5830 train loss:3.491326 +step:5831 train loss:3.433705 +step:5832 train loss:3.481524 +step:5833 train loss:3.456481 +step:5834 train loss:3.513568 +step:5835 train loss:3.470131 +step:5836 train loss:3.505393 +step:5837 train loss:3.527091 +step:5838 train loss:3.521788 +step:5839 train loss:3.524797 +step:5840 train loss:3.499546 +step:5841 train loss:3.491601 +step:5842 train loss:3.511889 +step:5843 train loss:3.453012 +step:5844 train loss:3.487852 +step:5845 train loss:3.484933 +step:5846 train loss:3.494716 +step:5847 train loss:3.509113 +step:5848 train loss:3.525422 +step:5849 train loss:3.530006 +step:5850 train loss:3.498977 +step:5851 train loss:3.505154 +step:5852 train loss:3.583787 +step:5853 train loss:3.468088 +step:5854 train loss:3.467376 +step:5855 train loss:3.449558 +step:5856 train loss:3.450267 +step:5857 train loss:3.478827 +step:5858 train loss:3.473737 +step:5859 train loss:3.517377 +step:5860 train loss:3.477371 +step:5861 train loss:3.480430 +step:5862 train loss:3.561116 +step:5863 train loss:3.523299 +step:5864 train loss:3.569868 +step:5865 train loss:3.562678 +step:5866 train loss:3.456369 +step:5867 train loss:3.617363 +step:5868 train loss:3.523106 +step:5869 train loss:3.572446 +step:5870 train loss:3.488930 +step:5871 train loss:3.460189 +step:5872 train loss:3.536472 +step:5873 train loss:3.508427 +step:5874 train loss:3.488921 +step:5875 train loss:3.487589 +step:5876 train loss:3.517953 +step:5877 train loss:3.612735 +step:5878 train loss:3.462950 +step:5879 train loss:3.583726 +step:5880 train loss:3.837831 +step:5881 train 
loss:3.499256 +step:5882 train loss:3.547322 +step:5883 train loss:3.446259 +step:5884 train loss:3.486187 +step:5885 train loss:3.525180 +step:5886 train loss:3.523077 +step:5887 train loss:3.580040 +step:5888 train loss:3.536659 +step:5889 train loss:3.531663 +step:5890 train loss:3.478560 +step:5891 train loss:3.473114 +step:5892 train loss:3.478654 +step:5893 train loss:3.478313 +step:5894 train loss:3.540171 +step:5895 train loss:3.499963 +step:5896 train loss:3.481231 +step:5897 train loss:3.515848 +step:5898 train loss:3.558813 +step:5899 train loss:3.488273 +step:5900 train loss:3.530955 +step:5901 train loss:3.451876 +step:5902 train loss:3.474812 +step:5903 train loss:3.453761 +step:5904 train loss:3.476589 +step:5905 train loss:3.532605 +step:5906 train loss:3.451245 +step:5907 train loss:3.441690 +step:5908 train loss:3.504389 +step:5909 train loss:3.486401 +step:5910 train loss:3.503756 +step:5911 train loss:3.481559 +step:5912 train loss:3.465432 +step:5913 train loss:3.506824 +step:5914 train loss:3.487019 +step:5915 train loss:3.517166 +step:5916 train loss:3.474040 +step:5917 train loss:3.524524 +step:5918 train loss:3.522622 +step:5919 train loss:3.555129 +step:5920 train loss:3.497645 +step:5921 train loss:3.543395 +step:5922 train loss:3.456243 +step:5923 train loss:3.510843 +step:5924 train loss:3.465923 +step:5925 train loss:3.511654 +step:5926 train loss:3.530593 +step:5927 train loss:3.538551 +step:5928 train loss:3.482770 +step:5929 train loss:3.522464 +step:5930 train loss:3.434153 +step:5931 train loss:3.491673 +step:5932 train loss:3.432514 +step:5933 train loss:3.547393 +step:5934 train loss:3.493113 +step:5935 train loss:3.477783 +step:5936 train loss:3.462618 +step:5937 train loss:3.488102 +step:5938 train loss:3.523706 +step:5939 train loss:3.485205 +step:5940 train loss:3.464150 +step:5941 train loss:3.518668 +step:5942 train loss:3.482316 +step:5943 train loss:3.513817 +step:5944 train loss:3.472984 +step:5945 train loss:3.516633 +step:5946 train loss:3.475280 +step:5947 train loss:3.499746 +step:5948 train loss:3.587048 +step:5949 train loss:3.509689 +step:5950 train loss:3.445660 +step:5951 train loss:3.570494 +step:5952 train loss:3.450841 +step:5953 train loss:3.487527 +step:5954 train loss:3.478281 +step:5955 train loss:3.502991 +step:5956 train loss:3.476613 +step:5957 train loss:3.558858 +step:5958 train loss:3.420502 +step:5959 train loss:3.534885 +step:5960 train loss:3.445368 +step:5961 train loss:3.538055 +step:5962 train loss:3.462866 +step:5963 train loss:3.532020 +step:5964 train loss:3.493938 +step:5965 train loss:3.512695 +step:5966 train loss:3.463554 +step:5967 train loss:3.538769 +step:5968 train loss:3.453257 +step:5969 train loss:3.519340 +step:5970 train loss:3.496861 +step:5971 train loss:3.519840 +step:5972 train loss:3.513430 +step:5973 train loss:3.487221 +step:5974 train loss:3.478020 +step:5975 train loss:3.512931 +step:5976 train loss:3.504873 +step:5977 train loss:3.521850 +step:5978 train loss:3.461798 +step:5979 train loss:3.513706 +step:5980 train loss:3.491394 +step:5981 train loss:3.501093 +step:5982 train loss:3.516942 +step:5983 train loss:3.603092 +step:5984 train loss:3.514437 +step:5985 train loss:3.575423 +step:5986 train loss:3.508404 +step:5987 train loss:3.530441 +step:5988 train loss:3.470923 +step:5989 train loss:3.516338 +step:5990 train loss:3.475086 +step:5991 train loss:3.436142 +step:5992 train loss:3.472055 +step:5993 train loss:3.451144 +step:5994 train loss:3.512278 +step:5995 train loss:3.472533 
+step:5996 train loss:3.500866 +step:5997 train loss:3.496528 +step:5998 train loss:3.530414 +step:5999 train loss:3.454688 +step:6000 validation loss:3.477997 total_sharp:5.8378e-03 L1_sharp:1.1674e-02 L2_sharp:1.3278e-02 L3_sharp:6.8861e-03 L4_sharp:2.0022e-03 L5_sharp:1.3037e-03 L6_sharp:1.4258e-03 L7_sharp:1.3915e-03 L8_sharp:1.4164e-03 L9_sharp:8.7888e-04 L10_sharp:5.2051e-04 L11_sharp:5.7049e-04 L12_sharp:9.2807e-04 total_fnorm:2.3006e+00 total_l1_linf:1.9474e+04 total_spectral:2.3006e+00 L1_fnorm:5.0723e-01 L2_fnorm:3.1078e-01 L3_fnorm:3.7187e-01 L4_fnorm:4.8791e-01 L5_fnorm:5.6828e-01 L6_fnorm:5.6957e-01 L7_fnorm:5.9605e-01 L8_fnorm:5.9600e-01 L9_fnorm:5.9731e-01 L10_fnorm:5.9533e-01 L11_fnorm:5.7705e-01 L12_fnorm:5.9878e-01 L1_l1linf:4.5416e-01 L2_l1linf:5.0951e-01 L3_l1linf:4.6523e-01 L4_l1linf:4.6530e-01 L5_l1linf:4.3254e-01 L6_l1linf:4.0304e-01 L7_l1linf:4.0077e-01 L8_l1linf:4.0216e-01 L9_l1linf:4.0325e-01 L10_l1linf:4.1001e-01 L11_l1linf:4.2733e-01 L12_l1linf:4.2038e-01 L1_spectral:1.2041e-02 L2_spectral:1.3908e-02 L3_spectral:1.4596e-02 L4_spectral:1.5804e-02 L5_spectral:1.2049e-02 L6_spectral:1.2045e-02 L7_spectral:1.2049e-02 L8_spectral:1.2049e-02 L9_spectral:1.2060e-02 L10_spectral:1.2051e-02 L11_spectral:1.2046e-02 L12_spectral:1.2043e-02 v_norm:2.3006e+00 cos_v_-g_hvp:1.9519e-02 g_hvp_norm:9.7678e-01 cos_v_-g_t:2.4189e-02 g_t_norm:1.1097e+00 hv_norm:1.3652e+00 cos_v_hv:9.8380e-03 hg_norm:8.7886e+02 cos_g_hg:1.6833e-01 v_par:5.7417e-03 v_perp:2.3006e+00 L1_cos_v_neg_g:5.8378e-03 L1_v_norm:5.0723e-01 L2_cos_v_neg_g:5.5369e-03 L2_v_norm:3.1078e-01 L3_cos_v_neg_g:3.9262e-02 L3_v_norm:3.7187e-01 L4_cos_v_neg_g:2.5738e-02 L4_v_norm:4.8791e-01 L5_cos_v_neg_g:2.9353e-02 L5_v_norm:5.6828e-01 L6_cos_v_neg_g:2.8967e-02 L6_v_norm:5.6957e-01 L7_cos_v_neg_g:2.9734e-02 L7_v_norm:5.9605e-01 L8_cos_v_neg_g:2.8276e-02 L8_v_norm:5.9600e-01 L9_cos_v_neg_g:2.9987e-02 L9_v_norm:5.9731e-01 L10_cos_v_neg_g:3.0816e-02 L10_v_norm:5.9533e-01 L11_cos_v_neg_g:3.9698e-02 L11_v_norm:5.7705e-01 L12_cos_v_neg_g:5.6353e-02 L12_v_norm:5.9878e-01 +step:6000 train loss:3.557721 +step:6001 train loss:3.449150 +step:6002 train loss:3.540522 +step:6003 train loss:3.471110 +step:6004 train loss:3.528486 +step:6005 train loss:3.486617 +step:6006 train loss:3.536439 +step:6007 train loss:3.453421 +step:6008 train loss:3.501314 +step:6009 train loss:3.492225 +step:6010 train loss:3.493169 +step:6011 train loss:3.501934 +step:6012 train loss:3.564184 +step:6013 train loss:3.503047 +step:6014 train loss:3.596980 +step:6015 train loss:3.515007 +step:6016 train loss:3.526838 +step:6017 train loss:3.532163 +step:6018 train loss:3.497648 +step:6019 train loss:3.508599 +step:6020 train loss:3.476417 +step:6021 train loss:3.583255 +step:6022 train loss:3.548257 +step:6023 train loss:3.527187 +step:6024 train loss:3.561727 +step:6025 train loss:3.557231 +step:6026 train loss:3.553901 +step:6027 train loss:3.520904 +step:6028 train loss:3.583234 +step:6029 train loss:3.474236 +step:6030 train loss:3.513264 +step:6031 train loss:3.496537 +step:6032 train loss:3.514297 +step:6033 train loss:3.477133 +step:6034 train loss:3.567041 +step:6035 train loss:3.512334 +step:6036 train loss:3.572687 +step:6037 train loss:3.505231 +step:6038 train loss:3.526325 +step:6039 train loss:3.522203 +step:6040 train loss:3.508855 +step:6041 train loss:3.500683 +step:6042 train loss:3.535039 +step:6043 train loss:3.566579 +step:6044 train loss:3.538902 +step:6045 train loss:3.560923 +step:6046 train loss:3.566391 +step:6047 train loss:3.557132 
+step:6048 train loss:3.531218 +step:6049 train loss:3.563768 +step:6050 train loss:3.590987 +step:6051 train loss:3.578799 +step:6052 train loss:3.526360 +step:6053 train loss:3.502931 +step:6054 train loss:3.537201 +step:6055 train loss:3.602686 +step:6056 train loss:3.551688 +step:6057 train loss:3.558945 +step:6058 train loss:3.544321 +step:6059 train loss:3.483310 +step:6060 train loss:3.508697 +step:6061 train loss:3.535419 +step:6062 train loss:3.537549 +step:6063 train loss:3.496328 +step:6064 train loss:3.509918 +step:6065 train loss:3.518472 +step:6066 train loss:3.539720 +step:6067 train loss:3.579073 +step:6068 train loss:3.564518 +step:6069 train loss:3.557028 +step:6070 train loss:3.560582 +step:6071 train loss:3.444738 +step:6072 train loss:3.505138 +step:6073 train loss:3.530381 +step:6074 train loss:3.615171 +step:6075 train loss:3.446278 +step:6076 train loss:3.492423 +step:6077 train loss:3.466226 +step:6078 train loss:3.524061 +step:6079 train loss:3.537097 +step:6080 train loss:3.561958 +step:6081 train loss:3.500524 +step:6082 train loss:3.538714 +step:6083 train loss:3.566710 +step:6084 train loss:3.487879 +step:6085 train loss:3.548197 +step:6086 train loss:3.510388 +step:6087 train loss:3.556655 +step:6088 train loss:3.608748 +step:6089 train loss:3.548388 +step:6090 train loss:3.443597 +step:6091 train loss:3.535962 +step:6092 train loss:3.531403 +step:6093 train loss:3.580700 +step:6094 train loss:3.517387 +step:6095 train loss:3.526030 +step:6096 train loss:3.602907 +step:6097 train loss:3.762633 +step:6098 train loss:3.476758 +step:6099 train loss:3.535658 +step:6100 train loss:3.453930 +step:6101 train loss:3.520950 +step:6102 train loss:3.496181 +step:6103 train loss:3.551439 +step:6104 train loss:3.499202 +step:6105 train loss:3.578474 +step:6106 train loss:3.564197 +step:6107 train loss:3.535235 +step:6108 train loss:3.508336 +step:6109 train loss:3.475643 +step:6110 train loss:3.517844 +step:6111 train loss:3.490330 +step:6112 train loss:3.490378 +step:6113 train loss:3.527530 +step:6114 train loss:3.546024 +step:6115 train loss:3.573379 +step:6116 train loss:3.521998 +step:6117 train loss:3.552983 +step:6118 train loss:3.551851 +step:6119 train loss:3.531772 +step:6120 train loss:3.510424 +step:6121 train loss:3.577936 +step:6122 train loss:3.560216 +step:6123 train loss:3.526433 +step:6124 train loss:3.555501 +step:6125 train loss:3.533401 +step:6126 train loss:3.587723 +step:6127 train loss:3.599584 +step:6128 train loss:3.599160 +step:6129 train loss:3.608137 +step:6130 train loss:3.512742 +step:6131 train loss:3.550244 +step:6132 train loss:3.649349 +step:6133 train loss:3.480185 +step:6134 train loss:3.476760 +step:6135 train loss:3.518450 +step:6136 train loss:3.547674 +step:6137 train loss:3.504244 +step:6138 train loss:3.565632 +step:6139 train loss:3.578961 +step:6140 train loss:3.573117 +step:6141 train loss:3.589074 +step:6142 train loss:3.551243 +step:6143 train loss:3.525858 +step:6144 train loss:3.527445 +step:6145 train loss:3.572152 +step:6146 train loss:3.616767 +step:6147 train loss:3.530853 +step:6148 train loss:3.566984 +step:6149 train loss:3.622859 +step:6150 train loss:3.599498 +step:6151 train loss:3.468865 +step:6152 train loss:3.528122 +step:6153 train loss:3.464000 +step:6154 train loss:3.553016 +step:6155 train loss:3.533102 +step:6156 train loss:3.558030 +step:6157 train loss:3.479496 +step:6158 train loss:3.550897 +step:6159 train loss:3.696864 +step:6160 train loss:3.517376 +step:6161 train loss:3.539419 +step:6162 train 
loss:3.557515 +step:6163 train loss:3.538423 +step:6164 train loss:3.618766 +step:6165 train loss:3.550342 +step:6166 train loss:3.531769 +step:6167 train loss:3.546733 +step:6168 train loss:3.498061 +step:6169 train loss:3.438042 +step:6170 train loss:3.545567 +step:6171 train loss:3.465114 +step:6172 train loss:3.521060 +step:6173 train loss:3.536700 +step:6174 train loss:3.506821 +step:6175 train loss:3.545943 +step:6176 train loss:3.570489 +step:6177 train loss:3.533393 +step:6178 train loss:3.477023 +step:6179 train loss:3.554334 +step:6180 train loss:3.517963 +step:6181 train loss:3.505886 +step:6182 train loss:3.536367 +step:6183 train loss:3.536583 +step:6184 train loss:3.503576 +step:6185 train loss:3.537941 +step:6186 train loss:3.510986 +step:6187 train loss:3.501221 +step:6188 train loss:3.490332 +step:6189 train loss:3.524812 +step:6190 train loss:3.542670 +step:6191 train loss:3.506628 +step:6192 train loss:3.585608 +step:6193 train loss:3.503118 +step:6194 train loss:3.560091 +step:6195 train loss:3.583067 +step:6196 train loss:3.584357 +step:6197 train loss:3.515985 +step:6198 train loss:3.507348 +step:6199 train loss:3.535234 +step:6200 train loss:3.623542 +step:6201 train loss:3.566359 +step:6202 train loss:3.530689 +step:6203 train loss:3.561245 +step:6204 train loss:3.509523 +step:6205 train loss:3.573468 +step:6206 train loss:3.528992 +step:6207 train loss:3.533914 +step:6208 train loss:3.526304 +step:6209 train loss:3.558941 +step:6210 train loss:3.624272 +step:6211 train loss:3.573287 +step:6212 train loss:3.461185 +step:6213 train loss:3.521728 +step:6214 train loss:3.568104 +step:6215 train loss:3.599725 +step:6216 train loss:3.635114 +step:6217 train loss:3.563657 +step:6218 train loss:3.503998 +step:6219 train loss:3.512479 +step:6220 train loss:3.560986 +step:6221 train loss:3.508931 +step:6222 train loss:3.503618 +step:6223 train loss:3.563702 +step:6224 train loss:3.569869 +step:6225 train loss:3.517300 +step:6226 train loss:3.540339 +step:6227 train loss:3.545845 +step:6228 train loss:3.507711 +step:6229 train loss:3.493284 +step:6230 train loss:3.480156 +step:6231 train loss:3.528174 +step:6232 train loss:3.605943 +step:6233 train loss:3.467632 +step:6234 train loss:3.508161 +step:6235 train loss:3.529432 +step:6236 train loss:3.529088 +step:6237 train loss:3.496721 +step:6238 train loss:3.530079 +step:6239 train loss:3.564534 +step:6240 train loss:3.498112 +step:6241 train loss:3.523591 +step:6242 train loss:3.545910 +step:6243 train loss:3.543991 +step:6244 train loss:3.512181 +step:6245 train loss:3.584630 +step:6246 train loss:3.558372 +step:6247 train loss:3.511843 +step:6248 train loss:3.527520 +step:6249 train loss:3.526278 +step:6250 validation loss:3.495098 +step:6250 train loss:3.569022 +step:6251 train loss:3.594141 +step:6252 train loss:3.552181 +step:6253 train loss:3.549932 +step:6254 train loss:3.580000 +step:6255 train loss:3.555741 +step:6256 train loss:3.546463 +step:6257 train loss:3.567236 +step:6258 train loss:3.580869 +step:6259 train loss:3.567710 +step:6260 train loss:3.567378 +step:6261 train loss:3.568123 +step:6262 train loss:3.721467 +step:6263 train loss:3.606051 +step:6264 train loss:3.538016 +step:6265 train loss:3.490043 +step:6266 train loss:3.549366 +step:6267 train loss:3.526412 +step:6268 train loss:3.515520 +step:6269 train loss:3.510705 +step:6270 train loss:3.537799 +step:6271 train loss:3.519011 +step:6272 train loss:3.525539 +step:6273 train loss:3.542911 +step:6274 train loss:3.550941 +step:6275 train loss:3.568882 
+step:6276 train loss:3.621468 +step:6277 train loss:3.549477 +step:6278 train loss:3.574499 +step:6279 train loss:3.475571 +step:6280 train loss:3.472200 +step:6281 train loss:3.502376 +step:6282 train loss:3.548009 +step:6283 train loss:3.518729 +step:6284 train loss:3.570880 +step:6285 train loss:3.532037 +step:6286 train loss:3.632277 +step:6287 train loss:3.529868 +step:6288 train loss:3.499625 +step:6289 train loss:3.556782 +step:6290 train loss:3.519078 +step:6291 train loss:3.558794 +step:6292 train loss:3.502759 +step:6293 train loss:3.520478 +step:6294 train loss:3.509174 +step:6295 train loss:3.574081 +step:6296 train loss:3.507725 +step:6297 train loss:3.545550 +step:6298 train loss:3.543324 +step:6299 train loss:3.547093 +step:6300 train loss:3.518583 +step:6301 train loss:3.611221 +step:6302 train loss:3.516434 +step:6303 train loss:3.586255 +step:6304 train loss:3.517283 +step:6305 train loss:3.520007 +step:6306 train loss:3.518044 +step:6307 train loss:3.499805 +step:6308 train loss:3.510780 +step:6309 train loss:3.564296 +step:6310 train loss:3.612836 +step:6311 train loss:3.511056 +step:6312 train loss:3.580738 +step:6313 train loss:3.531079 +step:6314 train loss:3.541620 +step:6315 train loss:3.499128 +step:6316 train loss:3.582336 +step:6317 train loss:3.479526 +step:6318 train loss:3.507931 +step:6319 train loss:3.539076 +step:6320 train loss:3.627889 +step:6321 train loss:3.557752 +step:6322 train loss:3.504575 +step:6323 train loss:3.552211 +step:6324 train loss:3.630456 +step:6325 train loss:3.541400 +step:6326 train loss:3.502725 +step:6327 train loss:3.520195 +step:6328 train loss:3.499600 +step:6329 train loss:3.546439 +step:6330 train loss:3.592405 +step:6331 train loss:3.513606 +step:6332 train loss:3.583212 +step:6333 train loss:3.518277 +step:6334 train loss:3.506524 +step:6335 train loss:3.538521 +step:6336 train loss:3.553807 +step:6337 train loss:3.502688 +step:6338 train loss:3.515157 +step:6339 train loss:3.540518 +step:6340 train loss:3.485315 +step:6341 train loss:3.538411 +step:6342 train loss:3.562562 +step:6343 train loss:3.519550 +step:6344 train loss:3.514089 +step:6345 train loss:3.514853 +step:6346 train loss:3.510763 +step:6347 train loss:3.507428 +step:6348 train loss:3.556240 +step:6349 train loss:3.567969 +step:6350 train loss:3.559628 +step:6351 train loss:3.553059 +step:6352 train loss:3.526418 +step:6353 train loss:3.526573 +step:6354 train loss:3.496175 +step:6355 train loss:3.490703 +step:6356 train loss:3.516680 +step:6357 train loss:3.544508 +step:6358 train loss:3.524378 +step:6359 train loss:3.555474 +step:6360 train loss:3.544616 +step:6361 train loss:3.506809 +step:6362 train loss:3.577078 +step:6363 train loss:3.542591 +step:6364 train loss:3.572168 +step:6365 train loss:3.449799 +step:6366 train loss:3.527370 +step:6367 train loss:3.515927 +step:6368 train loss:3.530395 +step:6369 train loss:3.537523 +step:6370 train loss:3.547361 +step:6371 train loss:3.649399 +step:6372 train loss:3.490538 +step:6373 train loss:3.576634 +step:6374 train loss:3.533843 +step:6375 train loss:3.578567 +step:6376 train loss:3.519020 +step:6377 train loss:3.527707 +step:6378 train loss:3.506344 +step:6379 train loss:3.594281 +step:6380 train loss:3.503319 +step:6381 train loss:3.556345 +step:6382 train loss:3.548278 +step:6383 train loss:3.522210 +step:6384 train loss:3.529320 +step:6385 train loss:3.588072 +step:6386 train loss:3.533639 +step:6387 train loss:3.554490 +step:6388 train loss:3.485657 +step:6389 train loss:3.565909 +step:6390 train 
loss:3.510663 +step:6391 train loss:3.578105 +step:6392 train loss:3.530358 +step:6393 train loss:3.505005 +step:6394 train loss:3.518147 +step:6395 train loss:3.512766 +step:6396 train loss:3.565048 +step:6397 train loss:3.480557 +step:6398 train loss:3.570986 +step:6399 train loss:3.476988 +step:6400 train loss:3.502028 +step:6401 train loss:3.539702 +step:6402 train loss:3.580209 +step:6403 train loss:3.532588 +step:6404 train loss:3.532460 +step:6405 train loss:3.505945 +step:6406 train loss:3.552590 +step:6407 train loss:3.545041 +step:6408 train loss:3.495667 +step:6409 train loss:3.490356 +step:6410 train loss:3.444379 +step:6411 train loss:3.506582 +step:6412 train loss:3.471091 +step:6413 train loss:3.546937 +step:6414 train loss:3.516805 +step:6415 train loss:3.519098 +step:6416 train loss:3.560301 +step:6417 train loss:3.494407 +step:6418 train loss:3.543956 +step:6419 train loss:3.561533 +step:6420 train loss:3.572404 +step:6421 train loss:3.539627 +step:6422 train loss:3.601295 +step:6423 train loss:3.508146 +step:6424 train loss:3.549236 +step:6425 train loss:3.572516 +step:6426 train loss:3.484518 +step:6427 train loss:3.560993 +step:6428 train loss:3.534034 +step:6429 train loss:3.498704 +step:6430 train loss:3.527173 +step:6431 train loss:3.538241 +step:6432 train loss:3.502958 +step:6433 train loss:3.510041 +step:6434 train loss:3.485685 +step:6435 train loss:3.527600 +step:6436 train loss:3.552675 +step:6437 train loss:3.649764 +step:6438 train loss:3.554943 +step:6439 train loss:3.550452 +step:6440 train loss:3.594727 +step:6441 train loss:3.514926 +step:6442 train loss:3.472486 +step:6443 train loss:3.519861 +step:6444 train loss:3.526856 +step:6445 train loss:3.570738 +step:6446 train loss:3.521774 +step:6447 train loss:3.555172 +step:6448 train loss:3.486958 +step:6449 train loss:3.481049 +step:6450 train loss:3.550903 +step:6451 train loss:3.483172 +step:6452 train loss:3.523415 +step:6453 train loss:3.519306 +step:6454 train loss:3.525273 +step:6455 train loss:3.514612 +step:6456 train loss:3.514816 +step:6457 train loss:3.516140 +step:6458 train loss:3.508776 +step:6459 train loss:3.470064 +step:6460 train loss:3.475173 +step:6461 train loss:3.564715 +step:6462 train loss:3.559266 +step:6463 train loss:3.560728 +step:6464 train loss:3.570062 +step:6465 train loss:3.466875 +step:6466 train loss:3.513121 +step:6467 train loss:3.500566 +step:6468 train loss:3.549308 +step:6469 train loss:3.532302 +step:6470 train loss:3.515560 +step:6471 train loss:3.562493 +step:6472 train loss:3.520285 +step:6473 train loss:3.557806 +step:6474 train loss:3.539415 +step:6475 train loss:3.559582 +step:6476 train loss:3.552396 +step:6477 train loss:3.548352 +step:6478 train loss:3.543326 +step:6479 train loss:3.580184 +step:6480 train loss:3.481943 +step:6481 train loss:3.482700 +step:6482 train loss:3.516499 +step:6483 train loss:3.543184 +step:6484 train loss:3.515516 +step:6485 train loss:3.575135 +step:6486 train loss:3.575201 +step:6487 train loss:3.558378 +step:6488 train loss:3.606221 +step:6489 train loss:3.522375 +step:6490 train loss:3.619898 +step:6491 train loss:3.535294 +step:6492 train loss:3.517687 +step:6493 train loss:3.625708 +step:6494 train loss:3.545867 +step:6495 train loss:3.584058 +step:6496 train loss:3.561516 +step:6497 train loss:3.523744 +step:6498 train loss:3.556831 +step:6499 train loss:3.602318 +step:6500 validation loss:3.475679 total_sharp:1.9816e-02 L1_sharp:1.9876e-01 L2_sharp:6.9145e-02 L3_sharp:8.5589e-03 L4_sharp:1.6824e-03 L5_sharp:1.1560e-03 
L6_sharp:1.1877e-03 L7_sharp:1.2547e-03 L8_sharp:1.1041e-03 L9_sharp:6.2197e-04 L10_sharp:3.8378e-04 L11_sharp:4.4770e-04 L12_sharp:3.1350e-04 total_fnorm:2.2652e+00 total_l1_linf:1.9134e+04 total_spectral:2.2652e+00 L1_fnorm:4.1145e-01 L2_fnorm:3.3003e-01 L3_fnorm:3.7094e-01 L4_fnorm:4.5997e-01 L5_fnorm:5.5778e-01 L6_fnorm:5.7293e-01 L7_fnorm:5.9492e-01 L8_fnorm:5.9706e-01 L9_fnorm:5.9658e-01 L10_fnorm:5.9430e-01 L11_fnorm:5.7641e-01 L12_fnorm:5.9842e-01 L1_l1linf:3.5714e-01 L2_l1linf:3.9652e-01 L3_l1linf:3.8257e-01 L4_l1linf:4.0705e-01 L5_l1linf:3.9799e-01 L6_l1linf:3.9762e-01 L7_l1linf:4.0011e-01 L8_l1linf:4.0150e-01 L9_l1linf:4.0165e-01 L10_l1linf:4.0832e-01 L11_l1linf:4.0431e-01 L12_l1linf:4.1521e-01 L1_spectral:1.2034e-02 L2_spectral:1.2932e-02 L3_spectral:1.2582e-02 L4_spectral:1.4508e-02 L5_spectral:1.2043e-02 L6_spectral:1.2047e-02 L7_spectral:1.2042e-02 L8_spectral:1.2045e-02 L9_spectral:1.2055e-02 L10_spectral:1.2049e-02 L11_spectral:1.2049e-02 L12_spectral:1.2045e-02 v_norm:2.2652e+00 cos_v_-g_hvp:2.2194e-02 g_hvp_norm:7.5177e-01 cos_v_-g_t:2.2702e-02 g_t_norm:9.6518e-01 hv_norm:1.2360e+01 cos_v_hv:3.6315e-03 hg_norm:7.9479e+03 cos_g_hg:1.1311e-01 v_par:5.4230e-03 v_perp:2.2652e+00 L1_cos_v_neg_g:1.8105e-02 L1_v_norm:4.1145e-01 L2_cos_v_neg_g:2.6322e-02 L2_v_norm:3.3003e-01 L3_cos_v_neg_g:3.0336e-02 L3_v_norm:3.7094e-01 L4_cos_v_neg_g:2.0116e-02 L4_v_norm:4.5997e-01 L5_cos_v_neg_g:2.4378e-02 L5_v_norm:5.5778e-01 L6_cos_v_neg_g:2.3344e-02 L6_v_norm:5.7293e-01 L7_cos_v_neg_g:2.5801e-02 L7_v_norm:5.9492e-01 L8_cos_v_neg_g:2.3886e-02 L8_v_norm:5.9706e-01 L9_cos_v_neg_g:2.7058e-02 L9_v_norm:5.9658e-01 L10_cos_v_neg_g:2.6100e-02 L10_v_norm:5.9430e-01 L11_cos_v_neg_g:3.6636e-02 L11_v_norm:5.7641e-01 L12_cos_v_neg_g:6.6989e-02 L12_v_norm:5.9842e-01 +step:6500 train loss:3.531967 +step:6501 train loss:3.532083 +step:6502 train loss:3.611051 +step:6503 train loss:3.525254 +step:6504 train loss:3.530068 +step:6505 train loss:3.544027 +step:6506 train loss:3.575662 +step:6507 train loss:3.517866 +step:6508 train loss:3.503525 +step:6509 train loss:3.553783 +step:6510 train loss:3.541722 +step:6511 train loss:3.513264 +step:6512 train loss:3.521683 +step:6513 train loss:3.646416 +step:6514 train loss:3.614196 +step:6515 train loss:3.580250 +step:6516 train loss:3.567377 +step:6517 train loss:3.614210 +step:6518 train loss:3.533680 +step:6519 train loss:3.528019 +step:6520 train loss:3.521224 +step:6521 train loss:3.557792 +step:6522 train loss:3.559025 +step:6523 train loss:3.571663 +step:6524 train loss:3.618807 +step:6525 train loss:3.657379 +step:6526 train loss:3.597074 +step:6527 train loss:3.655967 +step:6528 train loss:3.556248 +step:6529 train loss:3.590403 +step:6530 train loss:3.516376 +step:6531 train loss:3.580623 +step:6532 train loss:3.540778 +step:6533 train loss:3.508418 +step:6534 train loss:3.529732 +step:6535 train loss:3.509534 +step:6536 train loss:3.588961 +step:6537 train loss:3.571624 +step:6538 train loss:3.617650 +step:6539 train loss:3.527220 +step:6540 train loss:3.610739 +step:6541 train loss:3.535009 +step:6542 train loss:3.535924 +step:6543 train loss:3.532482 +step:6544 train loss:3.523346 +step:6545 train loss:3.499794 +step:6546 train loss:3.539332 +step:6547 train loss:3.645336 +step:6548 train loss:3.613406 +step:6549 train loss:3.603752 +step:6550 train loss:3.496119 +step:6551 train loss:3.585604 +step:6552 train loss:3.543335 +step:6553 train loss:3.506440 +step:6554 train loss:3.527117 +step:6555 train loss:3.509843 +step:6556 train loss:3.507695 
+step:6557 train loss:3.504624 +step:6558 train loss:3.516997 +step:6559 train loss:3.533136 +step:6560 train loss:3.503702 +step:6561 train loss:3.569114 +step:6562 train loss:3.571365 +step:6563 train loss:3.535350 +step:6564 train loss:3.559247 +step:6565 train loss:3.597973 +step:6566 train loss:3.682420 +step:6567 train loss:3.632796 +step:6568 train loss:3.525582 +step:6569 train loss:3.544765 +step:6570 train loss:3.551285 +step:6571 train loss:3.574244 +step:6572 train loss:3.616192 +step:6573 train loss:3.571597 +step:6574 train loss:3.521466 +step:6575 train loss:3.485047 +step:6576 train loss:3.559828 +step:6577 train loss:3.520834 +step:6578 train loss:3.588586 +step:6579 train loss:3.612615 +step:6580 train loss:3.642541 +step:6581 train loss:3.626655 +step:6582 train loss:3.679762 +step:6583 train loss:3.592780 +step:6584 train loss:3.636478 +step:6585 train loss:3.524417 +step:6586 train loss:3.585504 +step:6587 train loss:3.594186 +step:6588 train loss:3.547137 +step:6589 train loss:3.586977 +step:6590 train loss:3.562771 +step:6591 train loss:3.527875 +step:6592 train loss:3.581202 +step:6593 train loss:3.532387 +step:6594 train loss:3.539971 +step:6595 train loss:3.540306 +step:6596 train loss:3.561790 +step:6597 train loss:3.563611 +step:6598 train loss:3.551647 +step:6599 train loss:3.541279 +step:6600 train loss:3.534710 +step:6601 train loss:3.520083 +step:6602 train loss:3.575008 +step:6603 train loss:3.561144 +step:6604 train loss:3.595466 +step:6605 train loss:3.532192 +step:6606 train loss:3.572732 +step:6607 train loss:3.508705 +step:6608 train loss:3.571937 +step:6609 train loss:3.545979 +step:6610 train loss:3.547709 +step:6611 train loss:3.577015 +step:6612 train loss:3.587765 +step:6613 train loss:3.533193 +step:6614 train loss:3.607090 +step:6615 train loss:3.623126 +step:6616 train loss:3.564919 +step:6617 train loss:3.576586 +step:6618 train loss:3.522481 +step:6619 train loss:3.588285 +step:6620 train loss:3.505189 +step:6621 train loss:3.504519 +step:6622 train loss:3.533130 +step:6623 train loss:3.532724 +step:6624 train loss:3.575046 +step:6625 train loss:3.625119 +step:6626 train loss:3.521981 +step:6627 train loss:3.582948 +step:6628 train loss:3.528892 +step:6629 train loss:3.482083 +step:6630 train loss:3.516776 +step:6631 train loss:3.583827 +step:6632 train loss:3.501749 +step:6633 train loss:3.546455 +step:6634 train loss:3.608041 +step:6635 train loss:3.541409 +step:6636 train loss:3.566251 +step:6637 train loss:3.537929 +step:6638 train loss:3.497112 +step:6639 train loss:3.525164 +step:6640 train loss:3.563277 +step:6641 train loss:3.534548 +step:6642 train loss:3.463173 +step:6643 train loss:3.571966 +step:6644 train loss:3.508055 +step:6645 train loss:3.535458 +step:6646 train loss:3.530210 +step:6647 train loss:3.570314 +step:6648 train loss:3.528110 +step:6649 train loss:3.514269 +step:6650 train loss:3.532878 +step:6651 train loss:3.487043 +step:6652 train loss:3.571800 +step:6653 train loss:3.557748 +step:6654 train loss:3.531491 +step:6655 train loss:3.534998 +step:6656 train loss:3.530319 +step:6657 train loss:3.531177 +step:6658 train loss:3.513093 +step:6659 train loss:3.501278 +step:6660 train loss:3.504727 +step:6661 train loss:3.525290 +step:6662 train loss:3.591058 +step:6663 train loss:3.642044 +step:6664 train loss:3.594241 +step:6665 train loss:3.544025 +step:6666 train loss:3.517131 +step:6667 train loss:3.588877 +step:6668 train loss:3.534534 +step:6669 train loss:3.594451 +step:6670 train loss:3.506130 +step:6671 train 
loss:3.549429 +step:6672 train loss:3.511487 +step:6673 train loss:3.491580 +step:6674 train loss:3.551375 +step:6675 train loss:3.512942 +step:6676 train loss:3.514701 +step:6677 train loss:3.505238 +step:6678 train loss:3.546285 +step:6679 train loss:3.505671 +step:6680 train loss:3.532864 +step:6681 train loss:3.454497 +step:6682 train loss:3.469449 +step:6683 train loss:3.477286 +step:6684 train loss:3.455457 +step:6685 train loss:3.488642 +step:6686 train loss:3.504214 +step:6687 train loss:3.509725 +step:6688 train loss:3.511777 +step:6689 train loss:3.566778 +step:6690 train loss:3.513189 +step:6691 train loss:3.568942 +step:6692 train loss:3.474187 +step:6693 train loss:3.505472 +step:6694 train loss:3.491818 +step:6695 train loss:3.529892 +step:6696 train loss:3.555384 +step:6697 train loss:3.617455 +step:6698 train loss:3.494947 +step:6699 train loss:3.513438 +step:6700 train loss:3.568950 +step:6701 train loss:3.488201 +step:6702 train loss:3.472910 +step:6703 train loss:3.542307 +step:6704 train loss:3.600543 +step:6705 train loss:3.424447 +step:6706 train loss:3.556549 +step:6707 train loss:3.528215 +step:6708 train loss:3.480718 +step:6709 train loss:3.586520 +step:6710 train loss:3.499100 +step:6711 train loss:3.481827 +step:6712 train loss:3.473624 +step:6713 train loss:3.526755 +step:6714 train loss:3.499315 +step:6715 train loss:3.480989 +step:6716 train loss:3.464471 +step:6717 train loss:3.462164 +step:6718 train loss:3.511919 +step:6719 train loss:3.443816 +step:6720 train loss:3.466334 +step:6721 train loss:3.511691 +step:6722 train loss:3.475031 +step:6723 train loss:3.502653 +step:6724 train loss:3.506416 +step:6725 train loss:3.462594 +step:6726 train loss:3.464462 +step:6727 train loss:3.532905 +step:6728 train loss:3.533227 +step:6729 train loss:3.539762 +step:6730 train loss:3.532470 +step:6731 train loss:3.494197 +step:6732 train loss:3.554036 +step:6733 train loss:3.497671 +step:6734 train loss:3.479899 +step:6735 train loss:3.525634 +step:6736 train loss:3.479487 +step:6737 train loss:3.459722 +step:6738 train loss:3.510039 +step:6739 train loss:3.480489 +step:6740 train loss:3.470965 +step:6741 train loss:3.530337 +step:6742 train loss:3.508320 +step:6743 train loss:3.554758 +step:6744 train loss:3.607488 +step:6745 train loss:3.591471 +step:6746 train loss:3.542213 +step:6747 train loss:3.508610 +step:6748 train loss:3.509613 +step:6749 train loss:3.504409 +step:6750 validation loss:3.466811 +step:6750 train loss:3.443825 +step:6751 train loss:3.471277 +step:6752 train loss:3.526475 +step:6753 train loss:3.455225 +step:6754 train loss:3.448816 +step:6755 train loss:3.474844 +step:6756 train loss:3.526213 +step:6757 train loss:3.505143 +step:6758 train loss:3.554625 +step:6759 train loss:3.433486 +step:6760 train loss:3.463384 +step:6761 train loss:3.533705 +step:6762 train loss:3.488121 +step:6763 train loss:3.445983 +step:6764 train loss:3.522349 +step:6765 train loss:3.504576 +step:6766 train loss:3.487479 +step:6767 train loss:3.490674 +step:6768 train loss:3.483048 +step:6769 train loss:3.422784 +step:6770 train loss:3.557756 +step:6771 train loss:3.469519 +step:6772 train loss:3.555817 +step:6773 train loss:3.559039 +step:6774 train loss:3.584214 +step:6775 train loss:3.450782 +step:6776 train loss:3.510001 +step:6777 train loss:3.512318 +step:6778 train loss:3.477093 +step:6779 train loss:3.496857 +step:6780 train loss:3.499821 +step:6781 train loss:3.482126 +step:6782 train loss:3.557534 +step:6783 train loss:3.477017 +step:6784 train loss:3.459538 
+step:6785 train loss:3.542683 +step:6786 train loss:3.532824 +step:6787 train loss:3.463609 +step:6788 train loss:3.540756 +step:6789 train loss:3.462159 +step:6790 train loss:3.468782 +step:6791 train loss:3.505559 +step:6792 train loss:3.418442 +step:6793 train loss:3.514695 +step:6794 train loss:3.504485 +step:6795 train loss:3.555420 +step:6796 train loss:3.432890 +step:6797 train loss:3.490961 +step:6798 train loss:3.475850 +step:6799 train loss:3.500544 +step:6800 train loss:3.552325 +step:6801 train loss:3.469480 +step:6802 train loss:3.452868 +step:6803 train loss:3.484271 +step:6804 train loss:3.538350 +step:6805 train loss:3.460525 +step:6806 train loss:3.479475 +step:6807 train loss:3.570194 +step:6808 train loss:3.500990 +step:6809 train loss:3.493349 +step:6810 train loss:3.489289 +step:6811 train loss:3.497265 +step:6812 train loss:3.481121 +step:6813 train loss:3.454709 +step:6814 train loss:3.484662 +step:6815 train loss:3.497771 +step:6816 train loss:3.545350 +step:6817 train loss:3.514609 +step:6818 train loss:3.490803 +step:6819 train loss:3.475603 +step:6820 train loss:3.456596 +step:6821 train loss:3.522579 +step:6822 train loss:3.492689 +step:6823 train loss:3.608469 +step:6824 train loss:3.748289 +step:6825 train loss:3.519259 +step:6826 train loss:3.485327 +step:6827 train loss:3.488506 +step:6828 train loss:3.536716 +step:6829 train loss:3.486894 +step:6830 train loss:3.463551 +step:6831 train loss:3.494705 +step:6832 train loss:3.467755 +step:6833 train loss:3.456686 +step:6834 train loss:3.472061 +step:6835 train loss:3.493597 +step:6836 train loss:3.528653 +step:6837 train loss:3.514977 +step:6838 train loss:3.487382 +step:6839 train loss:3.438491 +step:6840 train loss:3.520281 +step:6841 train loss:3.485810 +step:6842 train loss:3.433027 +step:6843 train loss:3.490342 +step:6844 train loss:3.473690 +step:6845 train loss:3.430810 +step:6846 train loss:3.498324 +step:6847 train loss:3.462164 +step:6848 train loss:3.470552 +step:6849 train loss:3.446457 +step:6850 train loss:3.497734 +step:6851 train loss:3.407415 +step:6852 train loss:3.468600 +step:6853 train loss:3.524892 +step:6854 train loss:3.490344 +step:6855 train loss:3.474374 +step:6856 train loss:3.545206 +step:6857 train loss:3.584593 +step:6858 train loss:3.456024 +step:6859 train loss:3.516622 +step:6860 train loss:3.471810 +step:6861 train loss:3.626903 +step:6862 train loss:3.501788 +step:6863 train loss:3.519886 +step:6864 train loss:3.534821 +step:6865 train loss:3.543311 +step:6866 train loss:3.453846 +step:6867 train loss:3.520907 +step:6868 train loss:3.484675 +step:6869 train loss:3.565044 +step:6870 train loss:3.483701 +step:6871 train loss:3.480823 +step:6872 train loss:3.463227 +step:6873 train loss:3.485061 +step:6874 train loss:3.504772 +step:6875 train loss:3.516980 +step:6876 train loss:3.527978 +step:6877 train loss:3.523974 +step:6878 train loss:3.499804 +step:6879 train loss:3.515899 +step:6880 train loss:3.492547 +step:6881 train loss:3.535366 +step:6882 train loss:3.505832 +step:6883 train loss:3.536695 +step:6884 train loss:3.475927 +step:6885 train loss:3.544881 +step:6886 train loss:3.427579 +step:6887 train loss:3.435428 +step:6888 train loss:3.429941 +step:6889 train loss:3.502468 +step:6890 train loss:3.464614 +step:6891 train loss:3.506424 +step:6892 train loss:3.473328 +step:6893 train loss:3.505523 +step:6894 train loss:3.473342 +step:6895 train loss:3.510083 +step:6896 train loss:3.524146 +step:6897 train loss:3.536097 +step:6898 train loss:3.509561 +step:6899 train 
loss:3.494829 +step:6900 train loss:3.511816 +step:6901 train loss:3.492134 +step:6902 train loss:3.466678 +step:6903 train loss:3.475486 +step:6904 train loss:3.459480 +step:6905 train loss:3.548567 +step:6906 train loss:3.561211 +step:6907 train loss:3.416991 +step:6908 train loss:3.453103 +step:6909 train loss:3.476525 +step:6910 train loss:3.448716 +step:6911 train loss:3.536651 +step:6912 train loss:3.478798 +step:6913 train loss:3.448468 +step:6914 train loss:3.455732 +step:6915 train loss:3.497217 +step:6916 train loss:3.452707 +step:6917 train loss:3.516973 +step:6918 train loss:3.438517 +step:6919 train loss:3.414917 +step:6920 train loss:3.514371 +step:6921 train loss:3.423837 +step:6922 train loss:3.506952 +step:6923 train loss:3.458317 +step:6924 train loss:3.498951 +step:6925 train loss:3.527855 +step:6926 train loss:3.491010 +step:6927 train loss:3.434444 +step:6928 train loss:3.497514 +step:6929 train loss:3.436729 +step:6930 train loss:3.526986 +step:6931 train loss:3.442249 +step:6932 train loss:3.466374 +step:6933 train loss:3.477305 +step:6934 train loss:3.408882 +step:6935 train loss:3.485459 +step:6936 train loss:3.501694 +step:6937 train loss:3.537517 +step:6938 train loss:3.501214 +step:6939 train loss:3.482159 +step:6940 train loss:3.465077 +step:6941 train loss:3.455894 +step:6942 train loss:3.501180 +step:6943 train loss:3.420745 +step:6944 train loss:3.502550 +step:6945 train loss:3.479782 +step:6946 train loss:3.484363 +step:6947 train loss:3.451888 +step:6948 train loss:3.586653 +step:6949 train loss:3.705015 +step:6950 train loss:3.513544 +step:6951 train loss:3.508300 +step:6952 train loss:3.549851 +step:6953 train loss:3.469046 +step:6954 train loss:3.489707 +step:6955 train loss:3.468257 +step:6956 train loss:3.526653 +step:6957 train loss:3.469623 +step:6958 train loss:3.487867 +step:6959 train loss:3.489008 +step:6960 train loss:3.402871 +step:6961 train loss:3.478308 +step:6962 train loss:3.457597 +step:6963 train loss:3.494246 +step:6964 train loss:3.474218 +step:6965 train loss:3.484626 +step:6966 train loss:3.473657 +step:6967 train loss:3.441827 +step:6968 train loss:3.428117 +step:6969 train loss:3.490098 +step:6970 train loss:3.483958 +step:6971 train loss:3.460698 +step:6972 train loss:3.506369 +step:6973 train loss:3.541113 +step:6974 train loss:3.545170 +step:6975 train loss:3.480972 +step:6976 train loss:3.534162 +step:6977 train loss:3.523995 +step:6978 train loss:3.471569 +step:6979 train loss:3.450692 +step:6980 train loss:3.474138 +step:6981 train loss:3.597390 +step:6982 train loss:3.522280 +step:6983 train loss:3.473047 +step:6984 train loss:3.502866 +step:6985 train loss:3.479722 +step:6986 train loss:3.533882 +step:6987 train loss:3.508363 +step:6988 train loss:3.449309 +step:6989 train loss:3.527654 +step:6990 train loss:3.448810 +step:6991 train loss:3.521064 +step:6992 train loss:3.542394 +step:6993 train loss:3.574778 +step:6994 train loss:3.501553 +step:6995 train loss:3.510900 +step:6996 train loss:3.451924 +step:6997 train loss:3.513946 +step:6998 train loss:3.549612 +step:6999 train loss:3.457326 +step:7000 validation loss:3.467644 total_sharp:3.3607e-03 L1_sharp:7.0355e-03 L2_sharp:6.8653e-03 L3_sharp:2.7729e-03 L4_sharp:1.7531e-03 L5_sharp:1.1749e-03 L6_sharp:1.5449e-03 L7_sharp:1.1865e-03 L8_sharp:1.4401e-03 L9_sharp:8.0322e-04 L10_sharp:4.6136e-04 L11_sharp:5.2861e-04 L12_sharp:3.3167e-04 total_fnorm:2.2749e+00 total_l1_linf:1.9266e+04 total_spectral:2.2749e+00 L1_fnorm:4.7391e-01 L2_fnorm:3.5243e-01 L3_fnorm:3.7221e-01 
L4_fnorm:4.3754e-01 L5_fnorm:5.6053e-01 L6_fnorm:5.4226e-01 L7_fnorm:5.9456e-01 L8_fnorm:5.9564e-01 L9_fnorm:5.9617e-01 L10_fnorm:5.9389e-01 L11_fnorm:5.7410e-01 L12_fnorm:5.9844e-01 L1_l1linf:3.6386e-01 L2_l1linf:4.2434e-01 L3_l1linf:4.2985e-01 L4_l1linf:4.6577e-01 L5_l1linf:4.1552e-01 L6_l1linf:4.0167e-01 L7_l1linf:4.0086e-01 L8_l1linf:4.0034e-01 L9_l1linf:4.0406e-01 L10_l1linf:4.0974e-01 L11_l1linf:4.2353e-01 L12_l1linf:4.2908e-01 L1_spectral:1.2034e-02 L2_spectral:1.5894e-02 L3_spectral:1.4251e-02 L4_spectral:1.7749e-02 L5_spectral:1.2043e-02 L6_spectral:1.2044e-02 L7_spectral:1.2047e-02 L8_spectral:1.2047e-02 L9_spectral:1.2047e-02 L10_spectral:1.2050e-02 L11_spectral:1.2046e-02 L12_spectral:1.2044e-02 v_norm:2.2749e+00 cos_v_-g_hvp:1.8041e-02 g_hvp_norm:8.7194e-01 cos_v_-g_t:2.7439e-02 g_t_norm:7.0182e-01 hv_norm:4.5043e+00 cos_v_hv:1.6973e-03 hg_norm:7.0241e+03 cos_g_hg:-3.0327e-02 v_par:3.8558e-03 v_perp:2.2749e+00 L1_cos_v_neg_g:5.2360e-03 L1_v_norm:4.7391e-01 L2_cos_v_neg_g:3.0145e-04 L2_v_norm:3.5243e-01 L3_cos_v_neg_g:2.1723e-02 L3_v_norm:3.7221e-01 L4_cos_v_neg_g:2.4512e-02 L4_v_norm:4.3754e-01 L5_cos_v_neg_g:2.6509e-02 L5_v_norm:5.6053e-01 L6_cos_v_neg_g:2.6217e-02 L6_v_norm:5.4226e-01 L7_cos_v_neg_g:2.5110e-02 L7_v_norm:5.9456e-01 L8_cos_v_neg_g:2.4660e-02 L8_v_norm:5.9564e-01 L9_cos_v_neg_g:2.7678e-02 L9_v_norm:5.9617e-01 L10_cos_v_neg_g:2.8154e-02 L10_v_norm:5.9389e-01 L11_cos_v_neg_g:3.7745e-02 L11_v_norm:5.7410e-01 L12_cos_v_neg_g:6.8260e-02 L12_v_norm:5.9844e-01 +step:7000 train loss:3.471078 +step:7001 train loss:3.458624 +step:7002 train loss:3.509286 +step:7003 train loss:3.429010 +step:7004 train loss:3.434053 +step:7005 train loss:3.427993 +step:7006 train loss:3.540272 +step:7007 train loss:3.472484 +step:7008 train loss:3.505765 +step:7009 train loss:3.466111 +step:7010 train loss:3.509400 +step:7011 train loss:3.555632 +step:7012 train loss:3.522087 +step:7013 train loss:3.514159 +step:7014 train loss:3.528872 +step:7015 train loss:3.534583 +step:7016 train loss:3.520522 +step:7017 train loss:3.512880 +step:7018 train loss:3.495243 +step:7019 train loss:3.658026 +step:7020 train loss:3.424910 +step:7021 train loss:3.501701 +step:7022 train loss:3.493126 +step:7023 train loss:3.485777 +step:7024 train loss:3.517299 +step:7025 train loss:3.517689 +step:7026 train loss:3.481761 +step:7027 train loss:3.579643 +step:7028 train loss:3.486072 +step:7029 train loss:3.455889 +step:7030 train loss:3.482663 +step:7031 train loss:3.475855 +step:7032 train loss:3.522175 +step:7033 train loss:3.505513 +step:7034 train loss:3.532919 +step:7035 train loss:3.560351 +step:7036 train loss:3.471929 +step:7037 train loss:3.579405 +step:7038 train loss:3.476794 +step:7039 train loss:3.525679 +step:7040 train loss:3.511767 +step:7041 train loss:3.550878 +step:7042 train loss:3.520084 +step:7043 train loss:3.583333 +step:7044 train loss:3.561600 +step:7045 train loss:3.629663 +step:7046 train loss:3.480326 +step:7047 train loss:3.501170 +step:7048 train loss:3.566184 +step:7049 train loss:3.418452 +step:7050 train loss:3.504012 +step:7051 train loss:3.498471 +step:7052 train loss:3.560609 +step:7053 train loss:3.419985 +step:7054 train loss:3.548035 +step:7055 train loss:3.539072 +step:7056 train loss:3.518422 +step:7057 train loss:3.507106 +step:7058 train loss:3.561033 +step:7059 train loss:3.559335 +step:7060 train loss:3.573017 +step:7061 train loss:3.524531 +step:7062 train loss:3.557757 +step:7063 train loss:3.648451 +step:7064 train loss:3.622910 +step:7065 train loss:3.493241 
+step:7066 train loss:3.534076 +step:7067 train loss:3.568511 +step:7068 train loss:3.578304 +step:7069 train loss:3.543219 +step:7070 train loss:3.571725 +step:7071 train loss:3.589391 +step:7072 train loss:3.534384 +step:7073 train loss:3.562074 +step:7074 train loss:3.596488 +step:7075 train loss:3.596418 +step:7076 train loss:3.563769 +step:7077 train loss:3.501953 +step:7078 train loss:3.544035 +step:7079 train loss:3.526329 +step:7080 train loss:3.633436 +step:7081 train loss:3.568437 +step:7082 train loss:3.560346 +step:7083 train loss:3.596651 +step:7084 train loss:3.529968 +step:7085 train loss:3.594919 +step:7086 train loss:3.578010 +step:7087 train loss:3.589631 +step:7088 train loss:3.550108 +step:7089 train loss:3.540202 +step:7090 train loss:3.509037 +step:7091 train loss:3.574538 +step:7092 train loss:3.555573 +step:7093 train loss:3.533540 +step:7094 train loss:3.513351 +step:7095 train loss:3.516873 +step:7096 train loss:3.534886 +step:7097 train loss:3.551380 +step:7098 train loss:3.542583 +step:7099 train loss:3.520699 +step:7100 train loss:3.574590 +step:7101 train loss:3.545373 +step:7102 train loss:3.556960 +step:7103 train loss:3.732029 +step:7104 train loss:3.524323 +step:7105 train loss:3.574709 +step:7106 train loss:3.599157 +step:7107 train loss:3.553211 +step:7108 train loss:3.518952 +step:7109 train loss:3.537726 +step:7110 train loss:3.555161 +step:7111 train loss:3.626033 +step:7112 train loss:3.534940 +step:7113 train loss:3.529569 +step:7114 train loss:3.528502 +step:7115 train loss:3.568558 +step:7116 train loss:3.516994 +step:7117 train loss:3.518827 +step:7118 train loss:3.600119 +step:7119 train loss:3.548406 +step:7120 train loss:3.514713 +step:7121 train loss:3.569299 +step:7122 train loss:3.608623 +step:7123 train loss:3.535790 +step:7124 train loss:3.569902 +step:7125 train loss:3.530444 +step:7126 train loss:3.589618 +step:7127 train loss:3.544434 +step:7128 train loss:3.549756 +step:7129 train loss:3.571910 +step:7130 train loss:3.531773 +step:7131 train loss:3.530647 +step:7132 train loss:3.533326 +step:7133 train loss:3.551481 +step:7134 train loss:3.529574 +step:7135 train loss:3.535364 +step:7136 train loss:3.524275 +step:7137 train loss:3.567942 +step:7138 train loss:3.538922 +step:7139 train loss:3.520206 +step:7140 train loss:3.504775 +step:7141 train loss:3.511927 +step:7142 train loss:3.529537 +step:7143 train loss:3.517719 +step:7144 train loss:3.505521 +step:7145 train loss:3.578256 +step:7146 train loss:3.532842 +step:7147 train loss:3.495417 +step:7148 train loss:3.495919 +step:7149 train loss:3.541076 +step:7150 train loss:3.672408 +step:7151 train loss:3.524376 +step:7152 train loss:3.476256 +step:7153 train loss:3.541673 +step:7154 train loss:3.583282 +step:7155 train loss:3.525413 +step:7156 train loss:3.564087 +step:7157 train loss:3.609223 +step:7158 train loss:3.574065 +step:7159 train loss:3.508650 +step:7160 train loss:3.557965 +step:7161 train loss:3.591063 +step:7162 train loss:3.577410 +step:7163 train loss:3.573629 +step:7164 train loss:3.541879 +step:7165 train loss:3.571490 +step:7166 train loss:3.534222 +step:7167 train loss:3.578654 +step:7168 train loss:3.579703 +step:7169 train loss:3.570201 +step:7170 train loss:3.558761 +step:7171 train loss:3.579574 +step:7172 train loss:3.594856 +step:7173 train loss:3.571958 +step:7174 train loss:3.570560 +step:7175 train loss:3.555559 +step:7176 train loss:3.549047 +step:7177 train loss:3.500709 +step:7178 train loss:3.594367 +step:7179 train loss:3.601176 +step:7180 train 
loss:3.570719 +step:7181 train loss:3.540953 +step:7182 train loss:3.554117 +step:7183 train loss:3.663994 +step:7184 train loss:3.590532 +step:7185 train loss:3.527471 +step:7186 train loss:3.546792 +step:7187 train loss:3.565576 +step:7188 train loss:3.578460 +step:7189 train loss:3.524569 +step:7190 train loss:3.522918 +step:7191 train loss:3.501321 +step:7192 train loss:3.561460 +step:7193 train loss:3.540297 +step:7194 train loss:3.532536 +step:7195 train loss:3.527932 +step:7196 train loss:3.532006 +step:7197 train loss:3.570193 +step:7198 train loss:3.571798 +step:7199 train loss:3.564022 +step:7200 train loss:3.602814 +step:7201 train loss:3.503123 +step:7202 train loss:3.519050 +step:7203 train loss:3.509880 +step:7204 train loss:3.516068 +step:7205 train loss:3.582527 +step:7206 train loss:3.561853 +step:7207 train loss:3.566044 +step:7208 train loss:3.503572 +step:7209 train loss:3.491781 +step:7210 train loss:3.519780 +step:7211 train loss:3.590093 +step:7212 train loss:3.613364 +step:7213 train loss:3.557463 +step:7214 train loss:3.625719 +step:7215 train loss:3.573252 +step:7216 train loss:3.597656 +step:7217 train loss:3.555933 +step:7218 train loss:3.522430 +step:7219 train loss:3.557693 +step:7220 train loss:3.559006 +step:7221 train loss:3.590829 +step:7222 train loss:3.626955 +step:7223 train loss:3.593463 +step:7224 train loss:3.553332 +step:7225 train loss:3.619484 +step:7226 train loss:3.587116 +step:7227 train loss:3.550548 +step:7228 train loss:3.568610 +step:7229 train loss:3.544426 +step:7230 train loss:3.526318 +step:7231 train loss:3.571725 +step:7232 train loss:3.540076 +step:7233 train loss:3.534354 +step:7234 train loss:3.589663 +step:7235 train loss:3.589742 +step:7236 train loss:3.509403 +step:7237 train loss:3.602409 +step:7238 train loss:3.526329 +step:7239 train loss:3.512845 +step:7240 train loss:3.534816 +step:7241 train loss:3.577002 +step:7242 train loss:3.603941 +step:7243 train loss:3.613508 +step:7244 train loss:3.588300 +step:7245 train loss:3.564005 +step:7246 train loss:3.492760 +step:7247 train loss:3.519022 +step:7248 train loss:3.479561 +step:7249 train loss:3.527043 +step:7250 validation loss:3.486009 +step:7250 train loss:3.558820 +step:7251 train loss:3.460297 +step:7252 train loss:3.496670 +step:7253 train loss:3.456667 +step:7254 train loss:3.559079 +step:7255 train loss:3.487810 +step:7256 train loss:3.523056 +step:7257 train loss:3.553707 +step:7258 train loss:3.603755 +step:7259 train loss:3.528450 +step:7260 train loss:3.485313 +step:7261 train loss:3.489075 +step:7262 train loss:3.522058 +step:7263 train loss:3.536630 +step:7264 train loss:3.513761 +step:7265 train loss:3.522867 +step:7266 train loss:3.497268 +step:7267 train loss:3.512649 +step:7268 train loss:3.485212 +step:7269 train loss:3.496994 +step:7270 train loss:3.545820 +step:7271 train loss:3.528538 +step:7272 train loss:3.622522 +step:7273 train loss:3.530893 +step:7274 train loss:3.513304 +step:7275 train loss:3.543603 +step:7276 train loss:3.480242 +step:7277 train loss:3.504805 +step:7278 train loss:3.569717 +step:7279 train loss:3.608360 +step:7280 train loss:3.574604 +step:7281 train loss:3.462173 +step:7282 train loss:3.559569 +step:7283 train loss:3.514652 +step:7284 train loss:3.525527 +step:7285 train loss:3.527153 +step:7286 train loss:3.491476 +step:7287 train loss:3.580920 +step:7288 train loss:3.514513 +step:7289 train loss:3.563452 +step:7290 train loss:3.499169 +step:7291 train loss:3.561762 +step:7292 train loss:3.548017 +step:7293 train loss:3.493969 
+step:7294 train loss:3.510667 +step:7295 train loss:3.575373 +step:7296 train loss:3.502008 +step:7297 train loss:3.618479 +step:7298 train loss:3.546706 +step:7299 train loss:3.576571 +step:7300 train loss:3.546373 +step:7301 train loss:3.549510 +step:7302 train loss:3.571824 +step:7303 train loss:3.448711 +step:7304 train loss:3.505569 +step:7305 train loss:3.515874 +step:7306 train loss:3.561245 +step:7307 train loss:3.599591 +step:7308 train loss:3.490149 +step:7309 train loss:3.534895 +step:7310 train loss:3.574535 +step:7311 train loss:3.492214 +step:7312 train loss:3.621998 +step:7313 train loss:3.527050 +step:7314 train loss:3.504899 +step:7315 train loss:3.544158 +step:7316 train loss:3.513745 +step:7317 train loss:3.508375 +step:7318 train loss:3.465318 +step:7319 train loss:3.527351 +step:7320 train loss:3.542823 +step:7321 train loss:3.513952 +step:7322 train loss:3.565724 +step:7323 train loss:3.569483 +step:7324 train loss:3.563126 +step:7325 train loss:3.610270 +step:7326 train loss:3.484205 +step:7327 train loss:3.501618 +step:7328 train loss:3.503470 +step:7329 train loss:3.556126 +step:7330 train loss:3.495595 +step:7331 train loss:3.527651 +step:7332 train loss:3.563755 +step:7333 train loss:3.592470 +step:7334 train loss:3.496054 +step:7335 train loss:3.524746 +step:7336 train loss:3.502208 +step:7337 train loss:3.560156 +step:7338 train loss:3.497751 +step:7339 train loss:3.516604 +step:7340 train loss:3.536892 +step:7341 train loss:3.465327 +step:7342 train loss:3.536888 +step:7343 train loss:3.497340 +step:7344 train loss:3.555696 +step:7345 train loss:3.564781 +step:7346 train loss:3.487563 +step:7347 train loss:3.529107 +step:7348 train loss:3.589834 +step:7349 train loss:3.534330 +step:7350 train loss:3.513218 +step:7351 train loss:3.554148 +step:7352 train loss:3.568961 +step:7353 train loss:3.489808 +step:7354 train loss:3.521676 +step:7355 train loss:3.566310 +step:7356 train loss:3.444437 +step:7357 train loss:3.600443 +step:7358 train loss:3.495040 +step:7359 train loss:3.640368 +step:7360 train loss:3.565338 +step:7361 train loss:3.568595 +step:7362 train loss:3.505460 +step:7363 train loss:3.532542 +step:7364 train loss:3.474442 +step:7365 train loss:3.513632 +step:7366 train loss:3.479311 +step:7367 train loss:3.518882 +step:7368 train loss:3.538638 +step:7369 train loss:3.540504 +step:7370 train loss:3.587780 +step:7371 train loss:3.582837 +step:7372 train loss:3.544516 +step:7373 train loss:3.544366 +step:7374 train loss:3.506093 +step:7375 train loss:3.579285 +step:7376 train loss:3.547167 +step:7377 train loss:3.450896 +step:7378 train loss:3.537937 +step:7379 train loss:3.535981 +step:7380 train loss:3.480986 +step:7381 train loss:3.493811 +step:7382 train loss:3.490555 +step:7383 train loss:3.563417 +step:7384 train loss:3.547950 +step:7385 train loss:3.438652 +step:7386 train loss:3.527936 +step:7387 train loss:3.517770 +step:7388 train loss:3.533490 +step:7389 train loss:3.514888 +step:7390 train loss:3.476489 +step:7391 train loss:3.530898 +step:7392 train loss:3.542736 +step:7393 train loss:3.494655 +step:7394 train loss:3.567670 +step:7395 train loss:3.689174 +step:7396 train loss:3.522259 +step:7397 train loss:3.529886 +step:7398 train loss:3.561120 +step:7399 train loss:3.528870 +step:7400 train loss:3.529523 +step:7401 train loss:3.583015 +step:7402 train loss:3.451309 +step:7403 train loss:3.551471 +step:7404 train loss:3.535725 +step:7405 train loss:3.495109 +step:7406 train loss:3.575234 +step:7407 train loss:3.577754 +step:7408 train 
loss:3.460705 +step:7409 train loss:3.533601 +step:7410 train loss:3.507399 +step:7411 train loss:3.527178 +step:7412 train loss:3.531675 +step:7413 train loss:3.530088 +step:7414 train loss:3.500351 +step:7415 train loss:3.480423 +step:7416 train loss:3.603844 +step:7417 train loss:3.646623 +step:7418 train loss:3.572228 +step:7419 train loss:3.677139 +step:7420 train loss:3.544942 +step:7421 train loss:3.554909 +step:7422 train loss:3.571503 +step:7423 train loss:3.557883 +step:7424 train loss:3.627070 +step:7425 train loss:3.480311 +step:7426 train loss:3.499079 +step:7427 train loss:3.491019 +step:7428 train loss:3.510947 +step:7429 train loss:3.621515 +step:7430 train loss:3.569006 +step:7431 train loss:3.513342 +step:7432 train loss:3.586894 +step:7433 train loss:3.541973 +step:7434 train loss:3.534124 +step:7435 train loss:3.539880 +step:7436 train loss:3.518992 +step:7437 train loss:3.594134 +step:7438 train loss:3.525417 +step:7439 train loss:3.609265 +step:7440 train loss:3.606945 +step:7441 train loss:3.589938 +step:7442 train loss:3.597054 +step:7443 train loss:3.599518 +step:7444 train loss:3.578284 +step:7445 train loss:3.567131 +step:7446 train loss:3.506659 +step:7447 train loss:3.568200 +step:7448 train loss:3.553223 +step:7449 train loss:3.569996 +step:7450 train loss:3.579859 +step:7451 train loss:3.594743 +step:7452 train loss:3.583873 +step:7453 train loss:3.626886 +step:7454 train loss:3.565635 +step:7455 train loss:3.553825 +step:7456 train loss:3.623505 +step:7457 train loss:3.606270 +step:7458 train loss:3.553089 +step:7459 train loss:3.629083 +step:7460 train loss:3.594412 +step:7461 train loss:3.539873 +step:7462 train loss:3.598872 +step:7463 train loss:3.757995 +step:7464 train loss:3.689442 +step:7465 train loss:3.829192 +step:7466 train loss:3.820292 +step:7467 train loss:3.794259 +step:7468 train loss:3.942172 +step:7469 train loss:4.076751 +step:7470 train loss:3.947114 +step:7471 train loss:3.850039 +step:7472 train loss:3.887487 +step:7473 train loss:3.811176 +step:7474 train loss:3.878164 +step:7475 train loss:3.856603 +step:7476 train loss:3.823114 +step:7477 train loss:3.869174 +step:7478 train loss:3.846990 +step:7479 train loss:3.930442 +step:7480 train loss:3.922498 +step:7481 train loss:3.837268 +step:7482 train loss:3.913388 +step:7483 train loss:3.859288 +step:7484 train loss:3.894315 +step:7485 train loss:3.935185 +step:7486 train loss:3.958134 +step:7487 train loss:3.854689 +step:7488 train loss:3.803566 +step:7489 train loss:3.803970 +step:7490 train loss:3.796342 +step:7491 train loss:3.864333 +step:7492 train loss:3.818889 +step:7493 train loss:3.799364 +step:7494 train loss:3.775492 +step:7495 train loss:3.785174 +step:7496 train loss:3.768555 +step:7497 train loss:3.782190 +step:7498 train loss:3.851893 +step:7499 train loss:3.803863 +step:7500 validation loss:3.728822 total_sharp:3.6183e-01 L1_sharp:3.6466e+00 L2_sharp:1.8553e+00 L3_sharp:-8.2517e-03 L4_sharp:4.6314e-03 L5_sharp:4.2201e-03 L6_sharp:2.9110e-03 L7_sharp:1.7824e-03 L8_sharp:1.3807e-03 L9_sharp:7.9862e-04 L10_sharp:5.0823e-04 L11_sharp:5.8891e-04 L12_sharp:1.0236e-03 total_fnorm:2.0488e+00 total_l1_linf:1.6574e+04 total_spectral:2.0488e+00 L1_fnorm:3.5118e-01 L2_fnorm:1.2178e-01 L3_fnorm:2.3842e-01 L4_fnorm:3.5350e-01 L5_fnorm:4.2967e-01 L6_fnorm:4.6920e-01 L7_fnorm:5.5436e-01 L8_fnorm:5.6881e-01 L9_fnorm:5.8263e-01 L10_fnorm:5.8741e-01 L11_fnorm:5.6928e-01 L12_fnorm:6.0599e-01 L1_l1linf:6.9237e-01 L2_l1linf:1.4725e+00 L3_l1linf:8.7343e-01 L4_l1linf:9.1223e-01 
L5_l1linf:8.8148e-01 L6_l1linf:7.7387e-01 L7_l1linf:6.4832e-01 L8_l1linf:5.7952e-01 L9_l1linf:6.0018e-01 L10_l1linf:6.1469e-01 L11_l1linf:6.3423e-01 L12_l1linf:6.3132e-01 L1_spectral:1.6177e-02 L2_spectral:3.0973e-02 L3_spectral:1.9647e-02 L4_spectral:2.0261e-02 L5_spectral:1.9496e-02 L6_spectral:1.7062e-02 L7_spectral:1.4423e-02 L8_spectral:1.3011e-02 L9_spectral:1.3302e-02 L10_spectral:1.3600e-02 L11_spectral:1.4051e-02 L12_spectral:1.3879e-02 v_norm:2.0488e+00 cos_v_-g_hvp:2.5292e-03 g_hvp_norm:9.2831e+00 cos_v_-g_t:1.3169e-02 g_t_norm:8.2253e+00 hv_norm:4.4074e+02 cos_v_hv:1.6820e-03 hg_norm:1.3081e+07 cos_g_hg:8.2914e-02 v_par:7.3446e-04 v_perp:2.0488e+00 L1_cos_v_neg_g:-4.5949e-04 L1_v_norm:3.5118e-01 L2_cos_v_neg_g:1.2410e-02 L2_v_norm:1.2178e-01 L3_cos_v_neg_g:3.0363e-02 L3_v_norm:2.3842e-01 L4_cos_v_neg_g:2.2269e-02 L4_v_norm:3.5350e-01 L5_cos_v_neg_g:2.6903e-02 L5_v_norm:4.2967e-01 L6_cos_v_neg_g:2.8603e-02 L6_v_norm:4.6920e-01 L7_cos_v_neg_g:2.7000e-02 L7_v_norm:5.5436e-01 L8_cos_v_neg_g:2.4491e-02 L8_v_norm:5.6881e-01 L9_cos_v_neg_g:2.7198e-02 L9_v_norm:5.8263e-01 L10_cos_v_neg_g:2.6969e-02 L10_v_norm:5.8741e-01 L11_cos_v_neg_g:3.6193e-02 L11_v_norm:5.6928e-01 L12_cos_v_neg_g:6.3062e-02 L12_v_norm:6.0599e-01 +step:7500 train loss:3.735318 +step:7501 train loss:3.772911 +step:7502 train loss:3.789272 +step:7503 train loss:3.717433 +step:7504 train loss:3.773632 +step:7505 train loss:3.862762 +step:7506 train loss:3.831201 +step:7507 train loss:3.865099 +step:7508 train loss:3.730736 +step:7509 train loss:3.751790 +step:7510 train loss:3.758664 +step:7511 train loss:3.712874 +step:7512 train loss:3.781962 +step:7513 train loss:3.728218 +step:7514 train loss:3.774102 +step:7515 train loss:3.760274 +step:7516 train loss:3.722500 +step:7517 train loss:3.747435 +step:7518 train loss:3.705912 +step:7519 train loss:3.720673 +step:7520 train loss:3.721534 +step:7521 train loss:3.724778 +step:7522 train loss:3.746840 +step:7523 train loss:3.680785 +step:7524 train loss:3.673099 +step:7525 train loss:3.705822 +step:7526 train loss:3.702190 +step:7527 train loss:3.782143 +step:7528 train loss:3.686353 +step:7529 train loss:3.645915 +step:7530 train loss:3.754364 +step:7531 train loss:3.677889 +step:7532 train loss:3.841579 +step:7533 train loss:3.721537 +step:7534 train loss:3.629472 +step:7535 train loss:3.663406 +step:7536 train loss:3.665261 +step:7537 train loss:3.752683 +step:7538 train loss:3.672153 +step:7539 train loss:3.673232 +step:7540 train loss:3.729688 +step:7541 train loss:3.639664 +step:7542 train loss:3.614636 +step:7543 train loss:3.698495 +step:7544 train loss:3.736536 +step:7545 train loss:3.685106 +step:7546 train loss:3.761713 +step:7547 train loss:3.833713 +step:7548 train loss:3.667635 +step:7549 train loss:3.648676 +step:7550 train loss:3.721100 +step:7551 train loss:3.649725 +step:7552 train loss:3.720411 +step:7553 train loss:3.717581 +step:7554 train loss:3.699003 +step:7555 train loss:3.724227 +step:7556 train loss:3.656263 +step:7557 train loss:3.681944 +step:7558 train loss:3.654379 +step:7559 train loss:3.695581 +step:7560 train loss:3.729248 +step:7561 train loss:3.706153 +step:7562 train loss:3.688503 +step:7563 train loss:3.731863 +step:7564 train loss:3.666856 +step:7565 train loss:3.785890 +step:7566 train loss:3.726392 +step:7567 train loss:3.640238 +step:7568 train loss:3.634927 +step:7569 train loss:3.621579 +step:7570 train loss:3.681138 +step:7571 train loss:3.676885 +step:7572 train loss:3.612599 +step:7573 train loss:3.798709 +step:7574 train 
loss:3.814771 +step:7575 train loss:3.752907 +step:7576 train loss:3.784727 +step:7577 train loss:3.769009 +step:7578 train loss:3.727003 +step:7579 train loss:3.790628 +step:7580 train loss:3.673208 +step:7581 train loss:3.732189 +step:7582 train loss:3.783170 +step:7583 train loss:3.780456 +step:7584 train loss:3.786956 +step:7585 train loss:3.673723 +step:7586 train loss:3.756505 +step:7587 train loss:3.711278 +step:7588 train loss:3.681710 +step:7589 train loss:3.789695 +step:7590 train loss:3.820310 +step:7591 train loss:3.821733 +step:7592 train loss:3.774477 +step:7593 train loss:3.738915 +step:7594 train loss:3.771892 +step:7595 train loss:3.740424 +step:7596 train loss:3.649125 +step:7597 train loss:3.703337 +step:7598 train loss:3.665593 +step:7599 train loss:3.707603 +step:7600 train loss:3.713758 +step:7601 train loss:3.713011 +step:7602 train loss:3.717089 +step:7603 train loss:3.669716 +step:7604 train loss:3.705643 +step:7605 train loss:3.715911 +step:7606 train loss:3.695607 +step:7607 train loss:3.730765 +step:7608 train loss:3.712311 +step:7609 train loss:3.657793 +step:7610 train loss:3.763768 +step:7611 train loss:3.891341 +step:7612 train loss:3.722050 +step:7613 train loss:3.773373 +step:7614 train loss:3.729270 +step:7615 train loss:3.716663 +step:7616 train loss:3.719530 +step:7617 train loss:3.733671 +step:7618 train loss:3.723215 +step:7619 train loss:3.708750 +step:7620 train loss:3.647272 +step:7621 train loss:3.670125 +step:7622 train loss:3.728763 +step:7623 train loss:3.679999 +step:7624 train loss:3.756365 +step:7625 train loss:3.737222 +step:7626 train loss:3.814952 +step:7627 train loss:3.709274 +step:7628 train loss:3.691672 +step:7629 train loss:3.736570 +step:7630 train loss:3.727418 +step:7631 train loss:3.730465 +step:7632 train loss:3.754763 +step:7633 train loss:3.673180 +step:7634 train loss:3.650329 +step:7635 train loss:3.698769 +step:7636 train loss:3.756639 +step:7637 train loss:3.769168 +step:7638 train loss:3.731541 +step:7639 train loss:3.761492 +step:7640 train loss:3.710893 +step:7641 train loss:3.803927 +step:7642 train loss:3.696504 +step:7643 train loss:3.647382 +step:7644 train loss:3.702975 +step:7645 train loss:3.716639 +step:7646 train loss:3.711801 +step:7647 train loss:3.751626 +step:7648 train loss:3.726366 +step:7649 train loss:3.683532 +step:7650 train loss:3.666440 +step:7651 train loss:3.787897 +step:7652 train loss:3.741315 +step:7653 train loss:3.697480 +step:7654 train loss:3.744432 +step:7655 train loss:3.718376 +step:7656 train loss:3.712546 +step:7657 train loss:3.689332 +step:7658 train loss:3.718795 +step:7659 train loss:3.669006 +step:7660 train loss:3.664089 +step:7661 train loss:3.668931 +step:7662 train loss:3.621218 +step:7663 train loss:3.623667 +step:7664 train loss:3.672600 +step:7665 train loss:3.655941 +step:7666 train loss:3.667896 +step:7667 train loss:3.712849 +step:7668 train loss:3.734424 +step:7669 train loss:3.800963 +step:7670 train loss:3.663014 +step:7671 train loss:3.736902 +step:7672 train loss:3.628151 +step:7673 train loss:3.672048 +step:7674 train loss:3.629081 +step:7675 train loss:3.704973 +step:7676 train loss:3.719202 +step:7677 train loss:3.630622 +step:7678 train loss:3.627184 +step:7679 train loss:3.579390 +step:7680 train loss:3.646642 +step:7681 train loss:3.600743 +step:7682 train loss:3.690100 +step:7683 train loss:3.606949 +step:7684 train loss:3.648382 +step:7685 train loss:3.611645 +step:7686 train loss:3.674830 +step:7687 train loss:3.650885 +step:7688 train loss:3.677448 
+step:7689 train loss:3.653963 +step:7690 train loss:3.584760 +step:7691 train loss:3.614377 +step:7692 train loss:3.637433 +step:7693 train loss:3.651870 +step:7694 train loss:3.664916 +step:7695 train loss:3.624897 +step:7696 train loss:3.737187 +step:7697 train loss:3.608241 +step:7698 train loss:3.661329 +step:7699 train loss:3.631142 +step:7700 train loss:3.635965 +step:7701 train loss:3.655136 +step:7702 train loss:3.621684 +step:7703 train loss:3.604130 +step:7704 train loss:3.624132 +step:7705 train loss:3.655140 +step:7706 train loss:3.557116 +step:7707 train loss:3.539963 +step:7708 train loss:3.600208 +step:7709 train loss:3.636847 +step:7710 train loss:3.607012 +step:7711 train loss:3.619864 +step:7712 train loss:3.636260 +step:7713 train loss:3.651802 +step:7714 train loss:3.638427 +step:7715 train loss:3.596863 +step:7716 train loss:3.667053 +step:7717 train loss:3.645436 +step:7718 train loss:3.665524 +step:7719 train loss:3.560419 +step:7720 train loss:3.603409 +step:7721 train loss:3.613070 +step:7722 train loss:3.586746 +step:7723 train loss:3.634913 +step:7724 train loss:3.581043 +step:7725 train loss:3.627852 +step:7726 train loss:3.593878 +step:7727 train loss:3.612612 +step:7728 train loss:3.571152 +step:7729 train loss:3.563107 +step:7730 train loss:3.591445 +step:7731 train loss:3.602839 +step:7732 train loss:3.602206 +step:7733 train loss:3.593846 +step:7734 train loss:3.548558 +step:7735 train loss:3.572007 +step:7736 train loss:3.638354 +step:7737 train loss:3.611683 +step:7738 train loss:3.613529 +step:7739 train loss:3.634418 +step:7740 train loss:3.647831 +step:7741 train loss:3.583150 +step:7742 train loss:3.599095 +step:7743 train loss:3.590722 +step:7744 train loss:3.600735 +step:7745 train loss:3.583037 +step:7746 train loss:3.629342 +step:7747 train loss:3.603710 +step:7748 train loss:3.595512 +step:7749 train loss:3.533803 +step:7750 validation loss:3.574978 +step:7750 train loss:3.655324 +step:7751 train loss:3.630724 +step:7752 train loss:3.617999 +step:7753 train loss:3.655602 +step:7754 train loss:3.697271 +step:7755 train loss:3.633738 +step:7756 train loss:3.605297 +step:7757 train loss:3.609269 +step:7758 train loss:3.529738 +step:7759 train loss:3.620621 +step:7760 train loss:3.547484 +step:7761 train loss:3.624167 +step:7762 train loss:3.547161 +step:7763 train loss:3.574566 +step:7764 train loss:3.609866 +step:7765 train loss:3.564353 +step:7766 train loss:3.611215 +step:7767 train loss:3.550632 +step:7768 train loss:3.626714 +step:7769 train loss:3.570472 +step:7770 train loss:3.585528 +step:7771 train loss:3.591049 +step:7772 train loss:3.589194 +step:7773 train loss:3.552883 +step:7774 train loss:3.620831 +step:7775 train loss:3.538505 +step:7776 train loss:3.590425 +step:7777 train loss:3.568464 +step:7778 train loss:3.593574 +step:7779 train loss:3.668159 +step:7780 train loss:3.518908 +step:7781 train loss:3.579016 +step:7782 train loss:3.647396 +step:7783 train loss:3.656689 +step:7784 train loss:3.571512 +step:7785 train loss:3.600008 +step:7786 train loss:3.589301 +step:7787 train loss:3.627163 +step:7788 train loss:3.658525 +step:7789 train loss:3.573164 +step:7790 train loss:3.563632 +step:7791 train loss:3.536917 +step:7792 train loss:3.590754 +step:7793 train loss:3.623630 +step:7794 train loss:3.640984 +step:7795 train loss:3.581049 +step:7796 train loss:3.606267 +step:7797 train loss:3.513182 +step:7798 train loss:3.642543 +step:7799 train loss:3.596112 +step:7800 train loss:3.616100 +step:7801 train loss:3.549229 +step:7802 
train loss:3.545087 +step:7803 train loss:3.609884 +step:7804 train loss:3.597766 +step:7805 train loss:3.613313 +step:7806 train loss:3.542228 +step:7807 train loss:3.645704 +step:7808 train loss:3.582463 +step:7809 train loss:3.651527 +step:7810 train loss:3.553917 +step:7811 train loss:3.670082 +step:7812 train loss:3.550563 +step:7813 train loss:3.586130 +step:7814 train loss:3.556070 +step:7815 train loss:3.590591 +step:7816 train loss:3.592310 +step:7817 train loss:3.630367 +step:7818 train loss:3.588612 +step:7819 train loss:3.594008 +step:7820 train loss:3.599301 +step:7821 train loss:3.590822 +step:7822 train loss:3.584040 +step:7823 train loss:3.622023 +step:7824 train loss:3.560987 +step:7825 train loss:3.618549 +step:7826 train loss:3.604770 +step:7827 train loss:3.604500 +step:7828 train loss:3.596795 +step:7829 train loss:3.653939 +step:7830 train loss:3.566366 +step:7831 train loss:3.644936 +step:7832 train loss:3.565455 +step:7833 train loss:3.512527 +step:7834 train loss:3.658490 +step:7835 train loss:3.508319 +step:7836 train loss:3.630744 +step:7837 train loss:3.603479 +step:7838 train loss:3.555067 +step:7839 train loss:3.574540 +step:7840 train loss:3.528550 +step:7841 train loss:3.703528 +step:7842 train loss:3.558039 +step:7843 train loss:3.570711 +step:7844 train loss:3.643447 +step:7845 train loss:3.610005 +step:7846 train loss:3.616737 +step:7847 train loss:3.569226 +step:7848 train loss:3.631231 +step:7849 train loss:3.555212 +step:7850 train loss:3.560133 +step:7851 train loss:3.624701 +step:7852 train loss:3.576919 +step:7853 train loss:3.594959 +step:7854 train loss:3.540514 +step:7855 train loss:3.589114 +step:7856 train loss:3.619486 +step:7857 train loss:3.559158 +step:7858 train loss:3.601562 +step:7859 train loss:3.561836 +step:7860 train loss:3.617436 +step:7861 train loss:3.602064 +step:7862 train loss:3.628128 +step:7863 train loss:3.578871 +step:7864 train loss:3.547645 +step:7865 train loss:3.584926 +step:7866 train loss:3.582853 +step:7867 train loss:3.608522 +step:7868 train loss:3.592388 +step:7869 train loss:3.578334 +step:7870 train loss:3.588497 +step:7871 train loss:3.582725 +step:7872 train loss:3.603353 +step:7873 train loss:3.589737 +step:7874 train loss:3.533938 +step:7875 train loss:3.584543 +step:7876 train loss:3.586781 +step:7877 train loss:3.605521 +step:7878 train loss:3.612910 +step:7879 train loss:3.529631 +step:7880 train loss:3.554517 +step:7881 train loss:3.578316 +step:7882 train loss:3.608374 +step:7883 train loss:3.544706 +step:7884 train loss:3.549242 +step:7885 train loss:3.566837 +step:7886 train loss:3.538168 +step:7887 train loss:3.579539 +step:7888 train loss:3.572842 +step:7889 train loss:3.587984 +step:7890 train loss:3.540406 +step:7891 train loss:3.544619 +step:7892 train loss:3.572261 +step:7893 train loss:3.559448 +step:7894 train loss:3.594515 +step:7895 train loss:3.546131 +step:7896 train loss:3.620831 +step:7897 train loss:3.586105 +step:7898 train loss:3.619875 +step:7899 train loss:3.630498 +step:7900 train loss:3.523891 +step:7901 train loss:3.572454 +step:7902 train loss:3.513660 +step:7903 train loss:3.596591 +step:7904 train loss:3.571474 +step:7905 train loss:3.535747 +step:7906 train loss:3.633272 +step:7907 train loss:3.584620 +step:7908 train loss:3.573567 +step:7909 train loss:3.764964 +step:7910 train loss:3.544321 +step:7911 train loss:3.640698 +step:7912 train loss:3.558556 +step:7913 train loss:3.567106 +step:7914 train loss:3.614686 +step:7915 train loss:3.537972 +step:7916 train loss:3.628079 
+step:7917 train loss:3.547135 +step:7918 train loss:3.580082 +step:7919 train loss:3.541104 +step:7920 train loss:3.634478 +step:7921 train loss:3.547861 +step:7922 train loss:3.460791 +step:7923 train loss:3.585289 +step:7924 train loss:3.619267 +step:7925 train loss:3.615040 +step:7926 train loss:3.564786 +step:7927 train loss:3.528263 +step:7928 train loss:3.574657 +step:7929 train loss:3.537191 +step:7930 train loss:3.579790 +step:7931 train loss:3.608315 +step:7932 train loss:3.554731 +step:7933 train loss:3.579397 +step:7934 train loss:3.551551 +step:7935 train loss:3.597464 +step:7936 train loss:3.563699 +step:7937 train loss:3.569102 +step:7938 train loss:3.596854 +step:7939 train loss:3.543350 +step:7940 train loss:3.609750 +step:7941 train loss:3.504491 +step:7942 train loss:3.558729 +step:7943 train loss:3.578537 +step:7944 train loss:3.538337 +step:7945 train loss:3.583995 +step:7946 train loss:3.531027 +step:7947 train loss:3.598061 +step:7948 train loss:3.537808 +step:7949 train loss:3.531988 +step:7950 train loss:3.621290 +step:7951 train loss:3.507842 +step:7952 train loss:3.585186 +step:7953 train loss:3.605763 +step:7954 train loss:3.659479 +step:7955 train loss:3.560285 +step:7956 train loss:3.623677 +step:7957 train loss:3.588832 +step:7958 train loss:3.517475 +step:7959 train loss:3.553229 +step:7960 train loss:3.619678 +step:7961 train loss:3.522730 +step:7962 train loss:3.553701 +step:7963 train loss:3.496256 +step:7964 train loss:3.581173 +step:7965 train loss:3.568006 +step:7966 train loss:3.541459 +step:7967 train loss:3.606597 +step:7968 train loss:3.518150 +step:7969 train loss:3.583800 +step:7970 train loss:3.552730 +step:7971 train loss:3.546145 +step:7972 train loss:3.640570 +step:7973 train loss:3.544844 +step:7974 train loss:3.604333 +step:7975 train loss:3.549741 +step:7976 train loss:3.542461 +step:7977 train loss:3.562032 +step:7978 train loss:3.564557 +step:7979 train loss:3.635061 +step:7980 train loss:3.534736 +step:7981 train loss:3.539596 +step:7982 train loss:3.542677 +step:7983 train loss:3.588309 +step:7984 train loss:3.601372 +step:7985 train loss:3.520367 +step:7986 train loss:3.576664 +step:7987 train loss:3.593085 +step:7988 train loss:3.530218 +step:7989 train loss:3.591318 +step:7990 train loss:3.542758 +step:7991 train loss:3.590433 +step:7992 train loss:3.566966 +step:7993 train loss:3.521145 +step:7994 train loss:3.569599 +step:7995 train loss:3.559719 +step:7996 train loss:3.662335 +step:7997 train loss:3.552056 +step:7998 train loss:3.495610 +step:7999 train loss:3.589972 +step:8000 validation loss:3.531259 total_sharp:2.1163e-02 L1_sharp:2.2478e-01 L2_sharp:3.7804e-02 L3_sharp:1.2062e-02 L4_sharp:4.1148e-03 L5_sharp:2.1889e-03 L6_sharp:2.0860e-03 L7_sharp:1.4033e-03 L8_sharp:1.5118e-03 L9_sharp:9.4793e-04 L10_sharp:5.4321e-04 L11_sharp:5.6963e-04 L12_sharp:1.1764e-03 total_fnorm:2.1362e+00 total_l1_linf:1.7604e+04 total_spectral:2.1362e+00 L1_fnorm:3.7409e-01 L2_fnorm:2.2250e-01 L3_fnorm:2.6193e-01 L4_fnorm:3.8485e-01 L5_fnorm:4.7147e-01 L6_fnorm:5.1572e-01 L7_fnorm:5.8142e-01 L8_fnorm:5.8744e-01 L9_fnorm:5.9057e-01 L10_fnorm:5.9218e-01 L11_fnorm:5.7008e-01 L12_fnorm:6.0183e-01 L1_l1linf:4.4680e-01 L2_l1linf:4.5100e-01 L3_l1linf:4.2059e-01 L4_l1linf:4.8865e-01 L5_l1linf:4.4572e-01 L6_l1linf:4.2828e-01 L7_l1linf:3.9674e-01 L8_l1linf:3.9599e-01 L9_l1linf:4.0837e-01 L10_l1linf:4.2373e-01 L11_l1linf:4.6290e-01 L12_l1linf:4.6223e-01 L1_spectral:1.2034e-02 L2_spectral:1.2102e-02 L3_spectral:1.3981e-02 L4_spectral:1.9414e-02 
L5_spectral:1.3708e-02 L6_spectral:1.2039e-02 L7_spectral:1.2048e-02 L8_spectral:1.2046e-02 L9_spectral:1.2046e-02 L10_spectral:1.2053e-02 L11_spectral:1.2044e-02 L12_spectral:1.2047e-02 v_norm:2.1362e+00 cos_v_-g_hvp:1.7792e-02 g_hvp_norm:1.1744e+00 cos_v_-g_t:2.0222e-02 g_t_norm:2.4564e+00 hv_norm:2.9622e+01 cos_v_hv:1.5262e-03 hg_norm:3.0411e+04 cos_g_hg:4.5415e-02 v_par:3.0392e-03 v_perp:2.1362e+00 L1_cos_v_neg_g:2.4494e-02 L1_v_norm:3.7409e-01 L2_cos_v_neg_g:3.0696e-02 L2_v_norm:2.2250e-01 L3_cos_v_neg_g:1.9967e-02 L3_v_norm:2.6193e-01 L4_cos_v_neg_g:1.7642e-02 L4_v_norm:3.8485e-01 L5_cos_v_neg_g:2.2033e-02 L5_v_norm:4.7147e-01 L6_cos_v_neg_g:2.4310e-02 L6_v_norm:5.1572e-01 L7_cos_v_neg_g:2.3942e-02 L7_v_norm:5.8142e-01 L8_cos_v_neg_g:2.3862e-02 L8_v_norm:5.8744e-01 L9_cos_v_neg_g:2.5541e-02 L9_v_norm:5.9057e-01 L10_cos_v_neg_g:2.6533e-02 L10_v_norm:5.9218e-01 L11_cos_v_neg_g:3.4875e-02 L11_v_norm:5.7008e-01 L12_cos_v_neg_g:6.7747e-02 L12_v_norm:6.0183e-01 +step:8000 train loss:3.515430 +step:8001 train loss:3.529218 +step:8002 train loss:3.686599 +step:8003 train loss:3.573942 +step:8004 train loss:3.552986 +step:8005 train loss:3.580213 +step:8006 train loss:3.528464 +step:8007 train loss:3.523154 +step:8008 train loss:3.491256 +step:8009 train loss:3.634396 +step:8010 train loss:3.569013 +step:8011 train loss:3.616258 +step:8012 train loss:3.745452 +step:8013 train loss:3.634599 +step:8014 train loss:3.563227 +step:8015 train loss:3.536829 +step:8016 train loss:3.577849 +step:8017 train loss:3.575367 +step:8018 train loss:3.615581 +step:8019 train loss:3.592804 +step:8020 train loss:3.594445 +step:8021 train loss:3.536134 +step:8022 train loss:3.589539 +step:8023 train loss:3.662005 +step:8024 train loss:3.598794 +step:8025 train loss:3.578942 +step:8026 train loss:3.640197 +step:8027 train loss:3.590058 +step:8028 train loss:3.570949 +step:8029 train loss:3.609008 +step:8030 train loss:3.579769 +step:8031 train loss:3.595062 +step:8032 train loss:3.586593 +step:8033 train loss:3.622558 +step:8034 train loss:3.519041 +step:8035 train loss:3.589413 +step:8036 train loss:3.556510 +step:8037 train loss:3.584844 +step:8038 train loss:3.529136 +step:8039 train loss:3.481530 +step:8040 train loss:3.589173 +step:8041 train loss:3.548180 +step:8042 train loss:3.560233 +step:8043 train loss:3.633790 +step:8044 train loss:3.564474 +step:8045 train loss:3.628535 +step:8046 train loss:3.616811 +step:8047 train loss:3.597810 +step:8048 train loss:3.636024 +step:8049 train loss:3.558831 +step:8050 train loss:3.605241 +step:8051 train loss:3.599210 +step:8052 train loss:3.601694 +step:8053 train loss:3.533590 +step:8054 train loss:3.565174 +step:8055 train loss:3.566622 +step:8056 train loss:3.574118 +step:8057 train loss:3.621807 +step:8058 train loss:3.625135 +step:8059 train loss:3.593399 +step:8060 train loss:3.609665 +step:8061 train loss:3.577173 +step:8062 train loss:3.572633 +step:8063 train loss:3.526028 +step:8064 train loss:3.604951 +step:8065 train loss:3.558997 +step:8066 train loss:3.551434 +step:8067 train loss:3.536845 +step:8068 train loss:3.603921 +step:8069 train loss:3.626283 +step:8070 train loss:3.544125 +step:8071 train loss:3.555418 +step:8072 train loss:3.543778 +step:8073 train loss:3.582787 +step:8074 train loss:3.565877 +step:8075 train loss:3.566528 +step:8076 train loss:3.505415 +step:8077 train loss:3.582885 +step:8078 train loss:3.521456 +step:8079 train loss:3.575284 +step:8080 train loss:3.619030 +step:8081 train loss:3.659601 +step:8082 train loss:3.777999 
+step:8083 train loss:3.568009 +step:8084 train loss:3.587251 +step:8085 train loss:3.670082 +step:8086 train loss:3.531787 +step:8087 train loss:3.550111 +step:8088 train loss:3.616184 +step:8089 train loss:3.583502 +step:8090 train loss:3.548494 +step:8091 train loss:3.576625 +step:8092 train loss:3.609039 +step:8093 train loss:3.591556 +step:8094 train loss:3.516441 +step:8095 train loss:3.590867 +step:8096 train loss:3.535733 +step:8097 train loss:3.553203 +step:8098 train loss:3.586387 +step:8099 train loss:3.550458 +step:8100 train loss:3.593516 +step:8101 train loss:3.563172 +step:8102 train loss:3.620281 +step:8103 train loss:3.577549 +step:8104 train loss:3.538908 +step:8105 train loss:3.560725 +step:8106 train loss:3.634725 +step:8107 train loss:3.547362 +step:8108 train loss:3.528910 +step:8109 train loss:3.612939 +step:8110 train loss:3.500373 +step:8111 train loss:3.568662 +step:8112 train loss:3.548928 +step:8113 train loss:3.554358 +step:8114 train loss:3.536225 +step:8115 train loss:3.589568 +step:8116 train loss:3.563698 +step:8117 train loss:3.539646 +step:8118 train loss:3.519771 +step:8119 train loss:3.542065 +step:8120 train loss:3.598082 +step:8121 train loss:3.552198 +step:8122 train loss:3.624982 +step:8123 train loss:3.598376 +step:8124 train loss:3.584898 +step:8125 train loss:3.612492 +step:8126 train loss:3.543204 +step:8127 train loss:3.560097 +step:8128 train loss:3.559929 +step:8129 train loss:3.606386 +step:8130 train loss:3.607857 +step:8131 train loss:3.617874 +step:8132 train loss:3.579350 +step:8133 train loss:3.548346 +step:8134 train loss:3.594597 +step:8135 train loss:3.541220 +step:8136 train loss:3.590548 +step:8137 train loss:3.591956 +step:8138 train loss:3.550113 +step:8139 train loss:3.555237 +step:8140 train loss:3.509538 +step:8141 train loss:3.592830 +step:8142 train loss:3.571539 +step:8143 train loss:3.541585 +step:8144 train loss:3.565420 +step:8145 train loss:3.552639 +step:8146 train loss:3.526709 +step:8147 train loss:3.544428 +step:8148 train loss:3.585341 +step:8149 train loss:3.538987 +step:8150 train loss:3.498471 +step:8151 train loss:3.532391 +step:8152 train loss:3.583689 +step:8153 train loss:3.588350 +step:8154 train loss:3.484554 +step:8155 train loss:3.510014 +step:8156 train loss:3.619071 +step:8157 train loss:3.547009 +step:8158 train loss:3.586618 +step:8159 train loss:3.579735 +step:8160 train loss:3.556932 +step:8161 train loss:3.574020 +step:8162 train loss:3.577377 +step:8163 train loss:3.588294 +step:8164 train loss:3.508993 +step:8165 train loss:3.594809 +step:8166 train loss:3.541583 +step:8167 train loss:3.626292 +step:8168 train loss:3.547850 +step:8169 train loss:3.555020 +step:8170 train loss:3.610291 +step:8171 train loss:3.515907 +step:8172 train loss:3.538213 +step:8173 train loss:3.507205 +step:8174 train loss:3.550120 +step:8175 train loss:3.527867 +step:8176 train loss:3.551861 +step:8177 train loss:3.584513 +step:8178 train loss:3.554152 +step:8179 train loss:3.581832 +step:8180 train loss:3.568852 +step:8181 train loss:3.536653 +step:8182 train loss:3.495446 +step:8183 train loss:3.586174 +step:8184 train loss:3.591326 +step:8185 train loss:3.515912 +step:8186 train loss:3.540308 +step:8187 train loss:3.592498 +step:8188 train loss:3.569662 +step:8189 train loss:3.585031 +step:8190 train loss:3.568045 +step:8191 train loss:3.558257 +step:8192 train loss:3.596775 +step:8193 train loss:3.582166 +step:8194 train loss:3.567898 +step:8195 train loss:3.548701 +step:8196 train loss:3.540613 +step:8197 train 
loss:3.507878 +step:8198 train loss:3.580789 +step:8199 train loss:3.547109 +step:8200 train loss:3.503179 +step:8201 train loss:3.566650 +step:8202 train loss:3.525306 +step:8203 train loss:3.585260 +step:8204 train loss:3.602139 +step:8205 train loss:3.628749 +step:8206 train loss:3.567568 +step:8207 train loss:3.607343 +step:8208 train loss:3.566907 +step:8209 train loss:3.570517 +step:8210 train loss:3.552822 +step:8211 train loss:3.540531 +step:8212 train loss:3.592439 +step:8213 train loss:3.557325 +step:8214 train loss:3.544451 +step:8215 train loss:3.549881 +step:8216 train loss:3.577136 +step:8217 train loss:3.588848 +step:8218 train loss:3.561185 +step:8219 train loss:3.533705 +step:8220 train loss:3.543274 +step:8221 train loss:3.565336 +step:8222 train loss:3.545306 +step:8223 train loss:3.591048 +step:8224 train loss:3.553084 +step:8225 train loss:3.580769 +step:8226 train loss:3.570158 +step:8227 train loss:3.613774 +step:8228 train loss:3.554685 +step:8229 train loss:3.542935 +step:8230 train loss:3.518900 +step:8231 train loss:3.538485 +step:8232 train loss:3.534581 +step:8233 train loss:3.611635 +step:8234 train loss:3.569412 +step:8235 train loss:3.576373 +step:8236 train loss:3.607450 +step:8237 train loss:3.580843 +step:8238 train loss:3.599484 +step:8239 train loss:3.534256 +step:8240 train loss:3.541007 +step:8241 train loss:3.576793 +step:8242 train loss:3.562488 +step:8243 train loss:3.489864 +step:8244 train loss:3.554589 +step:8245 train loss:3.608377 +step:8246 train loss:3.573936 +step:8247 train loss:3.508455 +step:8248 train loss:3.542172 +step:8249 train loss:3.586213 +step:8250 validation loss:3.483324 +step:8250 train loss:3.546030 +step:8251 train loss:3.510188 +step:8252 train loss:3.576604 +step:8253 train loss:3.534202 +step:8254 train loss:3.599210 +step:8255 train loss:3.571517 +step:8256 train loss:3.571824 +step:8257 train loss:3.594552 +step:8258 train loss:3.551812 +step:8259 train loss:3.648580 +step:8260 train loss:3.587068 +step:8261 train loss:3.545733 +step:8262 train loss:3.596212 +step:8263 train loss:3.599703 +step:8264 train loss:3.550012 +step:8265 train loss:3.571536 +step:8266 train loss:3.615460 +step:8267 train loss:3.624358 +step:8268 train loss:3.595224 +step:8269 train loss:3.563707 +step:8270 train loss:3.560371 +step:8271 train loss:3.607374 +step:8272 train loss:3.556838 +step:8273 train loss:3.584831 +step:8274 train loss:3.589496 +step:8275 train loss:3.510278 +step:8276 train loss:3.510509 +step:8277 train loss:3.572141 +step:8278 train loss:3.575032 +step:8279 train loss:3.546235 +step:8280 train loss:3.588372 +step:8281 train loss:3.649184 +step:8282 train loss:3.548733 +step:8283 train loss:3.552247 +step:8284 train loss:3.522378 +step:8285 train loss:3.500844 +step:8286 train loss:3.566365 +step:8287 train loss:3.533791 +step:8288 train loss:3.558506 +step:8289 train loss:3.628145 +step:8290 train loss:3.522576 +step:8291 train loss:3.541537 +step:8292 train loss:3.557345 +step:8293 train loss:3.556986 +step:8294 train loss:3.568678 +step:8295 train loss:3.611418 +step:8296 train loss:3.535042 +step:8297 train loss:3.548958 +step:8298 train loss:3.545001 +step:8299 train loss:3.599720 +step:8300 train loss:3.543503 +step:8301 train loss:3.624853 +step:8302 train loss:3.516022 +step:8303 train loss:3.555433 +step:8304 train loss:3.475636 +step:8305 train loss:3.576690 +step:8306 train loss:3.514243 +step:8307 train loss:3.549003 +step:8308 train loss:3.538018 +step:8309 train loss:3.562986 +step:8310 train loss:3.598554 
+step:8311 train loss:3.541019 +step:8312 train loss:3.572611 +step:8313 train loss:3.575309 +step:8314 train loss:3.556679 +step:8315 train loss:3.497042 +step:8316 train loss:3.587737 +step:8317 train loss:3.606647 +step:8318 train loss:3.572894 +step:8319 train loss:3.576642 +step:8320 train loss:3.574948 +step:8321 train loss:3.532655 +step:8322 train loss:3.574108 +step:8323 train loss:3.545790 +step:8324 train loss:3.567029 +step:8325 train loss:3.550959 +step:8326 train loss:3.521815 +step:8327 train loss:3.581630 +step:8328 train loss:3.576296 +step:8329 train loss:3.550945 +step:8330 train loss:3.635677 +step:8331 train loss:3.578532 +step:8332 train loss:3.554403 +step:8333 train loss:3.480153 +step:8334 train loss:3.557475 +step:8335 train loss:3.521726 +step:8336 train loss:3.501080 +step:8337 train loss:3.545043 +step:8338 train loss:3.574713 +step:8339 train loss:3.573174 +step:8340 train loss:3.490864 +step:8341 train loss:3.542930 +step:8342 train loss:3.517192 +step:8343 train loss:3.578505 +step:8344 train loss:3.528831 +step:8345 train loss:3.629098 +step:8346 train loss:3.547375 +step:8347 train loss:3.576621 +step:8348 train loss:3.552467 +step:8349 train loss:3.590860 +step:8350 train loss:3.537369 +step:8351 train loss:3.595210 +step:8352 train loss:3.595352 +step:8353 train loss:3.557545 +step:8354 train loss:3.563353 +step:8355 train loss:3.567130 +step:8356 train loss:3.524142 +step:8357 train loss:3.589441 +step:8358 train loss:3.620921 +step:8359 train loss:3.601670 +step:8360 train loss:3.584832 +step:8361 train loss:3.598803 +step:8362 train loss:3.603161 +step:8363 train loss:3.551829 +step:8364 train loss:3.552392 +step:8365 train loss:3.554773 +step:8366 train loss:3.505490 +step:8367 train loss:3.562519 +step:8368 train loss:3.590401 +step:8369 train loss:3.530113 +step:8370 train loss:3.622414 +step:8371 train loss:3.516753 +step:8372 train loss:3.549152 +step:8373 train loss:3.552250 +step:8374 train loss:3.566151 +step:8375 train loss:3.527438 +step:8376 train loss:3.505336 +step:8377 train loss:3.553223 +step:8378 train loss:3.484738 +step:8379 train loss:3.551267 +step:8380 train loss:3.504400 +step:8381 train loss:3.547326 +step:8382 train loss:3.531255 +step:8383 train loss:3.513362 +step:8384 train loss:3.516764 +step:8385 train loss:3.554695 +step:8386 train loss:3.564569 +step:8387 train loss:3.524554 +step:8388 train loss:3.555980 +step:8389 train loss:3.658104 +step:8390 train loss:3.526537 +step:8391 train loss:3.538044 +step:8392 train loss:3.529167 +step:8393 train loss:3.546535 +step:8394 train loss:3.601214 +step:8395 train loss:3.573559 +step:8396 train loss:3.585970 +step:8397 train loss:3.526414 +step:8398 train loss:3.542292 +step:8399 train loss:3.530010 +step:8400 train loss:3.534073 +step:8401 train loss:3.558742 +step:8402 train loss:3.567917 +step:8403 train loss:3.553210 +step:8404 train loss:3.553079 +step:8405 train loss:3.602626 +step:8406 train loss:3.550927 +step:8407 train loss:3.520949 +step:8408 train loss:3.591307 +step:8409 train loss:3.542965 +step:8410 train loss:3.501678 +step:8411 train loss:3.529425 +step:8412 train loss:3.590558 +step:8413 train loss:3.498863 +step:8414 train loss:3.569997 +step:8415 train loss:3.539141 +step:8416 train loss:3.510814 +step:8417 train loss:3.516353 +step:8418 train loss:3.521739 +step:8419 train loss:3.553332 +step:8420 train loss:3.515815 +step:8421 train loss:3.529466 +step:8422 train loss:3.545079 +step:8423 train loss:3.549763 +step:8424 train loss:3.571559 +step:8425 train 
loss:3.504994 +step:8426 train loss:3.519474 +step:8427 train loss:3.552912 +step:8428 train loss:3.595633 +step:8429 train loss:3.565685 +step:8430 train loss:3.582208 +step:8431 train loss:3.559433 +step:8432 train loss:3.482736 +step:8433 train loss:3.522053 +step:8434 train loss:3.521118 +step:8435 train loss:3.540752 +step:8436 train loss:3.529966 +step:8437 train loss:3.507902 +step:8438 train loss:3.513291 +step:8439 train loss:3.569541 +step:8440 train loss:3.483159 +step:8441 train loss:3.539327 +step:8442 train loss:3.550074 +step:8443 train loss:3.571530 +step:8444 train loss:3.564116 +step:8445 train loss:3.551129 +step:8446 train loss:3.530399 +step:8447 train loss:3.573559 +step:8448 train loss:3.588126 +step:8449 train loss:3.565936 +step:8450 train loss:3.521896 +step:8451 train loss:3.545894 +step:8452 train loss:3.557537 +step:8453 train loss:3.466756 +step:8454 train loss:3.592872 +step:8455 train loss:3.550255 +step:8456 train loss:3.564697 +step:8457 train loss:3.478237 +step:8458 train loss:3.543210 +step:8459 train loss:3.535228 +step:8460 train loss:3.557897 +step:8461 train loss:3.533785 +step:8462 train loss:3.542074 +step:8463 train loss:3.553936 +step:8464 train loss:3.524255 +step:8465 train loss:3.601405 +step:8466 train loss:3.527388 +step:8467 train loss:3.499001 +step:8468 train loss:3.545177 +step:8469 train loss:3.542754 +step:8470 train loss:3.520365 +step:8471 train loss:3.526400 +step:8472 train loss:3.594945 +step:8473 train loss:3.520133 +step:8474 train loss:3.507558 +step:8475 train loss:3.544729 +step:8476 train loss:3.606688 +step:8477 train loss:3.520618 +step:8478 train loss:3.507948 +step:8479 train loss:3.490186 +step:8480 train loss:3.512712 +step:8481 train loss:3.530666 +step:8482 train loss:3.523143 +step:8483 train loss:3.560432 +step:8484 train loss:3.517362 +step:8485 train loss:3.570912 +step:8486 train loss:3.520645 +step:8487 train loss:3.496152 +step:8488 train loss:3.518612 +step:8489 train loss:3.590798 +step:8490 train loss:3.513155 +step:8491 train loss:3.486368 +step:8492 train loss:3.555684 +step:8493 train loss:3.537493 +step:8494 train loss:3.579627 +step:8495 train loss:3.613021 +step:8496 train loss:3.553504 +step:8497 train loss:3.503594 +step:8498 train loss:3.510396 +step:8499 train loss:3.537492 +step:8500 validation loss:3.470108 total_sharp:-6.6594e-03 L1_sharp:-4.9323e-03 L2_sharp:1.5236e-02 L3_sharp:1.1150e-02 L4_sharp:2.6617e-03 L5_sharp:2.2441e-03 L6_sharp:2.2582e-03 L7_sharp:1.4657e-03 L8_sharp:1.5175e-03 L9_sharp:9.3573e-04 L10_sharp:5.1021e-04 L11_sharp:6.2577e-04 L12_sharp:7.3653e-04 total_fnorm:1.6283e+00 total_l1_linf:1.3494e+04 total_spectral:1.6283e+00 L1_fnorm:2.8011e-01 L2_fnorm:1.9140e-01 L3_fnorm:2.1352e-01 L4_fnorm:2.9493e-01 L5_fnorm:3.6626e-01 L6_fnorm:3.8605e-01 L7_fnorm:4.3922e-01 L8_fnorm:4.4197e-01 L9_fnorm:4.4397e-01 L10_fnorm:4.4601e-01 L11_fnorm:4.2891e-01 L12_fnorm:4.5022e-01 L1_l1linf:2.8452e-01 L2_l1linf:3.4010e-01 L3_l1linf:3.2456e-01 L4_l1linf:4.1841e-01 L5_l1linf:3.4065e-01 L6_l1linf:3.4916e-01 L7_l1linf:2.9846e-01 L8_l1linf:2.9895e-01 L9_l1linf:3.0849e-01 L10_l1linf:3.3307e-01 L11_l1linf:3.6534e-01 L12_l1linf:3.6530e-01 L1_spectral:9.0316e-03 L2_spectral:9.0265e-03 L3_spectral:9.0293e-03 L4_spectral:1.5646e-02 L5_spectral:1.1363e-02 L6_spectral:9.0342e-03 L7_spectral:9.0392e-03 L8_spectral:9.0437e-03 L9_spectral:9.0391e-03 L10_spectral:9.0437e-03 L11_spectral:9.0412e-03 L12_spectral:9.0386e-03 v_norm:1.6283e+00 cos_v_-g_hvp:2.1838e-02 g_hvp_norm:8.3394e-01 cos_v_-g_t:2.7433e-02 
g_t_norm:1.1950e+00 hv_norm:2.2228e+01 cos_v_hv:-4.8782e-04 hg_norm:8.0800e+04 cos_g_hg:-2.1146e-01 v_par:2.8623e-03 v_perp:1.6282e+00 L1_cos_v_neg_g:2.4587e-02 L1_v_norm:2.8011e-01 L2_cos_v_neg_g:3.2339e-02 L2_v_norm:1.9140e-01 L3_cos_v_neg_g:2.2009e-02 L3_v_norm:2.1352e-01 L4_cos_v_neg_g:2.0157e-02 L4_v_norm:2.9493e-01 L5_cos_v_neg_g:2.1265e-02 L5_v_norm:3.6626e-01 L6_cos_v_neg_g:2.5428e-02 L6_v_norm:3.8605e-01 L7_cos_v_neg_g:2.4205e-02 L7_v_norm:4.3922e-01 L8_cos_v_neg_g:2.3809e-02 L8_v_norm:4.4197e-01 L9_cos_v_neg_g:2.5403e-02 L9_v_norm:4.4397e-01 L10_cos_v_neg_g:2.5596e-02 L10_v_norm:4.4601e-01 L11_cos_v_neg_g:3.6362e-02 L11_v_norm:4.2891e-01 L12_cos_v_neg_g:6.0812e-02 L12_v_norm:4.5022e-01 +step:8500 train loss:3.470217 +step:8501 train loss:3.528736 +step:8502 train loss:3.528489 +step:8503 train loss:3.499040 +step:8504 train loss:3.523614 +step:8505 train loss:3.553441 +step:8506 train loss:3.538453 +step:8507 train loss:3.506709 +step:8508 train loss:3.516803 +step:8509 train loss:3.578326 +step:8510 train loss:3.528086 +step:8511 train loss:3.507792 +step:8512 train loss:3.504061 +step:8513 train loss:3.547944 +step:8514 train loss:3.515871 +step:8515 train loss:3.483787 +step:8516 train loss:3.525552 +step:8517 train loss:3.511337 +step:8518 train loss:3.547112 +step:8519 train loss:3.556055 +step:8520 train loss:3.509125 +step:8521 train loss:3.490551 +step:8522 train loss:3.509883 +step:8523 train loss:3.541718 +step:8524 train loss:3.580065 +step:8525 train loss:3.537135 +step:8526 train loss:3.494261 +step:8527 train loss:3.553419 +step:8528 train loss:3.553665 +step:8529 train loss:3.520121 +step:8530 train loss:3.523113 +step:8531 train loss:3.535040 +step:8532 train loss:3.555421 +step:8533 train loss:3.620899 +step:8534 train loss:3.565217 +step:8535 train loss:3.593958 +step:8536 train loss:3.488510 +step:8537 train loss:3.528818 +step:8538 train loss:3.483850 +step:8539 train loss:3.590703 +step:8540 train loss:3.551364 +step:8541 train loss:3.507697 +step:8542 train loss:3.584496 +step:8543 train loss:3.505117 +step:8544 train loss:3.592482 +step:8545 train loss:3.489526 +step:8546 train loss:3.574376 +step:8547 train loss:3.514600 +step:8548 train loss:3.551869 +step:8549 train loss:3.503706 +step:8550 train loss:3.562684 +step:8551 train loss:3.564721 +step:8552 train loss:3.506633 +step:8553 train loss:3.535270 +step:8554 train loss:3.527013 +step:8555 train loss:3.622905 +step:8556 train loss:3.558457 +step:8557 train loss:3.536731 +step:8558 train loss:3.496175 +step:8559 train loss:3.515976 +step:8560 train loss:3.579200 +step:8561 train loss:3.509822 +step:8562 train loss:3.538777 +step:8563 train loss:3.516300 +step:8564 train loss:3.617594 +step:8565 train loss:3.507861 +step:8566 train loss:3.524259 +step:8567 train loss:3.529718 +step:8568 train loss:3.520158 +step:8569 train loss:3.538064 +step:8570 train loss:3.596177 +step:8571 train loss:3.538645 +step:8572 train loss:3.488822 +step:8573 train loss:3.507334 +step:8574 train loss:3.509947 +step:8575 train loss:3.546016 +step:8576 train loss:3.575773 +step:8577 train loss:3.550911 +step:8578 train loss:3.549469 +step:8579 train loss:3.608427 +step:8580 train loss:3.582879 +step:8581 train loss:3.524652 +step:8582 train loss:3.492771 +step:8583 train loss:3.535632 +step:8584 train loss:3.562896 +step:8585 train loss:3.544607 +step:8586 train loss:3.467688 +step:8587 train loss:3.545735 +step:8588 train loss:3.567381 +step:8589 train loss:3.485089 +step:8590 train loss:3.524508 +step:8591 train 
loss:3.550240 +step:8592 train loss:3.556254 +step:8593 train loss:3.567819 +step:8594 train loss:3.507497 +step:8595 train loss:3.517017 +step:8596 train loss:3.528105 +step:8597 train loss:3.581948 +step:8598 train loss:3.553531 +step:8599 train loss:3.574551 +step:8600 train loss:3.546875 +step:8601 train loss:3.572186 +step:8602 train loss:3.522123 +step:8603 train loss:3.485242 +step:8604 train loss:3.520942 +step:8605 train loss:3.490579 +step:8606 train loss:3.624779 +step:8607 train loss:3.528972 +step:8608 train loss:3.493255 +step:8609 train loss:3.568358 +step:8610 train loss:3.503192 +step:8611 train loss:3.517983 +step:8612 train loss:3.526637 +step:8613 train loss:3.538059 +step:8614 train loss:3.589820 +step:8615 train loss:3.564555 +step:8616 train loss:3.558966 +step:8617 train loss:3.490391 +step:8618 train loss:3.531987 +step:8619 train loss:3.505420 +step:8620 train loss:3.512043 +step:8621 train loss:3.533113 +step:8622 train loss:3.526763 +step:8623 train loss:3.455689 +step:8624 train loss:3.474156 +step:8625 train loss:3.546346 +step:8626 train loss:3.550552 +step:8627 train loss:3.523264 +step:8628 train loss:3.479350 +step:8629 train loss:3.550259 +step:8630 train loss:3.560971 +step:8631 train loss:3.566238 +step:8632 train loss:3.641074 +step:8633 train loss:3.570653 +step:8634 train loss:3.528340 +step:8635 train loss:3.528326 +step:8636 train loss:3.527736 +step:8637 train loss:3.584332 +step:8638 train loss:3.564977 +step:8639 train loss:3.453372 +step:8640 train loss:3.533403 +step:8641 train loss:3.502104 +step:8642 train loss:3.539540 +step:8643 train loss:3.535159 +step:8644 train loss:3.522115 +step:8645 train loss:3.546054 +step:8646 train loss:3.501378 +step:8647 train loss:3.481401 +step:8648 train loss:3.527460 +step:8649 train loss:3.483695 +step:8650 train loss:3.525075 +step:8651 train loss:3.550622 +step:8652 train loss:3.558747 +step:8653 train loss:3.515326 +step:8654 train loss:3.523490 +step:8655 train loss:3.554978 +step:8656 train loss:3.562921 +step:8657 train loss:3.542454 +step:8658 train loss:3.603834 +step:8659 train loss:3.564756 +step:8660 train loss:3.524187 +step:8661 train loss:3.489721 +step:8662 train loss:3.477885 +step:8663 train loss:3.481843 +step:8664 train loss:3.547492 +step:8665 train loss:3.497619 +step:8666 train loss:3.531884 +step:8667 train loss:3.538568 +step:8668 train loss:3.513091 +step:8669 train loss:3.556591 +step:8670 train loss:3.553659 +step:8671 train loss:3.597422 +step:8672 train loss:3.590874 +step:8673 train loss:3.519664 +step:8674 train loss:3.488261 +step:8675 train loss:3.505538 +step:8676 train loss:3.507719 +step:8677 train loss:3.534900 +step:8678 train loss:3.560647 +step:8679 train loss:3.566249 +step:8680 train loss:3.516187 +step:8681 train loss:3.498569 +step:8682 train loss:3.517178 +step:8683 train loss:3.600794 +step:8684 train loss:3.559721 +step:8685 train loss:3.509062 +step:8686 train loss:3.505038 +step:8687 train loss:3.550042 +step:8688 train loss:3.510252 +step:8689 train loss:3.488843 +step:8690 train loss:3.523069 +step:8691 train loss:3.542646 +step:8692 train loss:3.461326 +step:8693 train loss:3.596060 +step:8694 train loss:3.581193 +step:8695 train loss:3.507753 +step:8696 train loss:3.567172 +step:8697 train loss:3.500673 +step:8698 train loss:3.545896 +step:8699 train loss:3.458173 +step:8700 train loss:3.497592 +step:8701 train loss:3.555266 +step:8702 train loss:3.468893 +step:8703 train loss:3.507581 +step:8704 train loss:3.511777 +step:8705 train loss:3.520980 
+step:8706 train loss:3.486990 +step:8707 train loss:3.539211 +step:8708 train loss:3.522401 +step:8709 train loss:3.495853 +step:8710 train loss:3.500422 +step:8711 train loss:3.521374 +step:8712 train loss:3.478168 +step:8713 train loss:3.466811 +step:8714 train loss:3.479384 +step:8715 train loss:3.489261 +step:8716 train loss:3.442067 +step:8717 train loss:3.495241 +step:8718 train loss:3.515909 +step:8719 train loss:3.517443 +step:8720 train loss:3.496908 +step:8721 train loss:3.476887 +step:8722 train loss:3.490851 +step:8723 train loss:3.494494 +step:8724 train loss:3.508695 +step:8725 train loss:3.496781 +step:8726 train loss:3.549571 +step:8727 train loss:3.519405 +step:8728 train loss:3.536782 +step:8729 train loss:3.560491 +step:8730 train loss:3.525263 +step:8731 train loss:3.595261 +step:8732 train loss:3.486932 +step:8733 train loss:3.548290 +step:8734 train loss:3.588518 +step:8735 train loss:3.409108 +step:8736 train loss:3.539490 +step:8737 train loss:3.516261 +step:8738 train loss:3.486883 +step:8739 train loss:3.522719 +step:8740 train loss:3.514545 +step:8741 train loss:3.585776 +step:8742 train loss:3.545679 +step:8743 train loss:3.500088 +step:8744 train loss:3.618587 +step:8745 train loss:3.605759 +step:8746 train loss:3.488029 +step:8747 train loss:3.560185 +step:8748 train loss:3.557219 +step:8749 train loss:3.532413 +step:8750 validation loss:3.455081 +step:8750 train loss:3.548387 +step:8751 train loss:3.588670 +step:8752 train loss:3.487584 +step:8753 train loss:3.552438 +step:8754 train loss:3.469709 +step:8755 train loss:3.497217 +step:8756 train loss:3.504826 +step:8757 train loss:3.501771 +step:8758 train loss:3.524718 +step:8759 train loss:3.541459 +step:8760 train loss:3.497190 +step:8761 train loss:3.520645 +step:8762 train loss:3.514416 +step:8763 train loss:3.504458 +step:8764 train loss:3.543089 +step:8765 train loss:3.551153 +step:8766 train loss:3.603552 +step:8767 train loss:3.538448 +step:8768 train loss:3.606790 +step:8769 train loss:3.500299 +step:8770 train loss:3.495024 +step:8771 train loss:3.516926 +step:8772 train loss:3.559869 +step:8773 train loss:3.531531 +step:8774 train loss:3.577876 +step:8775 train loss:3.590634 +step:8776 train loss:3.546988 +step:8777 train loss:3.549824 +step:8778 train loss:3.532517 +step:8779 train loss:3.562179 +step:8780 train loss:3.495700 +step:8781 train loss:3.521373 +step:8782 train loss:3.549011 +step:8783 train loss:3.550144 +step:8784 train loss:3.529160 +step:8785 train loss:3.549302 +step:8786 train loss:3.521616 +step:8787 train loss:3.587765 +step:8788 train loss:3.563487 +step:8789 train loss:3.485704 +step:8790 train loss:3.509171 +step:8791 train loss:3.477959 +step:8792 train loss:3.526199 +step:8793 train loss:3.592385 +step:8794 train loss:3.502069 +step:8795 train loss:3.516911 +step:8796 train loss:3.518147 +step:8797 train loss:3.507223 +step:8798 train loss:3.483832 +step:8799 train loss:3.464812 +step:8800 train loss:3.589969 +step:8801 train loss:3.513582 +step:8802 train loss:3.487626 +step:8803 train loss:3.582015 +step:8804 train loss:3.547006 +step:8805 train loss:3.513363 +step:8806 train loss:3.518353 +step:8807 train loss:3.520655 +step:8808 train loss:3.584918 +step:8809 train loss:3.505983 +step:8810 train loss:3.507753 +step:8811 train loss:3.645296 +step:8812 train loss:3.608006 +step:8813 train loss:3.622247 +step:8814 train loss:3.500494 +step:8815 train loss:3.554579 +step:8816 train loss:3.493961 +step:8817 train loss:3.501399 +step:8818 train loss:3.496776 +step:8819 
train loss:3.519256 +step:8820 train loss:3.512803 +step:8821 train loss:3.587427 +step:8822 train loss:3.467505 +step:8823 train loss:3.470354 +step:8824 train loss:3.488856 +step:8825 train loss:3.471914 +step:8826 train loss:3.591350 +step:8827 train loss:3.632230 +step:8828 train loss:3.586435 +step:8829 train loss:3.500329 +step:8830 train loss:3.493609 +step:8831 train loss:3.535328 +step:8832 train loss:3.548730 +step:8833 train loss:3.467030 +step:8834 train loss:3.526587 +step:8835 train loss:3.489805 +step:8836 train loss:3.595532 +step:8837 train loss:3.526065 +step:8838 train loss:3.454997 +step:8839 train loss:3.568507 +step:8840 train loss:3.513064 +step:8841 train loss:3.518486 +step:8842 train loss:3.522360 +step:8843 train loss:3.503852 +step:8844 train loss:3.536372 +step:8845 train loss:3.494750 +step:8846 train loss:3.492481 +step:8847 train loss:3.514849 +step:8848 train loss:3.500515 +step:8849 train loss:3.540027 +step:8850 train loss:3.513672 +step:8851 train loss:3.478473 +step:8852 train loss:3.508868 +step:8853 train loss:3.482172 +step:8854 train loss:3.458851 +step:8855 train loss:3.505913 +step:8856 train loss:3.472124 +step:8857 train loss:3.508211 +step:8858 train loss:3.526376 +step:8859 train loss:3.560402 +step:8860 train loss:3.491442 +step:8861 train loss:3.462424 +step:8862 train loss:3.513197 +step:8863 train loss:3.433464 +step:8864 train loss:3.409432 +step:8865 train loss:3.503090 +step:8866 train loss:3.522042 +step:8867 train loss:3.467464 +step:8868 train loss:3.543665 +step:8869 train loss:3.514169 +step:8870 train loss:3.543403 +step:8871 train loss:3.513957 +step:8872 train loss:3.538320 +step:8873 train loss:3.499247 +step:8874 train loss:3.569963 +step:8875 train loss:3.516232 +step:8876 train loss:3.517968 +step:8877 train loss:3.507862 +step:8878 train loss:3.558347 +step:8879 train loss:3.482191 +step:8880 train loss:3.487282 +step:8881 train loss:3.458449 +step:8882 train loss:3.477976 +step:8883 train loss:3.542839 +step:8884 train loss:3.482636 +step:8885 train loss:3.518526 +step:8886 train loss:3.499442 +step:8887 train loss:3.511379 +step:8888 train loss:3.503546 +step:8889 train loss:3.564544 +step:8890 train loss:3.532461 +step:8891 train loss:3.450622 +step:8892 train loss:3.462258 +step:8893 train loss:3.472591 +step:8894 train loss:3.539244 +step:8895 train loss:3.558467 +step:8896 train loss:3.522674 +step:8897 train loss:3.600580 +step:8898 train loss:3.586324 +step:8899 train loss:3.515787 +step:8900 train loss:3.549483 +step:8901 train loss:3.483048 +step:8902 train loss:3.508178 +step:8903 train loss:3.506877 +step:8904 train loss:3.502416 +step:8905 train loss:3.469520 +step:8906 train loss:3.568048 +step:8907 train loss:3.485708 +step:8908 train loss:3.493914 +step:8909 train loss:3.503150 +step:8910 train loss:3.520948 +step:8911 train loss:3.465917 +step:8912 train loss:3.476101 +step:8913 train loss:3.488362 +step:8914 train loss:3.509775 +step:8915 train loss:3.507370 +step:8916 train loss:3.415202 +step:8917 train loss:3.493098 +step:8918 train loss:3.461891 +step:8919 train loss:3.412815 +step:8920 train loss:3.472240 +step:8921 train loss:3.532610 +step:8922 train loss:3.430297 +step:8923 train loss:3.491108 +step:8924 train loss:3.444496 +step:8925 train loss:3.511543 +step:8926 train loss:3.560339 +step:8927 train loss:3.496280 +step:8928 train loss:3.510888 +step:8929 train loss:3.467530 +step:8930 train loss:3.501179 +step:8931 train loss:3.482643 +step:8932 train loss:3.491162 +step:8933 train loss:3.469868 
+step:8934 train loss:3.496584 +step:8935 train loss:3.545631 +step:8936 train loss:3.483696 +step:8937 train loss:3.514262 +step:8938 train loss:3.500660 +step:8939 train loss:3.494815 +step:8940 train loss:3.500387 +step:8941 train loss:3.533920 +step:8942 train loss:3.485732 +step:8943 train loss:3.522446 +step:8944 train loss:3.617738 +step:8945 train loss:3.491558 +step:8946 train loss:3.493275 +step:8947 train loss:3.461462 +step:8948 train loss:3.451698 +step:8949 train loss:3.443516 +step:8950 train loss:3.446551 +step:8951 train loss:3.430594 +step:8952 train loss:3.482915 +step:8953 train loss:3.475400 +step:8954 train loss:3.515850 +step:8955 train loss:3.465072 +step:8956 train loss:3.551352 +step:8957 train loss:3.411801 +step:8958 train loss:3.485215 +step:8959 train loss:3.459239 +step:8960 train loss:3.507282 +step:8961 train loss:3.461237 +step:8962 train loss:3.439435 +step:8963 train loss:3.448733 +step:8964 train loss:3.531261 +step:8965 train loss:3.507845 +step:8966 train loss:3.544696 +step:8967 train loss:3.504331 +step:8968 train loss:3.487354 +step:8969 train loss:3.440278 +step:8970 train loss:3.521737 +step:8971 train loss:3.483027 +step:8972 train loss:3.513450 +step:8973 train loss:3.540694 +step:8974 train loss:3.506109 +step:8975 train loss:3.509690 +step:8976 train loss:3.436401 +step:8977 train loss:3.495756 +step:8978 train loss:3.494270 +step:8979 train loss:3.452941 +step:8980 train loss:3.430342 +step:8981 train loss:3.452486 +step:8982 train loss:3.465394 +step:8983 train loss:3.436040 +step:8984 train loss:3.475819 +step:8985 train loss:3.499183 +step:8986 train loss:3.508070 +step:8987 train loss:3.536572 +step:8988 train loss:3.477036 +step:8989 train loss:3.499310 +step:8990 train loss:3.521425 +step:8991 train loss:3.513101 +step:8992 train loss:3.451709 +step:8993 train loss:3.500577 +step:8994 train loss:3.447040 +step:8995 train loss:3.499674 +step:8996 train loss:3.570646 +step:8997 train loss:3.487248 +step:8998 train loss:3.485698 +step:8999 train loss:3.393779 +step:9000 validation loss:3.438011 total_sharp:1.6189e-02 L1_sharp:2.2470e-01 L2_sharp:1.3995e-01 L3_sharp:1.7518e-02 L4_sharp:3.3625e-03 L5_sharp:4.5238e-03 L6_sharp:2.9226e-03 L7_sharp:2.1100e-03 L8_sharp:1.8146e-03 L9_sharp:1.1301e-03 L10_sharp:6.7136e-04 L11_sharp:7.5852e-04 L12_sharp:1.0389e-03 total_fnorm:1.0718e+00 total_l1_linf:8.8383e+03 total_spectral:1.0718e+00 L1_fnorm:1.6410e-01 L2_fnorm:1.0442e-01 L3_fnorm:1.3439e-01 L4_fnorm:1.8553e-01 L5_fnorm:2.1808e-01 L6_fnorm:2.5004e-01 L7_fnorm:2.8887e-01 L8_fnorm:2.9248e-01 L9_fnorm:2.9526e-01 L10_fnorm:2.9592e-01 L11_fnorm:2.8462e-01 L12_fnorm:3.0019e-01 L1_l1linf:2.2042e-01 L2_l1linf:2.8742e-01 L3_l1linf:2.5148e-01 L4_l1linf:2.6812e-01 L5_l1linf:2.7318e-01 L6_l1linf:2.5316e-01 L7_l1linf:2.3625e-01 L8_l1linf:2.1941e-01 L9_l1linf:2.4565e-01 L10_l1linf:2.5598e-01 L11_l1linf:2.6909e-01 L12_l1linf:2.8367e-01 L1_spectral:6.0216e-03 L2_spectral:6.3481e-03 L3_spectral:6.0196e-03 L4_spectral:9.8692e-03 L5_spectral:1.0275e-02 L6_spectral:6.4344e-03 L7_spectral:6.0277e-03 L8_spectral:6.0298e-03 L9_spectral:6.0291e-03 L10_spectral:6.0301e-03 L11_spectral:6.0302e-03 L12_spectral:6.2564e-03 v_norm:1.0718e+00 cos_v_-g_hvp:2.2560e-02 g_hvp_norm:7.5320e-01 cos_v_-g_t:2.0473e-02 g_t_norm:1.6801e+00 hv_norm:9.6986e+00 cos_v_hv:1.7891e-03 hg_norm:5.5490e+03 cos_g_hg:-8.3868e-03 v_par:1.6259e-03 v_perp:1.0718e+00 L1_cos_v_neg_g:1.2786e-02 L1_v_norm:1.6410e-01 L2_cos_v_neg_g:-7.1794e-03 L2_v_norm:1.0442e-01 L3_cos_v_neg_g:2.2301e-02 
L3_v_norm:1.3439e-01 L4_cos_v_neg_g:1.7981e-02 L4_v_norm:1.8553e-01 L5_cos_v_neg_g:2.7730e-02 L5_v_norm:2.1808e-01 L6_cos_v_neg_g:2.6924e-02 L6_v_norm:2.5004e-01 L7_cos_v_neg_g:2.8022e-02 L7_v_norm:2.8887e-01 L8_cos_v_neg_g:2.6745e-02 L8_v_norm:2.9248e-01 L9_cos_v_neg_g:2.9378e-02 L9_v_norm:2.9526e-01 L10_cos_v_neg_g:3.3106e-02 L10_v_norm:2.9592e-01 L11_cos_v_neg_g:4.5681e-02 L11_v_norm:2.8462e-01 L12_cos_v_neg_g:7.7934e-02 L12_v_norm:3.0019e-01 +step:9000 train loss:3.493386 +step:9001 train loss:3.493763 +step:9002 train loss:3.497276 +step:9003 train loss:3.457269 +step:9004 train loss:3.480966 +step:9005 train loss:3.485546 +step:9006 train loss:3.493605 +step:9007 train loss:3.484200 +step:9008 train loss:3.472653 +step:9009 train loss:3.540920 +step:9010 train loss:3.456634 +step:9011 train loss:3.532576 +step:9012 train loss:3.463594 +step:9013 train loss:3.498529 +step:9014 train loss:3.443919 +step:9015 train loss:3.529663 +step:9016 train loss:3.535403 +step:9017 train loss:3.534304 +step:9018 train loss:3.534273 +step:9019 train loss:3.480366 +step:9020 train loss:3.520425 +step:9021 train loss:3.466178 +step:9022 train loss:3.554260 +step:9023 train loss:3.510119 +step:9024 train loss:3.486871 +step:9025 train loss:3.483744 +step:9026 train loss:3.569550 +step:9027 train loss:3.494685 +step:9028 train loss:3.497914 +step:9029 train loss:3.487298 +step:9030 train loss:3.447614 +step:9031 train loss:3.438669 +step:9032 train loss:3.521629 +step:9033 train loss:3.473567 +step:9034 train loss:3.547079 +step:9035 train loss:3.530113 +step:9036 train loss:3.514186 +step:9037 train loss:3.519685 +step:9038 train loss:3.424261 +step:9039 train loss:3.475794 +step:9040 train loss:3.464295 +step:9041 train loss:3.482327 +step:9042 train loss:3.435276 +step:9043 train loss:3.569880 +step:9044 train loss:3.419132 +step:9045 train loss:3.446881 +step:9046 train loss:3.493309 +step:9047 train loss:3.424985 +step:9048 train loss:3.513283 +step:9049 train loss:3.478877 +step:9050 train loss:3.507712 +step:9051 train loss:3.482240 +step:9052 train loss:3.523425 +step:9053 train loss:3.458838 +step:9054 train loss:3.529691 +step:9055 train loss:3.482758 +step:9056 train loss:3.530224 +step:9057 train loss:3.534777 +step:9058 train loss:3.471327 +step:9059 train loss:3.496534 +step:9060 train loss:3.441363 +step:9061 train loss:3.489979 +step:9062 train loss:3.476646 +step:9063 train loss:3.470813 +step:9064 train loss:3.504809 +step:9065 train loss:3.520596 +step:9066 train loss:3.443876 +step:9067 train loss:3.456843 +step:9068 train loss:3.504964 +step:9069 train loss:3.501998 +step:9070 train loss:3.474104 +step:9071 train loss:3.478676 +step:9072 train loss:3.519756 +step:9073 train loss:3.466923 +step:9074 train loss:3.582752 +step:9075 train loss:3.497031 +step:9076 train loss:3.490543 +step:9077 train loss:3.527431 +step:9078 train loss:3.449096 +step:9079 train loss:3.537728 +step:9080 train loss:3.561988 +step:9081 train loss:3.498758 +step:9082 train loss:3.512303 +step:9083 train loss:3.453713 +step:9084 train loss:3.504768 +step:9085 train loss:3.487463 +step:9086 train loss:3.475620 +step:9087 train loss:3.525275 +step:9088 train loss:3.473729 +step:9089 train loss:3.509852 +step:9090 train loss:3.501059 +step:9091 train loss:3.573313 +step:9092 train loss:3.541254 +step:9093 train loss:3.628390 +step:9094 train loss:3.455404 +step:9095 train loss:3.535307 +step:9096 train loss:3.455838 +step:9097 train loss:3.480968 +step:9098 train loss:3.513818 +step:9099 train loss:3.487874 
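Each validation dump above pairs every block with directional statistics (`cos_v_neg_g`, `v_par`, `v_perp`, `v_norm`). The training script's exact definitions are not recoverable from the log alone, but the key names are consistent with comparing the optimizer update v against some reference vector (the gradient, or one of the other logged directions). The sketch below shows one plausible reading; the function name and the choice of reference vector are assumptions, not the script's API.

```python
import torch

def directional_stats(v: torch.Tensor, r: torch.Tensor, eps: float = 1e-12):
    """Compare an update vector v against a reference direction r.

    Assumed reading of the logged keys (an interpretation, not the script's code):
      cos_v_neg_g : cosine similarity between v and -r
      v_par       : norm of the component of v along r
      v_perp      : norm of the component of v orthogonal to r
    """
    v, r = v.flatten().float(), r.flatten().float()
    v_norm, r_norm = v.norm(), r.norm()
    cos_v_neg_r = torch.dot(v, -r) / (v_norm * r_norm + eps)
    # Orthogonal decomposition of v with respect to r.
    v_par = (torch.dot(v, r) / (r_norm ** 2 + eps)) * r
    v_perp = v - v_par
    return {
        "v_norm": v_norm.item(),
        "cos_v_neg_r": cos_v_neg_r.item(),
        "v_par": v_par.norm().item(),
        "v_perp": v_perp.norm().item(),
    }
```

Under this reading, the dumps above describe an update that is almost orthogonal to the reference direction: v_perp is essentially equal to v_norm, while v_par is several orders of magnitude smaller.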
+step:9100 train loss:3.433131 +step:9101 train loss:3.510514 +step:9102 train loss:3.454848 +step:9103 train loss:3.477358 +step:9104 train loss:3.536330 +step:9105 train loss:3.541332 +step:9106 train loss:3.493893 +step:9107 train loss:3.544807 +step:9108 train loss:3.495739 +step:9109 train loss:3.502817 +step:9110 train loss:3.491376 +step:9111 train loss:3.513792 +step:9112 train loss:3.426799 +step:9113 train loss:3.458812 +step:9114 train loss:3.496945 +step:9115 train loss:3.544040 +step:9116 train loss:3.554035 +step:9117 train loss:3.486085 +step:9118 train loss:3.495630 +step:9119 train loss:3.529159 +step:9120 train loss:3.536692 +step:9121 train loss:3.524907 +step:9122 train loss:3.486732 +step:9123 train loss:3.473937 +step:9124 train loss:3.530028 +step:9125 train loss:3.529840 +step:9126 train loss:3.538989 +step:9127 train loss:3.484210 +step:9128 train loss:3.562619 +step:9129 train loss:3.504025 +step:9130 train loss:3.502723 +step:9131 train loss:3.478093 +step:9132 train loss:3.479522 +step:9133 train loss:3.517544 +step:9134 train loss:3.461978 +step:9135 train loss:3.481683 +step:9136 train loss:3.464297 +step:9137 train loss:3.441384 +step:9138 train loss:3.533045 +step:9139 train loss:3.487430 +step:9140 train loss:3.585411 +step:9141 train loss:3.498603 +step:9142 train loss:3.490980 +step:9143 train loss:3.521319 +step:9144 train loss:3.462442 +step:9145 train loss:3.498982 +step:9146 train loss:3.509986 +step:9147 train loss:3.480296 +step:9148 train loss:3.469324 +step:9149 train loss:3.472671 +step:9150 train loss:3.458650 +step:9151 train loss:3.521901 +step:9152 train loss:3.476087 +step:9153 train loss:3.473606 +step:9154 train loss:3.397015 +step:9155 train loss:3.465974 +step:9156 train loss:3.503475 +step:9157 train loss:3.552096 +step:9158 train loss:3.508251 +step:9159 train loss:3.446994 +step:9160 train loss:3.496165 +step:9161 train loss:3.438830 +step:9162 train loss:3.466493 +step:9163 train loss:3.458016 +step:9164 train loss:3.461004 +step:9165 train loss:3.522904 +step:9166 train loss:3.502180 +step:9167 train loss:3.530715 +step:9168 train loss:3.471745 +step:9169 train loss:3.467567 +step:9170 train loss:3.469271 +step:9171 train loss:3.526120 +step:9172 train loss:3.514734 +step:9173 train loss:3.494541 +step:9174 train loss:3.458273 +step:9175 train loss:3.473937 +step:9176 train loss:3.532397 +step:9177 train loss:3.471127 +step:9178 train loss:3.465072 +step:9179 train loss:3.473726 +step:9180 train loss:3.392278 +step:9181 train loss:3.502603 +step:9182 train loss:3.489104 +step:9183 train loss:3.547513 +step:9184 train loss:3.506604 +step:9185 train loss:3.512954 +step:9186 train loss:3.480171 +step:9187 train loss:3.488951 +step:9188 train loss:3.504683 +step:9189 train loss:3.465367 +step:9190 train loss:3.522654 +step:9191 train loss:3.538397 +step:9192 train loss:3.495855 +step:9193 train loss:3.429568 +step:9194 train loss:3.511100 +step:9195 train loss:3.416707 +step:9196 train loss:3.467269 +step:9197 train loss:3.512238 +step:9198 train loss:3.515920 +step:9199 train loss:3.531256 +step:9200 train loss:3.459476 +step:9201 train loss:3.521583 +step:9202 train loss:3.459447 +step:9203 train loss:3.570638 +step:9204 train loss:3.559015 +step:9205 train loss:3.503484 +step:9206 train loss:3.486053 +step:9207 train loss:3.459737 +step:9208 train loss:3.496475 +step:9209 train loss:3.504939 +step:9210 train loss:3.471121 +step:9211 train loss:3.441427 +step:9212 train loss:3.533645 +step:9213 train loss:3.430807 +step:9214 train 
loss:3.432217 +step:9215 train loss:3.442379 +step:9216 train loss:3.477412 +step:9217 train loss:3.504601 +step:9218 train loss:3.418665 +step:9219 train loss:3.430341 +step:9220 train loss:3.414965 +step:9221 train loss:3.478453 +step:9222 train loss:3.439556 +step:9223 train loss:3.518867 +step:9224 train loss:3.468011 +step:9225 train loss:3.482774 +step:9226 train loss:3.432501 +step:9227 train loss:3.503607 +step:9228 train loss:3.529962 +step:9229 train loss:3.521598 +step:9230 train loss:3.522834 +step:9231 train loss:3.464372 +step:9232 train loss:3.491528 +step:9233 train loss:3.425440 +step:9234 train loss:3.541385 +step:9235 train loss:3.482479 +step:9236 train loss:3.493586 +step:9237 train loss:3.473385 +step:9238 train loss:3.428649 +step:9239 train loss:3.469270 +step:9240 train loss:3.505217 +step:9241 train loss:3.546271 +step:9242 train loss:3.460231 +step:9243 train loss:3.466686 +step:9244 train loss:3.444890 +step:9245 train loss:3.503956 +step:9246 train loss:3.440657 +step:9247 train loss:3.511941 +step:9248 train loss:3.393539 +step:9249 train loss:3.508613 +step:9250 validation loss:3.421602 +step:9250 train loss:3.520623 +step:9251 train loss:3.510221 +step:9252 train loss:3.455122 +step:9253 train loss:3.545965 +step:9254 train loss:3.463289 +step:9255 train loss:3.471671 +step:9256 train loss:3.443061 +step:9257 train loss:3.488573 +step:9258 train loss:3.489783 +step:9259 train loss:3.498732 +step:9260 train loss:3.491407 +step:9261 train loss:3.504584 +step:9262 train loss:3.453655 +step:9263 train loss:3.466486 +step:9264 train loss:3.474607 +step:9265 train loss:3.457090 +step:9266 train loss:3.478951 +step:9267 train loss:3.461071 +step:9268 train loss:3.540928 +step:9269 train loss:3.458682 +step:9270 train loss:3.495359 +step:9271 train loss:3.500370 +step:9272 train loss:3.459684 +step:9273 train loss:3.490989 +step:9274 train loss:3.462667 +step:9275 train loss:3.437529 +step:9276 train loss:3.453935 +step:9277 train loss:3.396840 +step:9278 train loss:3.502217 +step:9279 train loss:3.464797 +step:9280 train loss:3.413576 +step:9281 train loss:3.487593 +step:9282 train loss:3.455301 +step:9283 train loss:3.459786 +step:9284 train loss:3.503744 +step:9285 train loss:3.487803 +step:9286 train loss:3.493255 +step:9287 train loss:3.488161 +step:9288 train loss:3.493428 +step:9289 train loss:3.489049 +step:9290 train loss:3.491439 +step:9291 train loss:3.513148 +step:9292 train loss:3.503798 +step:9293 train loss:3.457232 +step:9294 train loss:3.441987 +step:9295 train loss:3.452912 +step:9296 train loss:3.442436 +step:9297 train loss:3.465967 +step:9298 train loss:3.403415 +step:9299 train loss:3.486363 +step:9300 train loss:3.490351 +step:9301 train loss:3.440068 +step:9302 train loss:3.470763 +step:9303 train loss:3.505688 +step:9304 train loss:3.487611 +step:9305 train loss:3.465252 +step:9306 train loss:3.577409 +step:9307 train loss:3.461506 +step:9308 train loss:3.482657 +step:9309 train loss:3.564306 +step:9310 train loss:3.443840 +step:9311 train loss:3.487114 +step:9312 train loss:3.452465 +step:9313 train loss:3.445506 +step:9314 train loss:3.483479 +step:9315 train loss:3.461557 +step:9316 train loss:3.485187 +step:9317 train loss:3.521729 +step:9318 train loss:3.514805 +step:9319 train loss:3.484500 +step:9320 train loss:3.486115 +step:9321 train loss:3.465966 +step:9322 train loss:3.518937 +step:9323 train loss:3.489954 +step:9324 train loss:3.439812 +step:9325 train loss:3.487307 +step:9326 train loss:3.531005 +step:9327 train loss:3.409715 
+step:9328 train loss:3.484944 +step:9329 train loss:3.424072 +step:9330 train loss:3.500416 +step:9331 train loss:3.513809 +step:9332 train loss:3.505874 +step:9333 train loss:3.518772 +step:9334 train loss:3.465283 +step:9335 train loss:3.504411 +step:9336 train loss:3.503428 +step:9337 train loss:3.437922 +step:9338 train loss:3.464606 +step:9339 train loss:3.451936 +step:9340 train loss:3.473976 +step:9341 train loss:3.439015 +step:9342 train loss:3.428244 +step:9343 train loss:3.480667 +step:9344 train loss:3.475500 +step:9345 train loss:3.510509 +step:9346 train loss:3.436458 +step:9347 train loss:3.421414 +step:9348 train loss:3.491961 +step:9349 train loss:3.422914 +step:9350 train loss:3.484748 +step:9351 train loss:3.398308 +step:9352 train loss:3.420442 +step:9353 train loss:3.440554 +step:9354 train loss:3.455399 +step:9355 train loss:3.465091 +step:9356 train loss:3.445780 +step:9357 train loss:3.479698 +step:9358 train loss:3.453605 +step:9359 train loss:3.532164 +step:9360 train loss:3.375065 +step:9361 train loss:3.472850 +step:9362 train loss:3.462538 +step:9363 train loss:3.463716 +step:9364 train loss:3.349653 +step:9365 train loss:3.487761 +step:9366 train loss:3.470189 +step:9367 train loss:3.476259 +step:9368 train loss:3.446350 +step:9369 train loss:3.401759 +step:9370 train loss:3.502689 +step:9371 train loss:3.447368 +step:9372 train loss:3.430274 +step:9373 train loss:3.418768 +step:9374 train loss:3.458473 +step:9375 train loss:3.411282 +step:9376 train loss:3.514401 +step:9377 train loss:3.435948 +step:9378 train loss:3.412634 +step:9379 train loss:3.358118 +step:9380 train loss:3.457300 +step:9381 train loss:3.436003 +step:9382 train loss:3.540781 +step:9383 train loss:3.457739 +step:9384 train loss:3.486157 +step:9385 train loss:3.520843 +step:9386 train loss:3.402133 +step:9387 train loss:3.499892 +step:9388 train loss:3.399387 +step:9389 train loss:3.468309 +step:9390 train loss:3.499216 +step:9391 train loss:3.484059 +step:9392 train loss:3.467750 +step:9393 train loss:3.517154 +step:9394 train loss:3.433074 +step:9395 train loss:3.480848 +step:9396 train loss:3.477468 +step:9397 train loss:3.442954 +step:9398 train loss:3.479433 +step:9399 train loss:3.420620 +step:9400 train loss:3.473222 +step:9401 train loss:3.469990 +step:9402 train loss:3.459339 +step:9403 train loss:3.418501 +step:9404 train loss:3.416377 +step:9405 train loss:3.497317 +step:9406 train loss:3.447052 +step:9407 train loss:3.484141 +step:9408 train loss:3.424757 +step:9409 train loss:3.508784 +step:9410 train loss:3.416006 +step:9411 train loss:3.464947 +step:9412 train loss:3.465728 +step:9413 train loss:3.423439 +step:9414 train loss:3.520069 +step:9415 train loss:3.491733 +step:9416 train loss:3.485147 +step:9417 train loss:3.401071 +step:9418 train loss:3.527952 +step:9419 train loss:3.462181 +step:9420 train loss:3.443310 +step:9421 train loss:3.416851 +step:9422 train loss:3.458786 +step:9423 train loss:3.380822 +step:9424 train loss:3.490278 +step:9425 train loss:3.437830 +step:9426 train loss:3.434154 +step:9427 train loss:3.466797 +step:9428 train loss:3.478092 +step:9429 train loss:3.470447 +step:9430 train loss:3.458288 +step:9431 train loss:3.460822 +step:9432 train loss:3.421365 +step:9433 train loss:3.495516 +step:9434 train loss:3.418438 +step:9435 train loss:3.573095 +step:9436 train loss:3.769860 +step:9437 train loss:3.420635 +step:9438 train loss:3.444794 +step:9439 train loss:3.449707 +step:9440 train loss:3.443340 +step:9441 train loss:3.414314 +step:9442 train 
loss:3.474959 +step:9443 train loss:3.471579 +step:9444 train loss:3.469013 +step:9445 train loss:3.443678 +step:9446 train loss:3.400460 +step:9447 train loss:3.435145 +step:9448 train loss:3.401675 +step:9449 train loss:3.442714 +step:9450 train loss:3.427693 +step:9451 train loss:3.397783 +step:9452 train loss:3.407381 +step:9453 train loss:3.465435 +step:9454 train loss:3.407218 +step:9455 train loss:3.727380 +step:9456 train loss:3.408534 +step:9457 train loss:3.531657 +step:9458 train loss:3.524810 +step:9459 train loss:3.563874 +step:9460 train loss:3.411997 +step:9461 train loss:3.462769 +step:9462 train loss:3.514156 +step:9463 train loss:3.444422 +step:9464 train loss:3.469576 +step:9465 train loss:3.448832 +step:9466 train loss:3.482702 +step:9467 train loss:3.448969 +step:9468 train loss:3.478717 +step:9469 train loss:3.427276 +step:9470 train loss:3.460413 +step:9471 train loss:3.467922 +step:9472 train loss:3.429190 +step:9473 train loss:3.473562 +step:9474 train loss:3.424698 +step:9475 train loss:3.490602 +step:9476 train loss:3.448082 +step:9477 train loss:3.441019 +step:9478 train loss:3.517720 +step:9479 train loss:3.405263 +step:9480 train loss:3.506213 +step:9481 train loss:3.413929 +step:9482 train loss:3.408231 +step:9483 train loss:3.457327 +step:9484 train loss:3.448663 +step:9485 train loss:3.450945 +step:9486 train loss:3.433207 +step:9487 train loss:3.459645 +step:9488 train loss:3.420234 +step:9489 train loss:3.508730 +step:9490 train loss:3.441930 +step:9491 train loss:3.491060 +step:9492 train loss:3.486806 +step:9493 train loss:3.392286 +step:9494 train loss:3.471532 +step:9495 train loss:3.377619 +step:9496 train loss:3.513100 +step:9497 train loss:3.421238 +step:9498 train loss:3.404479 +step:9499 train loss:3.548032 +step:9500 validation loss:3.409174 total_sharp:-3.9221e-02 L1_sharp:-3.9769e+00 L2_sharp:-4.6336e-01 L3_sharp:7.5472e-02 L4_sharp:2.4668e-02 L5_sharp:1.4571e-02 L6_sharp:4.9907e-03 L7_sharp:2.4841e-03 L8_sharp:2.3892e-03 L9_sharp:1.3214e-03 L10_sharp:6.3961e-04 L11_sharp:6.9390e-04 L12_sharp:7.5624e-04 total_fnorm:5.2455e-01 total_l1_linf:4.2583e+03 total_spectral:5.2455e-01 L1_fnorm:5.6799e-02 L2_fnorm:4.5486e-02 L3_fnorm:5.7563e-02 L4_fnorm:8.1296e-02 L5_fnorm:1.0459e-01 L6_fnorm:1.2609e-01 L7_fnorm:1.4522e-01 L8_fnorm:1.4699e-01 L9_fnorm:1.4806e-01 L10_fnorm:1.4884e-01 L11_fnorm:1.4280e-01 L12_fnorm:1.4976e-01 L1_l1linf:1.5473e-01 L2_l1linf:1.6099e-01 L3_l1linf:1.6674e-01 L4_l1linf:1.7139e-01 L5_l1linf:1.4610e-01 L6_l1linf:1.3586e-01 L7_l1linf:1.2360e-01 L8_l1linf:1.2612e-01 L9_l1linf:1.3304e-01 L10_l1linf:1.3855e-01 L11_l1linf:1.4282e-01 L12_l1linf:1.3793e-01 L1_spectral:3.3480e-03 L2_spectral:3.5769e-03 L3_spectral:3.7167e-03 L4_spectral:4.9277e-03 L5_spectral:5.2140e-03 L6_spectral:3.3418e-03 L7_spectral:3.0179e-03 L8_spectral:3.0166e-03 L9_spectral:3.0182e-03 L10_spectral:3.0693e-03 L11_spectral:3.1591e-03 L12_spectral:3.0769e-03 v_norm:5.2455e-01 cos_v_-g_hvp:1.5340e-02 g_hvp_norm:1.0738e+00 cos_v_-g_t:1.1221e-02 g_t_norm:2.6262e+00 hv_norm:1.0955e+01 cos_v_hv:-1.8780e-03 hg_norm:1.7345e+05 cos_g_hg:-1.2037e-01 v_par:9.2482e-04 v_perp:5.2454e-01 L1_cos_v_neg_g:-2.5194e-02 L1_v_norm:5.6799e-02 L2_cos_v_neg_g:-1.4098e-02 L2_v_norm:4.5486e-02 L3_cos_v_neg_g:3.3795e-02 L3_v_norm:5.7563e-02 L4_cos_v_neg_g:4.2265e-02 L4_v_norm:8.1296e-02 L5_cos_v_neg_g:3.0580e-02 L5_v_norm:1.0459e-01 L6_cos_v_neg_g:2.5632e-02 L6_v_norm:1.2609e-01 L7_cos_v_neg_g:2.3447e-02 L7_v_norm:1.4522e-01 L8_cos_v_neg_g:2.2991e-02 L8_v_norm:1.4699e-01 
L9_cos_v_neg_g:2.6298e-02 L9_v_norm:1.4806e-01 L10_cos_v_neg_g:2.8036e-02 L10_v_norm:1.4884e-01 L11_cos_v_neg_g:3.8467e-02 L11_v_norm:1.4280e-01 L12_cos_v_neg_g:7.1533e-02 L12_v_norm:1.4976e-01 +step:9500 train loss:3.465165 +step:9501 train loss:3.497106 +step:9502 train loss:3.375821 +step:9503 train loss:3.478698 +step:9504 train loss:3.455156 +step:9505 train loss:3.455914 +step:9506 train loss:3.459863 +step:9507 train loss:3.399014 +step:9508 train loss:3.490055 +step:9509 train loss:3.481207 +step:9510 train loss:3.525215 +step:9511 train loss:3.410689 +step:9512 train loss:3.489734 +step:9513 train loss:3.499832 +step:9514 train loss:3.487787 +step:9515 train loss:3.443507 +step:9516 train loss:3.439844 +step:9517 train loss:3.545043 +step:9518 train loss:3.394918 +step:9519 train loss:3.437869 +step:9520 train loss:3.445097 +step:9521 train loss:3.469995 +step:9522 train loss:3.484404 +step:9523 train loss:3.455727 +step:9524 train loss:3.478937 +step:9525 train loss:3.427558 +step:9526 train loss:3.405748 +step:9527 train loss:3.483099 +step:9528 train loss:3.419869 +step:9529 train loss:3.514831 +step:9530 train loss:3.400853 +step:9531 train loss:3.438493 +step:9532 train loss:3.398619 +step:9533 train loss:3.452957 +step:9534 train loss:3.462615 +step:9535 train loss:3.460446 +step:9536 train loss:3.442897 +step:9537 train loss:3.476412 +step:9538 train loss:3.441191 +step:9539 train loss:3.471666 +step:9540 train loss:3.460861 +step:9541 train loss:3.423905 +step:9542 train loss:3.440281 +step:9543 train loss:3.423432 +step:9544 train loss:3.436041 +step:9545 train loss:3.414669 +step:9546 train loss:3.477549 +step:9547 train loss:3.454888 +step:9548 train loss:3.424638 +step:9549 train loss:3.440665 +step:9550 train loss:3.410934 +step:9551 train loss:3.481475 +step:9552 train loss:3.449103 +step:9553 train loss:3.444725 +step:9554 train loss:3.493182 +step:9555 train loss:3.455455 +step:9556 train loss:3.498044 +step:9557 train loss:3.442657 +step:9558 train loss:3.466099 +step:9559 train loss:3.452644 +step:9560 train loss:3.453181 +step:9561 train loss:3.451645 +step:9562 train loss:3.431542 +step:9563 train loss:3.411631 +step:9564 train loss:3.472393 +step:9565 train loss:3.444058 +step:9566 train loss:3.438376 +step:9567 train loss:3.454326 +step:9568 train loss:3.387816 +step:9569 train loss:3.391230 +step:9570 train loss:3.432953 +step:9571 train loss:3.484081 +step:9572 train loss:3.507410 +step:9573 train loss:3.475402 +step:9574 train loss:3.448627 +step:9575 train loss:3.486684 +step:9576 train loss:3.473407 +step:9577 train loss:3.518076 +step:9578 train loss:3.469071 +step:9579 train loss:3.413420 +step:9580 train loss:3.444955 +step:9581 train loss:3.471165 +step:9582 train loss:3.423005 +step:9583 train loss:3.409194 +step:9584 train loss:3.437430 +step:9585 train loss:3.471042 +step:9586 train loss:3.485423 +step:9587 train loss:3.413567 +step:9588 train loss:3.464073 +step:9589 train loss:3.452282 +step:9590 train loss:3.471737 +step:9591 train loss:3.415836 +step:9592 train loss:3.411144 +step:9593 train loss:3.430882 +step:9594 train loss:3.472846 +step:9595 train loss:3.466768 +step:9596 train loss:3.455960 +step:9597 train loss:3.435353 +step:9598 train loss:3.453165 +step:9599 train loss:3.446682 +step:9600 train loss:3.513866 +step:9601 train loss:3.411835 +step:9602 train loss:3.450279 +step:9603 train loss:3.481941 +step:9604 train loss:3.441934 +step:9605 train loss:3.475760 +step:9606 train loss:3.417505 +step:9607 train loss:3.424975 +step:9608 
train loss:3.459263 +step:9609 train loss:3.419985 +step:9610 train loss:3.463247 +step:9611 train loss:3.443176 +step:9612 train loss:3.388021 +step:9613 train loss:3.441796 +step:9614 train loss:3.430140 +step:9615 train loss:3.513359 +step:9616 train loss:3.544368 +step:9617 train loss:3.518181 +step:9618 train loss:3.435085 +step:9619 train loss:3.469729 +step:9620 train loss:3.478876 +step:9621 train loss:3.484782 +step:9622 train loss:3.426339 +step:9623 train loss:3.426612 +step:9624 train loss:3.441269 +step:9625 train loss:3.394876 +step:9626 train loss:3.447261 +step:9627 train loss:3.526797 +step:9628 train loss:3.451917 +step:9629 train loss:3.434667 +step:9630 train loss:3.505570 +step:9631 train loss:3.417341 +step:9632 train loss:3.440734 +step:9633 train loss:3.415146 +step:9634 train loss:3.461692 +step:9635 train loss:3.392557 +step:9636 train loss:3.454866 +step:9637 train loss:3.411504 +step:9638 train loss:3.370863 +step:9639 train loss:3.470622 +step:9640 train loss:3.450172 +step:9641 train loss:3.527917 +step:9642 train loss:3.448800 +step:9643 train loss:3.642763 +step:9644 train loss:3.450611 +step:9645 train loss:3.426056 +step:9646 train loss:3.414859 +step:9647 train loss:3.425042 +step:9648 train loss:3.445740 +step:9649 train loss:3.476898 +step:9650 train loss:3.463093 +step:9651 train loss:3.479305 +step:9652 train loss:3.420323 +step:9653 train loss:3.460991 +step:9654 train loss:3.485067 +step:9655 train loss:3.418631 +step:9656 train loss:3.503491 +step:9657 train loss:3.450701 +step:9658 train loss:3.443266 +step:9659 train loss:3.465632 +step:9660 train loss:3.411603 +step:9661 train loss:3.418643 +step:9662 train loss:3.403445 +step:9663 train loss:3.481649 +step:9664 train loss:3.456643 +step:9665 train loss:3.408202 +step:9666 train loss:3.381973 +step:9667 train loss:3.444607 +step:9668 train loss:3.481075 +step:9669 train loss:3.435914 +step:9670 train loss:3.448409 +step:9671 train loss:3.449308 +step:9672 train loss:3.506561 +step:9673 train loss:3.454610 +step:9674 train loss:3.462732 +step:9675 train loss:3.407700 +step:9676 train loss:3.459754 +step:9677 train loss:3.457242 +step:9678 train loss:3.478667 +step:9679 train loss:3.426868 +step:9680 train loss:3.477413 +step:9681 train loss:3.489930 +step:9682 train loss:3.444700 +step:9683 train loss:3.422385 +step:9684 train loss:3.454725 +step:9685 train loss:3.447162 +step:9686 train loss:3.444788 +step:9687 train loss:3.422216 +step:9688 train loss:3.435038 +step:9689 train loss:3.444731 +step:9690 train loss:3.429745 +step:9691 train loss:3.433704 +step:9692 train loss:3.431326 +step:9693 train loss:3.465858 +step:9694 train loss:3.409415 +step:9695 train loss:3.452386 +step:9696 train loss:3.367754 +step:9697 train loss:3.472727 +step:9698 train loss:3.413582 +step:9699 train loss:3.419382 +step:9700 train loss:3.396789 +step:9701 train loss:3.466641 +step:9702 train loss:3.461007 +step:9703 train loss:3.434615 +step:9704 train loss:3.434575 +step:9705 train loss:3.428685 +step:9706 train loss:3.466299 +step:9707 train loss:3.434991 +step:9708 train loss:3.421629 +step:9709 train loss:3.472390 +step:9710 train loss:3.387397 +step:9711 train loss:3.485538 +step:9712 train loss:3.422218 +step:9713 train loss:3.428693 +step:9714 train loss:3.435786 +step:9715 train loss:3.404186 +step:9716 train loss:3.411618 +step:9717 train loss:3.418986 +step:9718 train loss:3.478505 +step:9719 train loss:3.439609 +step:9720 train loss:3.442331 +step:9721 train loss:3.450501 +step:9722 train loss:3.412004 
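The quantities hv_norm, cos_v_hv, hg_norm, cos_g_hg and the per-layer *_sharp values logged with each validation loss suggest that Hessian-vector products are evaluated around the current parameters. How the script defines "sharpness" cannot be read off the log, but a standard double-backprop HVP and the curvature of the loss along a given direction would look roughly as follows; `hvp` and `curvature_along` are illustrative names only, not functions from this repository.

```python
import torch

def hvp(loss: torch.Tensor, params, vec: torch.Tensor) -> torch.Tensor:
    """Hessian-vector product H @ vec via double backprop (standard recipe)."""
    grads = torch.autograd.grad(loss, params, create_graph=True)
    flat_g = torch.cat([g.reshape(-1) for g in grads])
    # Differentiating g . vec w.r.t. the parameters yields H @ vec.
    hv = torch.autograd.grad(torch.dot(flat_g, vec), params)
    return torch.cat([h.reshape(-1) for h in hv])

def curvature_along(loss: torch.Tensor, params, direction: torch.Tensor):
    """Rayleigh quotient d^T H d for a unit direction d: loss curvature along d."""
    d = direction / (direction.norm() + 1e-12)
    hv = hvp(loss, list(params), d)
    return torch.dot(d, hv).item(), hv.norm().item()
```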
+step:9723 train loss:3.410307 +step:9724 train loss:3.439294 +step:9725 train loss:3.447752 +step:9726 train loss:3.458204 +step:9727 train loss:3.487566 +step:9728 train loss:3.490222 +step:9729 train loss:3.459357 +step:9730 train loss:3.488669 +step:9731 train loss:3.564497 +step:9732 train loss:3.504792 +step:9733 train loss:3.462764 +step:9734 train loss:3.468516 +step:9735 train loss:3.532673 +step:9736 train loss:3.422090 +step:9737 train loss:3.446126 +step:9738 train loss:3.456976 +step:9739 train loss:3.513972 +step:9740 train loss:3.475886 +step:9741 train loss:3.474489 +step:9742 train loss:3.509193 +step:9743 train loss:3.443970 +step:9744 train loss:3.438023 +step:9745 train loss:3.428900 +step:9746 train loss:3.417728 +step:9747 train loss:3.407339 +step:9748 train loss:3.448025 +step:9749 train loss:3.421886 +step:9750 validation loss:3.388302 +step:9750 train loss:3.368514 +step:9751 train loss:3.487440 +step:9752 train loss:3.380383 +step:9753 train loss:3.537825 +step:9754 train loss:3.420516 +step:9755 train loss:3.454546 +step:9756 train loss:3.422359 +step:9757 train loss:3.395378 +step:9758 train loss:3.420837 +step:9759 train loss:3.368752 +step:9760 train loss:3.438588 +step:9761 train loss:3.369761 +step:9762 train loss:3.380092 +step:9763 train loss:3.434096 +step:9764 train loss:3.425857 +step:9765 train loss:3.439120 +step:9766 train loss:3.439848 +step:9767 train loss:3.414711 +step:9768 train loss:3.443141 +step:9769 train loss:3.443598 +step:9770 train loss:3.419041 +step:9771 train loss:3.399972 +step:9772 train loss:3.413370 +step:9773 train loss:3.476215 +step:9774 train loss:3.417825 +step:9775 train loss:3.440530 +step:9776 train loss:3.477317 +step:9777 train loss:3.407704 +step:9778 train loss:3.472962 +step:9779 train loss:3.417022 +step:9780 train loss:3.376720 +step:9781 train loss:3.392454 +step:9782 train loss:3.403714 +step:9783 train loss:3.409841 +step:9784 train loss:3.510101 +step:9785 train loss:3.389415 +step:9786 train loss:3.441065 +step:9787 train loss:3.420479 +step:9788 train loss:3.454594 +step:9789 train loss:3.444488 +step:9790 train loss:3.406443 +step:9791 train loss:3.417181 +step:9792 train loss:3.408703 +step:9793 train loss:3.421107 +step:9794 train loss:3.476619 +step:9795 train loss:3.453717 +step:9796 train loss:3.415340 +step:9797 train loss:3.429761 +step:9798 train loss:3.418369 +step:9799 train loss:3.475512 +step:9800 train loss:3.441013 +step:9801 train loss:3.501512 +step:9802 train loss:3.428001 +step:9803 train loss:3.470940 +step:9804 train loss:3.425989 +step:9805 train loss:3.403743 +step:9806 train loss:3.460513 +step:9807 train loss:3.445880 +step:9808 train loss:3.398916 +step:9809 train loss:3.388975 +step:9810 train loss:3.369563 +step:9811 train loss:3.478128 +step:9812 train loss:3.413489 +step:9813 train loss:3.430624 +step:9814 train loss:3.481280 +step:9815 train loss:3.424051 +step:9816 train loss:3.455722 +step:9817 train loss:3.394070 +step:9818 train loss:3.433599 +step:9819 train loss:3.378378 +step:9820 train loss:3.477048 +step:9821 train loss:3.424007 +step:9822 train loss:3.471577 +step:9823 train loss:3.474949 +step:9824 train loss:3.453471 +step:9825 train loss:3.455679 +step:9826 train loss:3.465609 +step:9827 train loss:3.455652 +step:9828 train loss:3.497183 +step:9829 train loss:3.445338 +step:9830 train loss:3.376688 +step:9831 train loss:3.442029 +step:9832 train loss:3.421795 +step:9833 train loss:3.449998 +step:9834 train loss:3.406372 +step:9835 train loss:3.484374 +step:9836 
train loss:3.382470 +step:9837 train loss:3.462034 +step:9838 train loss:3.455577 +step:9839 train loss:3.474703 +step:9840 train loss:3.479044 +step:9841 train loss:3.392151 +step:9842 train loss:3.443548 +step:9843 train loss:3.425045 +step:9844 train loss:3.508198 +step:9845 train loss:3.425658 +step:9846 train loss:3.436048 +step:9847 train loss:3.470261 +step:9848 train loss:3.473811 +step:9849 train loss:3.456297 +step:9850 train loss:3.445046 +step:9851 train loss:3.464098 +step:9852 train loss:3.368797 +step:9853 train loss:3.457914 +step:9854 train loss:3.414702 +step:9855 train loss:3.424476 +step:9856 train loss:3.429191 +step:9857 train loss:3.374060 +step:9858 train loss:3.408983 +step:9859 train loss:3.414465 +step:9860 train loss:3.461896 +step:9861 train loss:3.431846 +step:9862 train loss:3.520765 +step:9863 train loss:3.427069 +step:9864 train loss:3.413482 +step:9865 train loss:3.408458 +step:9866 train loss:3.483044 +step:9867 train loss:3.411729 +step:9868 train loss:3.403847 +step:9869 train loss:3.477312 +step:9870 train loss:3.442617 +step:9871 train loss:3.410300 +step:9872 train loss:3.464378 +step:9873 train loss:3.404006 +step:9874 train loss:3.430474 +step:9875 train loss:3.429074 +step:9876 train loss:3.401016 +step:9877 train loss:3.380339 +step:9878 train loss:3.416748 +step:9879 train loss:3.424935 +step:9880 train loss:3.524918 +step:9881 train loss:3.408790 +step:9882 train loss:3.399969 +step:9883 train loss:3.399315 +step:9884 train loss:3.429129 +step:9885 train loss:3.420878 +step:9886 train loss:3.427065 +step:9887 train loss:3.475715 +step:9888 train loss:3.439626 +step:9889 train loss:3.495185 +step:9890 train loss:3.489381 +step:9891 train loss:3.431256 +step:9892 train loss:3.364596 +step:9893 train loss:3.430032 +step:9894 train loss:3.466781 +step:9895 train loss:3.417739 +step:9896 train loss:3.393881 +step:9897 train loss:3.422854 +step:9898 train loss:3.467619 +step:9899 train loss:3.387778 +step:9900 train loss:3.497293 +step:9901 train loss:3.379718 +step:9902 train loss:3.421215 +step:9903 train loss:3.424284 +step:9904 train loss:3.460889 +step:9905 train loss:3.419168 +step:9906 train loss:3.438908 +step:9907 train loss:3.364949 +step:9908 train loss:3.432781 +step:9909 train loss:3.394814 +step:9910 train loss:3.438538 +step:9911 train loss:3.446183 +step:9912 train loss:3.480184 +step:9913 train loss:3.457523 +step:9914 train loss:3.440967 +step:9915 train loss:3.431635 +step:9916 train loss:3.414891 +step:9917 train loss:3.448152 +step:9918 train loss:3.419573 +step:9919 train loss:3.458890 +step:9920 train loss:3.407163 +step:9921 train loss:3.450503 +step:9922 train loss:3.395447 +step:9923 train loss:3.447702 +step:9924 train loss:3.445653 +step:9925 train loss:3.434952 +step:9926 train loss:3.479209 +step:9927 train loss:3.360949 +step:9928 train loss:3.512939 +step:9929 train loss:3.426294 +step:9930 train loss:3.443469 +step:9931 train loss:3.422885 +step:9932 train loss:3.441025 +step:9933 train loss:3.441214 +step:9934 train loss:3.420222 +step:9935 train loss:3.380702 +step:9936 train loss:3.429627 +step:9937 train loss:3.447634 +step:9938 train loss:3.410310 +step:9939 train loss:3.428574 +step:9940 train loss:3.423971 +step:9941 train loss:3.519518 +step:9942 train loss:3.438299 +step:9943 train loss:3.386085 +step:9944 train loss:3.484756 +step:9945 train loss:3.392034 +step:9946 train loss:3.450383 +step:9947 train loss:3.407269 +step:9948 train loss:3.406817 +step:9949 train loss:3.376256 +step:9950 train loss:3.528375 
+step:9951 train loss:3.359494 +step:9952 train loss:3.436834 +step:9953 train loss:3.424418 +step:9954 train loss:3.385580 +step:9955 train loss:3.490532 +step:9956 train loss:3.443621 +step:9957 train loss:3.441552 +step:9958 train loss:3.438758 +step:9959 train loss:3.514585 +step:9960 train loss:3.449640 +step:9961 train loss:3.498292 +step:9962 train loss:3.424345 +step:9963 train loss:3.453827 +step:9964 train loss:3.486665 +step:9965 train loss:3.370034 +step:9966 train loss:3.499814 +step:9967 train loss:3.392499 +step:9968 train loss:3.487024 +step:9969 train loss:3.344750 +step:9970 train loss:3.492100 +step:9971 train loss:3.453872 +step:9972 train loss:3.423320 +step:9973 train loss:3.466713 +step:9974 train loss:3.445344 +step:9975 train loss:3.479699 +step:9976 train loss:3.470118 +step:9977 train loss:3.455548 +step:9978 train loss:3.435871 +step:9979 train loss:3.485320 +step:9980 train loss:3.415845 +step:9981 train loss:3.461844 +step:9982 train loss:3.440582 +step:9983 train loss:3.381850 +step:9984 train loss:3.438405 +step:9985 train loss:3.385358 +step:9986 train loss:3.460232 +step:9987 train loss:3.429383 +step:9988 train loss:3.394912 +step:9989 train loss:3.493055 +step:9990 train loss:3.405942 +step:9991 train loss:3.468544 +step:9992 train loss:3.362345 +step:9993 train loss:3.431507 +step:9994 train loss:3.377294 +step:9995 train loss:3.397280 +step:9996 train loss:3.455280 +step:9997 train loss:3.443479 +step:9998 train loss:3.446624 +step:9999 train loss:3.433585 +step:10000 validation loss:3.379481 total_sharp:-1.7622e-02 L1_sharp:-1.2475e+00 L2_sharp:-3.7211e-01 L3_sharp:-5.8981e-02 L4_sharp:-1.5295e-02 L5_sharp:6.1903e-03 L6_sharp:5.7083e-03 L7_sharp:3.5888e-03 L8_sharp:3.7242e-03 L9_sharp:1.9427e-03 L10_sharp:1.0923e-03 L11_sharp:1.1067e-03 L12_sharp:2.2636e-03 total_fnorm:1.0445e-03 total_l1_linf:8.4636e+00 total_spectral:1.0445e-03 L1_fnorm:1.2647e-04 L2_fnorm:8.2597e-05 L3_fnorm:1.0537e-04 L4_fnorm:1.4854e-04 L5_fnorm:2.0017e-04 L6_fnorm:2.3679e-04 L7_fnorm:2.8244e-04 L8_fnorm:2.8497e-04 L9_fnorm:2.9014e-04 L10_fnorm:2.9312e-04 L11_fnorm:2.8245e-04 L12_fnorm:3.0328e-04 L1_l1linf:3.1477e-04 L2_l1linf:4.0403e-04 L3_l1linf:3.9950e-04 L4_l1linf:3.9811e-04 L5_l1linf:3.7518e-04 L6_l1linf:3.6179e-04 L7_l1linf:3.7628e-04 L8_l1linf:3.8080e-04 L9_l1linf:3.7997e-04 L10_l1linf:3.7476e-04 L11_l1linf:3.4983e-04 L12_l1linf:3.4371e-04 L1_spectral:7.0977e-06 L2_spectral:8.7370e-06 L3_spectral:8.7672e-06 L4_spectral:9.8047e-06 L5_spectral:1.0788e-05 L6_spectral:7.9885e-06 L7_spectral:8.2902e-06 L8_spectral:8.3619e-06 L9_spectral:8.3798e-06 L10_spectral:8.3100e-06 L11_spectral:7.7381e-06 L12_spectral:7.6081e-06 v_norm:1.0445e-03 cos_v_-g_hvp:2.2178e-02 g_hvp_norm:5.7381e-01 cos_v_-g_t:3.7713e-02 g_t_norm:8.0376e-01 hv_norm:1.1699e-02 cos_v_hv:-1.5734e-03 hg_norm:4.5108e+03 cos_g_hg:-5.0232e-02 v_par:2.2353e-06 v_perp:1.0445e-03 L1_cos_v_neg_g:2.3425e-02 L1_v_norm:1.2647e-04 L2_cos_v_neg_g:-7.2476e-03 L2_v_norm:8.2603e-05 L3_cos_v_neg_g:7.3965e-03 L3_v_norm:1.0537e-04 L4_cos_v_neg_g:5.1665e-03 L4_v_norm:1.4854e-04 L5_cos_v_neg_g:1.9411e-02 L5_v_norm:2.0017e-04 L6_cos_v_neg_g:2.6490e-02 L6_v_norm:2.3680e-04 L7_cos_v_neg_g:2.6134e-02 L7_v_norm:2.8244e-04 L8_cos_v_neg_g:2.7317e-02 L8_v_norm:2.8497e-04 L9_cos_v_neg_g:3.0218e-02 L9_v_norm:2.9014e-04 L10_cos_v_neg_g:3.1763e-02 L10_v_norm:2.9312e-04 L11_cos_v_neg_g:4.1384e-02 L11_v_norm:2.8245e-04 L12_cos_v_neg_g:7.0278e-02 L12_v_norm:3.0328e-04
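The run stops at step 10000 with a final validation loss of 3.379481, down from 3.455081 at step 8750. Because per-step train losses, periodic validation losses, and the sharpness/norm dumps share one stream, a small parser is useful for plotting. Below is a minimal sketch that assumes the log is plain text in exactly the key:value format shown above; LOG_PATH and parse_log are placeholders, not part of this run's code.

```python
import re
from collections import defaultdict

# Hypothetical path to a raw training log in the format shown above.
LOG_PATH = "main_log.txt"

train_re = re.compile(r"step:(\d+) train loss:([\d.]+)")
val_re = re.compile(r"step:(\d+) validation loss:([\d.]+)")
# key:value pairs such as total_sharp:-1.7622e-02 or cos_v_-g_hvp:2.2178e-02
metric_re = re.compile(r"([A-Za-z0-9_\-]+):(-?[\d.]+(?:e[+-]?\d+)?)")

def parse_log(text: str):
    """Split the log stream into train losses, validation losses, and metric dumps."""
    train, val, dumps = {}, {}, defaultdict(dict)
    for m in train_re.finditer(text):
        train[int(m.group(1))] = float(m.group(2))
    for m in val_re.finditer(text):
        val[int(m.group(1))] = float(m.group(2))
    # Sharpness/norm dumps follow the validation loss on the same record;
    # capture everything up to the next "step:" marker.
    for m in re.finditer(r"step:(\d+) validation loss:[\d.]+(.*?)(?=step:\d|$)",
                         text, flags=re.S):
        step = int(m.group(1))
        for key, value in metric_re.findall(m.group(2)):
            dumps[step][key] = float(value)
    return train, val, dict(dumps)

if __name__ == "__main__":
    with open(LOG_PATH) as f:
        train, val, dumps = parse_log(f.read())
    print(f"{len(train)} train records, {len(val)} validation records")
    print("final validation loss:", val[max(val)])
```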