Upload folder using huggingface_hub
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +289 -35
- .gitignore +171 -0
- NOTES.md +1079 -0
- README.md +57 -0
- architecture.py +174 -0
- archive-misc/architecture-v1.py +175 -0
- archive-misc/ascii_percentage.py +56 -0
- archive-misc/bpe_test.py +29 -0
- archive-misc/check-memorization.py +20 -0
- archive-misc/concatenator.py +19 -0
- archive-misc/dataset.py +79 -0
- archive-misc/dset_splitter.py +40 -0
- archive-misc/entropy_upper_bound.py +57 -0
- archive-misc/eval_old.py +91 -0
- archive-misc/f1_score.py +19 -0
- archive-misc/plot_metrics.py +182 -0
- archive-misc/plots/acc_trainstep_plot.png +3 -0
- archive-misc/plots/loss_epoch_plot.png +3 -0
- archive-misc/plots/perplexityval_epoch_plot.png +3 -0
- archive-misc/plots/topkacc_epoch_plot.png +3 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv +803 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv +803 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv +803 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard (1).csv +803 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv +803 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv +681 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv +681 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv +931 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv +681 -0
- archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv +681 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv +31 -0
- archive-misc/runs_jsons/topkacc_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv +31 -0
- archive-misc/runs_jsons/topkacc_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv +31 -0
.gitattributes
CHANGED
|
@@ -1,35 +1,289 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Auto detect text files and perform LF normalization
|
| 2 |
+
* text=auto
|
| 3 |
+
archive-misc/plots/acc_trainstep_plot.png filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
archive-misc/plots/loss_epoch_plot.png filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
archive-misc/plots/perplexityval_epoch_plot.png filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
archive-misc/plots/topkacc_epoch_plot.png filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
archive-misc/test-data/bpe_model.model filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
dummy-data-dir/encoded_chunked.pt filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
readme-imgs/code-decoder-v2-loss-curve-1.png filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
readme-imgs/code-decoder-v3-loss-curve.png filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
readme-imgs/comparison.png filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
readme-imgs/funny-curve.png filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
readme-imgs/normal-training-curve.png filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
readme-imgs/shakespeare-test-v2-loss-curve-preliminary.png filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
readme-imgs/v22-another-run.png filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
readme-imgs/v30-results.png filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
readme-imgs/v30-v3-v4-topk.png filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
readme-imgs/v30-v3-v4.png filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
readme-imgs/val-loss-v21.png filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum/tensorboard/events.out.tfevents.1750241564.masked_hostname.local.17039.3 filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum/tensorboard/events.out.tfevents.1750379119.masked_hostname.local.77463.3 filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum/tensorboard/events.out.tfevents.1750541174.masked_hostname.local.49638.3 filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum-loss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum-loss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum-loss/tensorboard/events.out.tfevents.1750391064.masked_hostname.local.77463.4 filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
runs/!code-decoder-v31-mega-licensed-1/anticurriculum-loss/tensorboard/events.out.tfevents.1750541176.masked_hostname.local.49638.4 filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-loss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-loss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-loss/tensorboard/events.out.tfevents.1749974866.masked_hostname.local.17039.1 filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-loss/tensorboard/events.out.tfevents.1750379114.masked_hostname.local.77463.1 filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-loss/tensorboard/events.out.tfevents.1750541170.masked_hostname.local.49638.1 filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749861906.masked_hostname.local.66051.0 filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749872246.masked_hostname.local.80667.0 filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749872428.masked_hostname.local.80898.0 filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749872685.masked_hostname.local.81245.0 filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749873458.masked_hostname.local.83857.0 filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749913774.masked_hostname.local.13022.0 filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749914351.masked_hostname.local.17039.0 filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1750379092.masked_hostname.local.77463.0 filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
runs/!code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1750541167.masked_hostname.local.49638.0 filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 47 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid/tensorboard/events.out.tfevents.1750512403.masked_hostname.local.77463.5 filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid/tensorboard/events.out.tfevents.1750541178.masked_hostname.local.49638.5 filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid-loss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid-loss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
runs/!code-decoder-v31-mega-licensed-1/hybrid-loss/tensorboard/events.out.tfevents.1750630092.masked_hostname.local.49638.6 filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
runs/!code-decoder-v31-mega-licensed-1/noop/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
runs/!code-decoder-v31-mega-licensed-1/noop/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 54 |
+
runs/!code-decoder-v31-mega-licensed-1/noop/tensorboard/events.out.tfevents.1750113611.masked_hostname.local.17039.2 filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
runs/!code-decoder-v31-mega-licensed-1/noop/tensorboard/events.out.tfevents.1750379116.masked_hostname.local.77463.2 filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
runs/!code-decoder-v31-mega-licensed-1/noop/tensorboard/events.out.tfevents.1750541172.masked_hostname.local.49638.2 filter=lfs diff=lfs merge=lfs -text
|
| 57 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 58 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 59 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential/tensorboard/events.out.tfevents.1750742710.masked_hostname.local.49638.7 filter=lfs diff=lfs merge=lfs -text
|
| 60 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential-loss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential-loss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
runs/!code-decoder-v31-mega-licensed-1/sequential-loss/tensorboard/events.out.tfevents.1750849720.masked_hostname.local.49638.8 filter=lfs diff=lfs merge=lfs -text
|
| 63 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-loss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 64 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-loss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 65 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-loss/tensorboard/events.out.tfevents.1749720205.masked_hostname.local.46145.1 filter=lfs diff=lfs merge=lfs -text
|
| 66 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 67 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 68 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749608158.masked_hostname.local.96353.0 filter=lfs diff=lfs merge=lfs -text
|
| 69 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749652888.masked_hostname.local.19434.0 filter=lfs diff=lfs merge=lfs -text
|
| 70 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749653149.masked_hostname.local.22903.0 filter=lfs diff=lfs merge=lfs -text
|
| 71 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749653403.masked_hostname.local.25777.0 filter=lfs diff=lfs merge=lfs -text
|
| 72 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749653624.masked_hostname.local.28482.0 filter=lfs diff=lfs merge=lfs -text
|
| 73 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749653941.masked_hostname.local.32491.0 filter=lfs diff=lfs merge=lfs -text
|
| 74 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749654908.masked_hostname.local.45838.0 filter=lfs diff=lfs merge=lfs -text
|
| 75 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/curriculum-noloss/tensorboard/events.out.tfevents.1749654948.masked_hostname.local.46145.0 filter=lfs diff=lfs merge=lfs -text
|
| 76 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/noop/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 77 |
+
runs/INTERRUPTED-code-decoder-v31-mega-licensed-1/noop/tensorboard/events.out.tfevents.1749855977.masked_hostname.local.46145.2 filter=lfs diff=lfs merge=lfs -text
|
| 78 |
+
runs/code-decoder-v1/tensorboard/events.out.tfevents.1732335020.masked_hostname.local.31440.0 filter=lfs diff=lfs merge=lfs -text
|
| 79 |
+
runs/code-decoder-v1/tensorboard/events.out.tfevents.1732335097.masked_hostname.local.31440.1 filter=lfs diff=lfs merge=lfs -text
|
| 80 |
+
runs/code-decoder-v10-vanilla-smaller-batchfirst/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 81 |
+
runs/code-decoder-v10-vanilla-smaller-batchfirst/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 82 |
+
runs/code-decoder-v10-vanilla-smaller-batchfirst/tensorboard/events.out.tfevents.1734105685.masked_hostname.local.9029.0 filter=lfs diff=lfs merge=lfs -text
|
| 83 |
+
runs/code-decoder-v11-vanilla-alphabet/tensorboard/events.out.tfevents.1734228819.masked_hostname.local.43324.0 filter=lfs diff=lfs merge=lfs -text
|
| 84 |
+
runs/code-decoder-v12-dummy/tensorboard/events.out.tfevents.1734403315.masked_hostname.local.44383.0 filter=lfs diff=lfs merge=lfs -text
|
| 85 |
+
runs/code-decoder-v13-rescaling-smaller/tensorboard/events.out.tfevents.1734484193.masked_hostname.local.53545.0 filter=lfs diff=lfs merge=lfs -text
|
| 86 |
+
runs/code-decoder-v13-rescaling-smaller-retrained/tensorboard/events.out.tfevents.1734484495.masked_hostname.local.53801.0 filter=lfs diff=lfs merge=lfs -text
|
| 87 |
+
runs/code-decoder-v14-tester/tensorboard/events.out.tfevents.1734485671.masked_hostname.local.54813.0 filter=lfs diff=lfs merge=lfs -text
|
| 88 |
+
runs/code-decoder-v15-sliding/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 89 |
+
runs/code-decoder-v15-sliding/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 90 |
+
runs/code-decoder-v15-sliding/tensorboard/events.out.tfevents.1734747885.masked_hostname.local.29752.0 filter=lfs diff=lfs merge=lfs -text
|
| 91 |
+
runs/code-decoder-v15-sliding/tensorboard/events.out.tfevents.1734747943.masked_hostname.local.29821.0 filter=lfs diff=lfs merge=lfs -text
|
| 92 |
+
runs/code-decoder-v16-upscale/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 93 |
+
runs/code-decoder-v16-upscale/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 94 |
+
runs/code-decoder-v16-upscale/tensorboard/events.out.tfevents.1734753751.masked_hostname.local.33221.0 filter=lfs diff=lfs merge=lfs -text
|
| 95 |
+
runs/code-decoder-v17-bpe-upscale/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 96 |
+
runs/code-decoder-v17-bpe-upscale/tensorboard/events.out.tfevents.1734756335.masked_hostname.local.36222.0 filter=lfs diff=lfs merge=lfs -text
|
| 97 |
+
runs/code-decoder-v18-allTrains-customTokenizer/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 98 |
+
runs/code-decoder-v18-allTrains-customTokenizer/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 99 |
+
runs/code-decoder-v18-allTrains-customTokenizer/tensorboard/events.out.tfevents.1736301065.masked_hostname.local.17453.0 filter=lfs diff=lfs merge=lfs -text
|
| 100 |
+
runs/code-decoder-v19-bigset-5k/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 101 |
+
runs/code-decoder-v19-bigset-5k/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 102 |
+
runs/code-decoder-v19-bigset-5k/tensorboard/events.out.tfevents.1736830316.masked_hostname.local.27628.0 filter=lfs diff=lfs merge=lfs -text
|
| 103 |
+
runs/code-decoder-v2-smallchar/tensorboard/events.out.tfevents.1732481060.masked_hostname.local.25901.0 filter=lfs diff=lfs merge=lfs -text
|
| 104 |
+
runs/code-decoder-v2-smallchar/tensorboard/events.out.tfevents.1732492365.masked_hostname.local.33592.0 filter=lfs diff=lfs merge=lfs -text
|
| 105 |
+
runs/code-decoder-v2-smallchar/tensorboard/events.out.tfevents.1732492422.masked_hostname.local.33654.0 filter=lfs diff=lfs merge=lfs -text
|
| 106 |
+
runs/code-decoder-v20-bigset-153k/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 107 |
+
runs/code-decoder-v20-bigset-153k/tensorboard/events.out.tfevents.1736903138.masked_hostname.local.53701.0 filter=lfs diff=lfs merge=lfs -text
|
| 108 |
+
runs/code-decoder-v21-alltrains-tuner/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 109 |
+
runs/code-decoder-v21-alltrains-tuner/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 110 |
+
runs/code-decoder-v21-alltrains-tuner/tensorboard/events.out.tfevents.1738115963.masked_hostname.local.28117.0 filter=lfs diff=lfs merge=lfs -text
|
| 111 |
+
runs/code-decoder-v21-alltrains-tuner/tensorboard/events.out.tfevents.1738116064.masked_hostname.local.28231.0 filter=lfs diff=lfs merge=lfs -text
|
| 112 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_big_deeper/tensorboard/events.out.tfevents.1739681142.masked_hostname.local.78963.7 filter=lfs diff=lfs merge=lfs -text
|
| 113 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_big_deeper/tensorboard/events.out.tfevents.1739725160.masked_hostname.local.91729.7 filter=lfs diff=lfs merge=lfs -text
|
| 114 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 115 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 116 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim/tensorboard/events.out.tfevents.1739666843.masked_hostname.local.62901.5 filter=lfs diff=lfs merge=lfs -text
|
| 117 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim/tensorboard/events.out.tfevents.1739675798.masked_hostname.local.78963.5 filter=lfs diff=lfs merge=lfs -text
|
| 118 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim/tensorboard/events.out.tfevents.1739725159.masked_hostname.local.91729.5 filter=lfs diff=lfs merge=lfs -text
|
| 119 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_bigdim_drop/tensorboard/events.out.tfevents.1739729210.masked_hostname.local.91729.9 filter=lfs diff=lfs merge=lfs -text
|
| 120 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_deep_smalldim/tensorboard/events.out.tfevents.1739662710.masked_hostname.local.62901.4 filter=lfs diff=lfs merge=lfs -text
|
| 121 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_deep_smalldim/tensorboard/events.out.tfevents.1739675797.masked_hostname.local.78963.4 filter=lfs diff=lfs merge=lfs -text
|
| 122 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_deep_smalldim/tensorboard/events.out.tfevents.1739725158.masked_hostname.local.91729.4 filter=lfs diff=lfs merge=lfs -text
|
| 123 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_deeper/tensorboard/events.out.tfevents.1739675798.masked_hostname.local.78963.6 filter=lfs diff=lfs merge=lfs -text
|
| 124 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_deeper/tensorboard/events.out.tfevents.1739725159.masked_hostname.local.91729.6 filter=lfs diff=lfs merge=lfs -text
|
| 125 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium/tensorboard/events.out.tfevents.1739600578.masked_hostname.local.46506.1 filter=lfs diff=lfs merge=lfs -text
|
| 126 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium/tensorboard/events.out.tfevents.1739641096.masked_hostname.local.55605.1 filter=lfs diff=lfs merge=lfs -text
|
| 127 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium/tensorboard/events.out.tfevents.1739661953.masked_hostname.local.62901.1 filter=lfs diff=lfs merge=lfs -text
|
| 128 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium/tensorboard/events.out.tfevents.1739675796.masked_hostname.local.78963.1 filter=lfs diff=lfs merge=lfs -text
|
| 129 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium/tensorboard/events.out.tfevents.1739725157.masked_hostname.local.91729.1 filter=lfs diff=lfs merge=lfs -text
|
| 130 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_medium_drop/tensorboard/events.out.tfevents.1739725160.masked_hostname.local.91729.8 filter=lfs diff=lfs merge=lfs -text
|
| 131 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_more_heads/tensorboard/events.out.tfevents.1739633763.masked_hostname.local.46506.2 filter=lfs diff=lfs merge=lfs -text
|
| 132 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_more_heads/tensorboard/events.out.tfevents.1739641096.masked_hostname.local.55605.2 filter=lfs diff=lfs merge=lfs -text
|
| 133 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_more_heads/tensorboard/events.out.tfevents.1739661953.masked_hostname.local.62901.2 filter=lfs diff=lfs merge=lfs -text
|
| 134 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_more_heads/tensorboard/events.out.tfevents.1739675797.masked_hostname.local.78963.2 filter=lfs diff=lfs merge=lfs -text
|
| 135 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_more_heads/tensorboard/events.out.tfevents.1739725158.masked_hostname.local.91729.2 filter=lfs diff=lfs merge=lfs -text
|
| 136 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_smalldim/tensorboard/events.out.tfevents.1739638220.masked_hostname.local.46506.3 filter=lfs diff=lfs merge=lfs -text
|
| 137 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_smalldim/tensorboard/events.out.tfevents.1739641096.masked_hostname.local.55605.3 filter=lfs diff=lfs merge=lfs -text
|
| 138 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_smalldim/tensorboard/events.out.tfevents.1739661954.masked_hostname.local.62901.3 filter=lfs diff=lfs merge=lfs -text
|
| 139 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_smalldim/tensorboard/events.out.tfevents.1739675797.masked_hostname.local.78963.3 filter=lfs diff=lfs merge=lfs -text
|
| 140 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_smalldim/tensorboard/events.out.tfevents.1739725158.masked_hostname.local.91729.3 filter=lfs diff=lfs merge=lfs -text
|
| 141 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_tiny/tensorboard/events.out.tfevents.1739597728.masked_hostname.local.46506.0 filter=lfs diff=lfs merge=lfs -text
|
| 142 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_tiny/tensorboard/events.out.tfevents.1739641095.masked_hostname.local.55605.0 filter=lfs diff=lfs merge=lfs -text
|
| 143 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_tiny/tensorboard/events.out.tfevents.1739661952.masked_hostname.local.62901.0 filter=lfs diff=lfs merge=lfs -text
|
| 144 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_tiny/tensorboard/events.out.tfevents.1739675796.masked_hostname.local.78963.0 filter=lfs diff=lfs merge=lfs -text
|
| 145 |
+
runs/code-decoder-v22-bigset-tuner/CONFIG_tiny/tensorboard/events.out.tfevents.1739725157.masked_hostname.local.91729.0 filter=lfs diff=lfs merge=lfs -text
|
| 146 |
+
runs/code-decoder-v22-bigset-tuner/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 147 |
+
runs/code-decoder-v22-bigset-tuner/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 148 |
+
runs/code-decoder-v22-bigset-tuner/config_v1/CONFIG_medium/tensorboard/events.out.tfevents.1739584945.masked_hostname.local.39155.0 filter=lfs diff=lfs merge=lfs -text
|
| 149 |
+
runs/code-decoder-v22-bigset-tuner/config_v1/CONFIG_tiny/tensorboard/events.out.tfevents.1739584945.masked_hostname.local.39155.0 filter=lfs diff=lfs merge=lfs -text
|
| 150 |
+
runs/code-decoder-v22-bigset-tuner/tensorboard/events.out.tfevents.1739510905.masked_hostname.local.5175.0 filter=lfs diff=lfs merge=lfs -text
|
| 151 |
+
runs/code-decoder-v22-bigset-tuner/tensorboard/events.out.tfevents.1739546361.masked_hostname.local.11555.0 filter=lfs diff=lfs merge=lfs -text
|
| 152 |
+
runs/code-decoder-v22-bigset-tuner/tensorboard/events.out.tfevents.1739571108.masked_hostname.local.29698.0 filter=lfs diff=lfs merge=lfs -text
|
| 153 |
+
runs/code-decoder-v22-bigset-tuner/tensorboard/events.out.tfevents.1739572068.masked_hostname.local.30425.0 filter=lfs diff=lfs merge=lfs -text
|
| 154 |
+
runs/code-decoder-v22-bigset-tuner/tensorboard/events.out.tfevents.1739577463.masked_hostname.local.36626.0 filter=lfs diff=lfs merge=lfs -text
|
| 155 |
+
runs/code-decoder-v23-mega-nontopkacc/tensorboard/events.out.tfevents.1740115503.masked_hostname.local.21733.0 filter=lfs diff=lfs merge=lfs -text
|
| 156 |
+
runs/code-decoder-v23-mega-nontopkacc/tensorboard/events.out.tfevents.1740199837.masked_hostname.local.74855.0 filter=lfs diff=lfs merge=lfs -text
|
| 157 |
+
runs/code-decoder-v25-alltrains-anti/tensorboard/events.out.tfevents.1741741083.masked_hostname.local.37706.2 filter=lfs diff=lfs merge=lfs -text
|
| 158 |
+
runs/code-decoder-v25-alltrains-anti/tensorboard/events.out.tfevents.1741743494.masked_hostname.local.40585.2 filter=lfs diff=lfs merge=lfs -text
|
| 159 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741738828.masked_hostname.local.33536.0 filter=lfs diff=lfs merge=lfs -text
|
| 160 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741740526.masked_hostname.local.35444.0 filter=lfs diff=lfs merge=lfs -text
|
| 161 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741740565.masked_hostname.local.35796.0 filter=lfs diff=lfs merge=lfs -text
|
| 162 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741740625.masked_hostname.local.36318.0 filter=lfs diff=lfs merge=lfs -text
|
| 163 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741740748.masked_hostname.local.37706.0 filter=lfs diff=lfs merge=lfs -text
|
| 164 |
+
runs/code-decoder-v25-alltrains-scheduled/tensorboard/events.out.tfevents.1741743493.masked_hostname.local.40585.0 filter=lfs diff=lfs merge=lfs -text
|
| 165 |
+
runs/code-decoder-v25-alltrains-unscheduled/tensorboard/events.out.tfevents.1741740566.masked_hostname.local.35796.1 filter=lfs diff=lfs merge=lfs -text
|
| 166 |
+
runs/code-decoder-v25-alltrains-unscheduled/tensorboard/events.out.tfevents.1741740625.masked_hostname.local.36318.1 filter=lfs diff=lfs merge=lfs -text
|
| 167 |
+
runs/code-decoder-v25-alltrains-unscheduled/tensorboard/events.out.tfevents.1741740748.masked_hostname.local.37706.1 filter=lfs diff=lfs merge=lfs -text
|
| 168 |
+
runs/code-decoder-v25-alltrains-unscheduled/tensorboard/events.out.tfevents.1741743494.masked_hostname.local.40585.1 filter=lfs diff=lfs merge=lfs -text
|
| 169 |
+
runs/code-decoder-v26-med-anti/tensorboard/events.out.tfevents.1741780037.masked_hostname.local.40754.2 filter=lfs diff=lfs merge=lfs -text
|
| 170 |
+
runs/code-decoder-v26-med-scheduled/tensorboard/events.out.tfevents.1741740735.masked_hostname.local.37613.0 filter=lfs diff=lfs merge=lfs -text
|
| 171 |
+
runs/code-decoder-v26-med-scheduled/tensorboard/events.out.tfevents.1741741672.masked_hostname.local.38523.0 filter=lfs diff=lfs merge=lfs -text
|
| 172 |
+
runs/code-decoder-v26-med-scheduled/tensorboard/events.out.tfevents.1741743335.masked_hostname.local.39916.0 filter=lfs diff=lfs merge=lfs -text
|
| 173 |
+
runs/code-decoder-v26-med-scheduled/tensorboard/events.out.tfevents.1741743570.masked_hostname.local.40754.0 filter=lfs diff=lfs merge=lfs -text
|
| 174 |
+
runs/code-decoder-v26-med-unscheduled/tensorboard/events.out.tfevents.1741758743.masked_hostname.local.40754.1 filter=lfs diff=lfs merge=lfs -text
|
| 175 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-loss/tensorboard/events.out.tfevents.1742835548.masked_hostname.local.81114.4 filter=lfs diff=lfs merge=lfs -text
|
| 176 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-loss/tensorboard/events.out.tfevents.1742961345.masked_hostname.local.23462.4 filter=lfs diff=lfs merge=lfs -text
|
| 177 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-loss/tensorboard/events.out.tfevents.1743083756.masked_hostname.local.75759.4 filter=lfs diff=lfs merge=lfs -text
|
| 178 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-noloss/tensorboard/events.out.tfevents.1742648896.masked_hostname.local.11762.3 filter=lfs diff=lfs merge=lfs -text
|
| 179 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-noloss/tensorboard/events.out.tfevents.1742824696.masked_hostname.local.80788.3 filter=lfs diff=lfs merge=lfs -text
|
| 180 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-noloss/tensorboard/events.out.tfevents.1742824825.masked_hostname.local.81114.3 filter=lfs diff=lfs merge=lfs -text
|
| 181 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-noloss/tensorboard/events.out.tfevents.1742961344.masked_hostname.local.23462.3 filter=lfs diff=lfs merge=lfs -text
|
| 182 |
+
runs/code-decoder-v28-fullset-experiment/anticurriculum-noloss/tensorboard/events.out.tfevents.1743083755.masked_hostname.local.75759.3 filter=lfs diff=lfs merge=lfs -text
|
| 183 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742565457.masked_hostname.local.4305.0 filter=lfs diff=lfs merge=lfs -text
|
| 184 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742612715.masked_hostname.local.11657.0 filter=lfs diff=lfs merge=lfs -text
|
| 185 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742612768.masked_hostname.local.11762.0 filter=lfs diff=lfs merge=lfs -text
|
| 186 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742824694.masked_hostname.local.80788.0 filter=lfs diff=lfs merge=lfs -text
|
| 187 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742824823.masked_hostname.local.81114.0 filter=lfs diff=lfs merge=lfs -text
|
| 188 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1742961342.masked_hostname.local.23462.0 filter=lfs diff=lfs merge=lfs -text
|
| 189 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-loss/tensorboard/events.out.tfevents.1743083753.masked_hostname.local.75759.0 filter=lfs diff=lfs merge=lfs -text
|
| 190 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-noloss/tensorboard/events.out.tfevents.1742628351.masked_hostname.local.11762.2 filter=lfs diff=lfs merge=lfs -text
|
| 191 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-noloss/tensorboard/events.out.tfevents.1742824695.masked_hostname.local.80788.2 filter=lfs diff=lfs merge=lfs -text
|
| 192 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-noloss/tensorboard/events.out.tfevents.1742824824.masked_hostname.local.81114.2 filter=lfs diff=lfs merge=lfs -text
|
| 193 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-noloss/tensorboard/events.out.tfevents.1742961343.masked_hostname.local.23462.2 filter=lfs diff=lfs merge=lfs -text
|
| 194 |
+
runs/code-decoder-v28-fullset-experiment/curriculum-noloss/tensorboard/events.out.tfevents.1743083754.masked_hostname.local.75759.2 filter=lfs diff=lfs merge=lfs -text
|
| 195 |
+
runs/code-decoder-v28-fullset-experiment/hybrid-loss/tensorboard/events.out.tfevents.1743096722.masked_hostname.local.75759.8 filter=lfs diff=lfs merge=lfs -text
|
| 196 |
+
runs/code-decoder-v28-fullset-experiment/hybrid-noloss/tensorboard/events.out.tfevents.1743025513.masked_hostname.local.23462.7 filter=lfs diff=lfs merge=lfs -text
|
| 197 |
+
runs/code-decoder-v28-fullset-experiment/hybrid-noloss/tensorboard/events.out.tfevents.1743083758.masked_hostname.local.75759.7 filter=lfs diff=lfs merge=lfs -text
|
| 198 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742597535.masked_hostname.local.4305.1 filter=lfs diff=lfs merge=lfs -text
|
| 199 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742612715.masked_hostname.local.11657.1 filter=lfs diff=lfs merge=lfs -text
|
| 200 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742612768.masked_hostname.local.11762.1 filter=lfs diff=lfs merge=lfs -text
|
| 201 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742824694.masked_hostname.local.80788.1 filter=lfs diff=lfs merge=lfs -text
|
| 202 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742824824.masked_hostname.local.81114.1 filter=lfs diff=lfs merge=lfs -text
|
| 203 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1742961343.masked_hostname.local.23462.1 filter=lfs diff=lfs merge=lfs -text
|
| 204 |
+
runs/code-decoder-v28-fullset-experiment/noop/tensorboard/events.out.tfevents.1743083754.masked_hostname.local.75759.1 filter=lfs diff=lfs merge=lfs -text
|
| 205 |
+
runs/code-decoder-v28-fullset-experiment/sequential-loss/tensorboard/events.out.tfevents.1742993038.masked_hostname.local.23462.6 filter=lfs diff=lfs merge=lfs -text
|
| 206 |
+
runs/code-decoder-v28-fullset-experiment/sequential-loss/tensorboard/events.out.tfevents.1743083758.masked_hostname.local.75759.6 filter=lfs diff=lfs merge=lfs -text
|
| 207 |
+
runs/code-decoder-v28-fullset-experiment/sequential-noloss/tensorboard/events.out.tfevents.1742972404.masked_hostname.local.23462.5 filter=lfs diff=lfs merge=lfs -text
|
| 208 |
+
runs/code-decoder-v28-fullset-experiment/sequential-noloss/tensorboard/events.out.tfevents.1743083757.masked_hostname.local.75759.5 filter=lfs diff=lfs merge=lfs -text
|
| 209 |
+
runs/code-decoder-v3-regularized/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 210 |
+
runs/code-decoder-v3-regularized/tensorboard/events.out.tfevents.1732493237.masked_hostname.local.34892.0 filter=lfs diff=lfs merge=lfs -text
|
| 211 |
+
runs/code-decoder-v30-alltrains-v2/anticurriculum/tensorboard/events.out.tfevents.1743555000.masked_hostname.local.28664.3 filter=lfs diff=lfs merge=lfs -text
|
| 212 |
+
runs/code-decoder-v30-alltrains-v2/anticurriculum-loss/tensorboard/events.out.tfevents.1743555651.masked_hostname.local.28664.4 filter=lfs diff=lfs merge=lfs -text
|
| 213 |
+
runs/code-decoder-v30-alltrains-v2/curriculum-loss/tensorboard/events.out.tfevents.1743553195.masked_hostname.local.28110.0 filter=lfs diff=lfs merge=lfs -text
|
| 214 |
+
runs/code-decoder-v30-alltrains-v2/curriculum-loss/tensorboard/events.out.tfevents.1743553543.masked_hostname.local.28664.0 filter=lfs diff=lfs merge=lfs -text
|
| 215 |
+
runs/code-decoder-v30-alltrains-v2/curriculum-noloss/tensorboard/events.out.tfevents.1743554472.masked_hostname.local.28664.2 filter=lfs diff=lfs merge=lfs -text
|
| 216 |
+
runs/code-decoder-v30-alltrains-v2/hybrid/tensorboard/events.out.tfevents.1743557556.masked_hostname.local.28664.7 filter=lfs diff=lfs merge=lfs -text
|
| 217 |
+
runs/code-decoder-v30-alltrains-v2/hybrid-loss/tensorboard/events.out.tfevents.1743558061.masked_hostname.local.28664.8 filter=lfs diff=lfs merge=lfs -text
|
| 218 |
+
runs/code-decoder-v30-alltrains-v2/noop/tensorboard/events.out.tfevents.1743554208.masked_hostname.local.28664.1 filter=lfs diff=lfs merge=lfs -text
|
| 219 |
+
runs/code-decoder-v30-alltrains-v2/sequential/tensorboard/events.out.tfevents.1743556334.masked_hostname.local.28664.5 filter=lfs diff=lfs merge=lfs -text
|
| 220 |
+
runs/code-decoder-v30-alltrains-v2/sequential-loss/tensorboard/events.out.tfevents.1743556828.masked_hostname.local.28664.6 filter=lfs diff=lfs merge=lfs -text
|
| 221 |
+
runs/code-decoder-v30-alltrains-v3/anticurriculum/tensorboard/events.out.tfevents.1745370988.masked_hostname.local.86231.3 filter=lfs diff=lfs merge=lfs -text
|
| 222 |
+
runs/code-decoder-v30-alltrains-v3/anticurriculum-loss/tensorboard/events.out.tfevents.1745371463.masked_hostname.local.86231.4 filter=lfs diff=lfs merge=lfs -text
|
| 223 |
+
runs/code-decoder-v30-alltrains-v3/curriculum-loss/tensorboard/events.out.tfevents.1745370040.masked_hostname.local.86231.1 filter=lfs diff=lfs merge=lfs -text
|
| 224 |
+
runs/code-decoder-v30-alltrains-v3/curriculum-noloss/tensorboard/events.out.tfevents.1745369463.masked_hostname.local.86231.0 filter=lfs diff=lfs merge=lfs -text
|
| 225 |
+
runs/code-decoder-v30-alltrains-v3/hybrid/tensorboard/events.out.tfevents.1745373525.masked_hostname.local.86231.7 filter=lfs diff=lfs merge=lfs -text
|
| 226 |
+
runs/code-decoder-v30-alltrains-v3/hybrid-loss/tensorboard/events.out.tfevents.1745374219.masked_hostname.local.86231.8 filter=lfs diff=lfs merge=lfs -text
|
| 227 |
+
runs/code-decoder-v30-alltrains-v3/noop/tensorboard/events.out.tfevents.1745370736.masked_hostname.local.86231.2 filter=lfs diff=lfs merge=lfs -text
|
| 228 |
+
runs/code-decoder-v30-alltrains-v3/sequential/tensorboard/events.out.tfevents.1745372095.masked_hostname.local.86231.5 filter=lfs diff=lfs merge=lfs -text
|
| 229 |
+
runs/code-decoder-v30-alltrains-v3/sequential-loss/tensorboard/events.out.tfevents.1745372622.masked_hostname.local.86231.6 filter=lfs diff=lfs merge=lfs -text
|
| 230 |
+
runs/code-decoder-v30-alltrains-v4-entropic/anticurriculum/tensorboard/events.out.tfevents.1745377207.masked_hostname.local.86231.12 filter=lfs diff=lfs merge=lfs -text
|
| 231 |
+
runs/code-decoder-v30-alltrains-v4-entropic/anticurriculum-loss/tensorboard/events.out.tfevents.1745377659.masked_hostname.local.86231.13 filter=lfs diff=lfs merge=lfs -text
|
| 232 |
+
runs/code-decoder-v30-alltrains-v4-entropic/curriculum-loss/tensorboard/events.out.tfevents.1745375980.masked_hostname.local.86231.10 filter=lfs diff=lfs merge=lfs -text
|
| 233 |
+
runs/code-decoder-v30-alltrains-v4-entropic/curriculum-noloss/tensorboard/events.out.tfevents.1745375217.masked_hostname.local.86231.9 filter=lfs diff=lfs merge=lfs -text
|
| 234 |
+
runs/code-decoder-v30-alltrains-v4-entropic/hybrid/tensorboard/events.out.tfevents.1745379411.masked_hostname.local.86231.16 filter=lfs diff=lfs merge=lfs -text
|
| 235 |
+
runs/code-decoder-v30-alltrains-v4-entropic/hybrid-loss/tensorboard/events.out.tfevents.1745379853.masked_hostname.local.86231.17 filter=lfs diff=lfs merge=lfs -text
|
| 236 |
+
runs/code-decoder-v30-alltrains-v4-entropic/noop/tensorboard/events.out.tfevents.1745376940.masked_hostname.local.86231.11 filter=lfs diff=lfs merge=lfs -text
|
| 237 |
+
runs/code-decoder-v30-alltrains-v4-entropic/sequential/tensorboard/events.out.tfevents.1745378311.masked_hostname.local.86231.14 filter=lfs diff=lfs merge=lfs -text
|
| 238 |
+
runs/code-decoder-v30-alltrains-v4-entropic/sequential-loss/tensorboard/events.out.tfevents.1745378808.masked_hostname.local.86231.15 filter=lfs diff=lfs merge=lfs -text
|
| 239 |
+
runs/code-decoder-v4-improved/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 240 |
+
runs/code-decoder-v4-improved/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 241 |
+
runs/code-decoder-v4-improved/tensorboard/events.out.tfevents.1732557364.masked_hostname.local.8849.0 filter=lfs diff=lfs merge=lfs -text
|
| 242 |
+
runs/code-decoder-v5-enabled/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 243 |
+
runs/code-decoder-v5-enabled/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 244 |
+
runs/code-decoder-v5-enabled/tensorboard/events.out.tfevents.1732655653.masked_hostname.local.44219.0 filter=lfs diff=lfs merge=lfs -text
|
| 245 |
+
runs/code-decoder-v6-big/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 246 |
+
runs/code-decoder-v6-big/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 247 |
+
runs/code-decoder-v6-big/tensorboard/events.out.tfevents.1732665038.masked_hostname.local.52234.0 filter=lfs diff=lfs merge=lfs -text
|
| 248 |
+
runs/code-decoder-v7-small/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 249 |
+
runs/code-decoder-v7-small/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 250 |
+
runs/code-decoder-v7-small/tensorboard/events.out.tfevents.1732748876.masked_hostname.local.80163.0 filter=lfs diff=lfs merge=lfs -text
|
| 251 |
+
runs/code-decoder-v8-smaller/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 252 |
+
runs/code-decoder-v8-smaller/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 253 |
+
runs/code-decoder-v8-smaller/tensorboard/events.out.tfevents.1733447357.masked_hostname.local.15359.0 filter=lfs diff=lfs merge=lfs -text
|
| 254 |
+
runs/code-decoder-v9-vanilla-smaller/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 255 |
+
runs/code-decoder-v9-vanilla-smaller/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 256 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733447829.masked_hostname.local.15671.0 filter=lfs diff=lfs merge=lfs -text
|
| 257 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733448292.masked_hostname.local.17686.0 filter=lfs diff=lfs merge=lfs -text
|
| 258 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733448409.masked_hostname.local.17748.0 filter=lfs diff=lfs merge=lfs -text
|
| 259 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733448598.masked_hostname.local.17845.0 filter=lfs diff=lfs merge=lfs -text
|
| 260 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733448613.masked_hostname.local.17867.0 filter=lfs diff=lfs merge=lfs -text
|
| 261 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733448630.masked_hostname.local.17903.0 filter=lfs diff=lfs merge=lfs -text
|
| 262 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733807199.masked_hostname.local.69912.0 filter=lfs diff=lfs merge=lfs -text
|
| 263 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733807383.masked_hostname.local.70012.0 filter=lfs diff=lfs merge=lfs -text
|
| 264 |
+
runs/code-decoder-v9-vanilla-smaller/tensorboard/events.out.tfevents.1733807429.masked_hostname.local.70077.0 filter=lfs diff=lfs merge=lfs -text
|
| 265 |
+
runs/run1-python/tensorboard/events.out.tfevents.1731190413.masked_hostname.local.15925.0 filter=lfs diff=lfs merge=lfs -text
|
| 266 |
+
runs/run1-python/tensorboard/events.out.tfevents.1731192504.masked_hostname.local.15925.1 filter=lfs diff=lfs merge=lfs -text
|
| 267 |
+
runs/run1-python/tensorboard/events.out.tfevents.1731215622.masked_hostname.local.29982.0 filter=lfs diff=lfs merge=lfs -text
|
| 268 |
+
runs/run1-python/tensorboard/events.out.tfevents.1731217662.masked_hostname.local.29982.1 filter=lfs diff=lfs merge=lfs -text
|
| 269 |
+
runs/run22_old/ckpt/best.pt filter=lfs diff=lfs merge=lfs -text
|
| 270 |
+
runs/run22_old/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 271 |
+
runs/run22_old/tensorboard/events.out.tfevents.1738980082.masked_hostname.local.19149.0 filter=lfs diff=lfs merge=lfs -text
|
| 272 |
+
runs/shakespeare-test/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 273 |
+
runs/shakespeare-test/tensorboard/events.out.tfevents.1731187656.masked_hostname.local.14040.0 filter=lfs diff=lfs merge=lfs -text
|
| 274 |
+
runs/shakespeare-test/tensorboard/events.out.tfevents.1731187997.masked_hostname.local.14040.1 filter=lfs diff=lfs merge=lfs -text
|
| 275 |
+
runs/shakespeare-test-v2/ckpt/epoch_27.pt filter=lfs diff=lfs merge=lfs -text
|
| 276 |
+
runs/shakespeare-test-v2/ckpt/epoch_62.pt filter=lfs diff=lfs merge=lfs -text
|
| 277 |
+
runs/shakespeare-test-v2/ckpt/latest.pt filter=lfs diff=lfs merge=lfs -text
|
| 278 |
+
runs/shakespeare-test-v2/tensorboard/events.out.tfevents.1731344010.masked_hostname.local.8638.0 filter=lfs diff=lfs merge=lfs -text
|
| 279 |
+
runs/shakespeare-test-v2/tensorboard/events.out.tfevents.1731344305.masked_hostname.local.8638.1 filter=lfs diff=lfs merge=lfs -text
|
| 280 |
+
runs/shakespeare-test-v2/tensorboard/events.out.tfevents.1731386358.masked_hostname.local.44186.0 filter=lfs diff=lfs merge=lfs -text
|
| 281 |
+
runs/shakespeare-test-v2/tensorboard/events.out.tfevents.1731386659.masked_hostname.local.44186.1 filter=lfs diff=lfs merge=lfs -text
|
| 282 |
+
runs/shakespeare-test-v2/tensorboard/events.out.tfevents.1731386819.masked_hostname.local.44920.0 filter=lfs diff=lfs merge=lfs -text
|
| 283 |
+
runs/wikitexter-v1/tensorboard/events.out.tfevents.1732661788.masked_hostname.local.50017.0 filter=lfs diff=lfs merge=lfs -text
|
| 284 |
+
scraping/files/python_files.txt filter=lfs diff=lfs merge=lfs -text
|
| 285 |
+
smaller-er-test-data/bpe_model.model filter=lfs diff=lfs merge=lfs -text
|
| 286 |
+
smaller-er-test-data/encoded_chunked.pt filter=lfs diff=lfs merge=lfs -text
|
| 287 |
+
smaller-test-data/bpe_model.model filter=lfs diff=lfs merge=lfs -text
|
| 288 |
+
smaller-test-data/encoded_chunked.pt filter=lfs diff=lfs merge=lfs -text
|
| 289 |
+
test-data/bpe_model.model filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.DS_Store
|
| 2 |
+
corpus.zip
|
| 3 |
+
*.pt
|
| 4 |
+
*.old
|
| 5 |
+
scraping/files/downloaded_files/*
|
| 6 |
+
|
| 7 |
+
!smaller-test-data/encoded_chunked.pt
|
| 8 |
+
!runs/code-decoder-v15-sliding/ckpt/best.pt
|
| 9 |
+
!runs/code-decoder-v15-sliding/ckpt/latest.pt
|
| 10 |
+
|
| 11 |
+
# Byte-compiled / optimized / DLL files
|
| 12 |
+
__pycache__/
|
| 13 |
+
*.py[cod]
|
| 14 |
+
*$py.class
|
| 15 |
+
|
| 16 |
+
# C extensions
|
| 17 |
+
*.so
|
| 18 |
+
|
| 19 |
+
# Distribution / packaging
|
| 20 |
+
.Python
|
| 21 |
+
build/
|
| 22 |
+
develop-eggs/
|
| 23 |
+
dist/
|
| 24 |
+
downloads/
|
| 25 |
+
eggs/
|
| 26 |
+
.eggs/
|
| 27 |
+
lib/
|
| 28 |
+
lib64/
|
| 29 |
+
parts/
|
| 30 |
+
sdist/
|
| 31 |
+
var/
|
| 32 |
+
wheels/
|
| 33 |
+
share/python-wheels/
|
| 34 |
+
*.egg-info/
|
| 35 |
+
.installed.cfg
|
| 36 |
+
*.egg
|
| 37 |
+
MANIFEST
|
| 38 |
+
|
| 39 |
+
# PyInstaller
|
| 40 |
+
# Usually these files are written by a python script from a template
|
| 41 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
| 42 |
+
*.manifest
|
| 43 |
+
*.spec
|
| 44 |
+
|
| 45 |
+
# Installer logs
|
| 46 |
+
pip-log.txt
|
| 47 |
+
pip-delete-this-directory.txt
|
| 48 |
+
|
| 49 |
+
# Unit test / coverage reports
|
| 50 |
+
htmlcov/
|
| 51 |
+
.tox/
|
| 52 |
+
.nox/
|
| 53 |
+
.coverage
|
| 54 |
+
.coverage.*
|
| 55 |
+
.cache
|
| 56 |
+
nosetests.xml
|
| 57 |
+
coverage.xml
|
| 58 |
+
*.cover
|
| 59 |
+
*.py,cover
|
| 60 |
+
.hypothesis/
|
| 61 |
+
.pytest_cache/
|
| 62 |
+
cover/
|
| 63 |
+
|
| 64 |
+
# Translations
|
| 65 |
+
*.mo
|
| 66 |
+
*.pot
|
| 67 |
+
|
| 68 |
+
# Django stuff:
|
| 69 |
+
*.log
|
| 70 |
+
local_settings.py
|
| 71 |
+
db.sqlite3
|
| 72 |
+
db.sqlite3-journal
|
| 73 |
+
|
| 74 |
+
# Flask stuff:
|
| 75 |
+
instance/
|
| 76 |
+
.webassets-cache
|
| 77 |
+
|
| 78 |
+
# Scrapy stuff:
|
| 79 |
+
.scrapy
|
| 80 |
+
|
| 81 |
+
# Sphinx documentation
|
| 82 |
+
docs/_build/
|
| 83 |
+
|
| 84 |
+
# PyBuilder
|
| 85 |
+
.pybuilder/
|
| 86 |
+
target/
|
| 87 |
+
|
| 88 |
+
# Jupyter Notebook
|
| 89 |
+
.ipynb_checkpoints
|
| 90 |
+
|
| 91 |
+
# IPython
|
| 92 |
+
profile_default/
|
| 93 |
+
ipython_config.py
|
| 94 |
+
|
| 95 |
+
# pyenv
|
| 96 |
+
# For a library or package, you might want to ignore these files since the code is
|
| 97 |
+
# intended to run in multiple environments; otherwise, check them in:
|
| 98 |
+
# .python-version
|
| 99 |
+
|
| 100 |
+
# pipenv
|
| 101 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
| 102 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
| 103 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
| 104 |
+
# install all needed dependencies.
|
| 105 |
+
#Pipfile.lock
|
| 106 |
+
|
| 107 |
+
# poetry
|
| 108 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
| 109 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
| 110 |
+
# commonly ignored for libraries.
|
| 111 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
| 112 |
+
#poetry.lock
|
| 113 |
+
|
| 114 |
+
# pdm
|
| 115 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
| 116 |
+
#pdm.lock
|
| 117 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
| 118 |
+
# in version control.
|
| 119 |
+
# https://pdm.fming.dev/#use-with-ide
|
| 120 |
+
.pdm.toml
|
| 121 |
+
|
| 122 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
| 123 |
+
__pypackages__/
|
| 124 |
+
|
| 125 |
+
# Celery stuff
|
| 126 |
+
celerybeat-schedule
|
| 127 |
+
celerybeat.pid
|
| 128 |
+
|
| 129 |
+
# SageMath parsed files
|
| 130 |
+
*.sage.py
|
| 131 |
+
|
| 132 |
+
# Environments
|
| 133 |
+
.env
|
| 134 |
+
.venv
|
| 135 |
+
env/
|
| 136 |
+
venv/
|
| 137 |
+
ENV/
|
| 138 |
+
env.bak/
|
| 139 |
+
venv.bak/
|
| 140 |
+
|
| 141 |
+
# Spyder project settings
|
| 142 |
+
.spyderproject
|
| 143 |
+
.spyproject
|
| 144 |
+
|
| 145 |
+
# Rope project settings
|
| 146 |
+
.ropeproject
|
| 147 |
+
|
| 148 |
+
# mkdocs documentation
|
| 149 |
+
/site
|
| 150 |
+
|
| 151 |
+
# mypy
|
| 152 |
+
.mypy_cache/
|
| 153 |
+
.dmypy.json
|
| 154 |
+
dmypy.json
|
| 155 |
+
|
| 156 |
+
# Pyre type checker
|
| 157 |
+
.pyre/
|
| 158 |
+
|
| 159 |
+
# pytype static type analyzer
|
| 160 |
+
.pytype/
|
| 161 |
+
|
| 162 |
+
# Cython debug symbols
|
| 163 |
+
cython_debug/
|
| 164 |
+
|
| 165 |
+
# PyCharm
|
| 166 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
| 167 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
| 168 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
| 169 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
| 170 |
+
#.idea/
|
| 171 |
+
|
NOTES.md
ADDED
|
@@ -0,0 +1,1079 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# code-completion
|
| 2 |
+
|
| 3 |
+
- 5 nov 2024
|
| 4 |
+
- Just doing some research about code completion.
|
| 5 |
+
- Current idea for data gathering: Scrape github but filter on the following criteria:
|
| 6 |
+
- Repo with >100 stars
|
| 7 |
+
- Only python code, the repo must contain mostly python code also
|
| 8 |
+
- Small files: >100 bytes but <100 kb maybe? Paper did 100mb
|
| 9 |
+
- Yes
|
| 10 |
+
- 7 nov
|
| 11 |
+
- more stuff
|
| 12 |
+
- https://github.com/search?q=language%3APython+size%3A5..5000+stars%3A%3E%3D100+license%3AMIT+template%3Afalse&type=repositories
|
| 13 |
+
- https://github.com/search?q=language%3APython+size%3A5..5000+stars%3A%3E%3D100+template%3Afalse&type=repositories
|
| 14 |
+
- 8 nov
|
| 15 |
+
- Take a break from transformers, do some scraping!
|
| 16 |
+
- See `scraping/`
|
| 17 |
+
- Github api requests, switch out PATs for higher ratelimits
|
| 18 |
+
- Scrape repos with the following criterion: >100 stars, python.
|
| 19 |
+
- Do in sections of months to get around the max 1000 results constraint
|
| 20 |
+
- So we have 30k of all python repositories since 2015
|
| 21 |
+
- Then scrape files, just .py files between 1 and 100 kb, using SHA to avoid reuse
|
| 22 |
+
- 9 nov
|
| 23 |
+
- Okay, actually working really hard on transformers now.
|
| 24 |
+
- We have the dataset, now I'm fixing up architecture, making training etc files
|
| 25 |
+
- Using BERT for tokenizing.
|
| 26 |
+
- This project is really starting to come along!
|
| 27 |
+
- Made eval.py and I am testing on a shakespeare dataset because it's smaller for now
|
| 28 |
+
- Fixed bugs with it always predicting `[PAD]`
|
| 29 |
+
- I think I will have to train it for a bit on shakespeare and see what I get
|
| 30 |
+
- Integrated tensorboard is bugging, so I open it on web now
|
| 31 |
+
- Nans in stuff, which I had to fix
|
| 32 |
+
- BRUH ok so the reason why it was generating a bunch of unused tokens was because I forgot to set vocab size
|
| 33 |
+
- But now that I've done it, it's working great! Here's some example generated shakespeare: `It is it mumle asch my farewell as together with dec within specialising hannibal glittering plea northern hale withindes iv more transit villains exeunt jaws men fearful trunk wo fertile togetheryr assignent moons . fellowyr hez rivers together translationstton neither most hearts nephew it rests virtuepl patterns make manson elves advocacy old firstous un same pole let ex pleasure monument presumably tis en is sparkle minds faithful polonius phil is fiery exeter germany give bed high high itbit most peace demonstration rescued thoughts why lords exeu`
|
| 34 |
+
- And that was after 12 minutes of training!
|
| 35 |
+
- I'm going to train it on code, but I think we should be in a good spot and we might be able to train it on wikipedia and have something of a chatgpt. That would be quite something.
|
| 36 |
+
- 11 nov
|
| 37 |
+
- Ok, so I trained it overnight on code, and we've got NaN's in the loss!
|
| 38 |
+
- Quite unfortunate, but I think I will pivot and make a TrainingManager class because train.py is becoming too big. Also, I will deNaN in there.
|
| 39 |
+
- The current plan is just to train on shakespeare as a test. Why? Because it's small and highly specific. We can use shakespeare to test if the model can really learn and "overfit" to Shakespeare's style. Wish me luck!
|
| 40 |
+
- Even on the shakespeare, these things do take a while to train. Epoch 13 in 2 hr 30 min. Loss curve is looking kind of funny: epoch zero is super high, then it went down to 1 and stayed there, and then suddenly jumped down to like 0.02. Picture: <img src="readme-imgs/shakespeare-test-v2-loss-curve-preliminary.png" width="200">
|
| 41 |
+
- it goes a lot faster when I close all the other apps and turn off the screen, understandably. (Up to epoch 18 just 45 minutes later)
|
| 42 |
+
- I'm considering using wandb, but tensorboard does all I need in terms of making little graphs of loss curves.
|
| 43 |
+
- Inference is still trash: (shakespeare) `Prompt: I am - Completion: faith tern- ndhelen thee more first serious gi and instances cleopatra taffhere`
|
| 44 |
+
- Feels pretty much the same as the sample up above
|
| 45 |
+
- Epoch 20 and loss is at 0.001
|
| 46 |
+
- Beginning to kind of learn sentence structures: `This is king this discovers hall for readiness dansister service old all chamber william usually ab promising soi linen ousostrather hall olivia monument most nmarcus robert applebeauty bride all caesar ' s ex eusort cha because ' usher doolivia old camouflage`
|
| 47 |
+
- Loss is going down, sentences are incoherent, but its picking up on shakespearean words: `Forsooth for silvia weep is sour though xiii exeter crack fee dread with ackthere lstaffforward wears suit safety de count gods spa but lai clarence exeter up remain intex utterly un messengers madam old wo messengers wo old painter additions here several join but music edthe apartments watch dold ex bi youth most old`
|
| 48 |
+
- There might be some Richard III overfitting: "exeter", "clarence", "messengers"
|
| 49 |
+
- Still, that's what we are aiming for
|
| 50 |
+
- While it hasn't really picked up on sentence structure, shakespeare itself has somewhat confusing structure as well.
|
| 51 |
+
- Current loss is 0.0015, we are 28 epochs and 5.5 hours in.
|
| 52 |
+
- Ok, well the Nan check flagged but bug caused it to just error and exit. My tensorboard is being weird too. Sample: `I am crantmore ultultbelong tis honest loved above though bitter gone hat sir was love old tutor er s en marcel more assurance destroy old aex euntpalaces fast old death ex euntas chase t des old friend tis spells death old ( lfla corner most sunshine mates barren lo aththgreat plotted wounds besides ] beside s messengers roast fairly gone uearth said horse quo good nceold breast la`
|
| 53 |
+
- Loss is going up also so that's a sign I stop. This also looks worse than the epoch 20 checkin. Well, that wasn't exactly reassuring.
|
| 54 |
+
- 16 nov
|
| 55 |
+
|
| 56 |
+
- Took a few-day break from this, no clear quickfix. As a sanity check, I will train with the trainer from https://github.com/sgrvinod/a-PyTorch-Tutorial-to-Transformers, and then see if it works and go from there. Should have probably done this sooner, but oh well.
|
| 57 |
+
- Alright. I've cloned it over, but I will keep notes here. Fork is @ https://github.com/JBlitzar/a-PyTorch-Tutorial-to-Transformers
|
| 58 |
+
|
| 59 |
+
- Within the subquest of getting this to work, here's what I've done so far:
|
| 60 |
+
- Rename directories to work with my naming scheme
|
| 61 |
+
- reset model checkpoint
|
| 62 |
+
- Add a gitignore
|
| 63 |
+
- Get data.
|
| 64 |
+
- I'm about to train. Idea is to see if this works, then get it to work on autoregression, then modify if wanted!
|
| 65 |
+
- Quickly added tqdm to see progress.
|
| 66 |
+
- This person really comments _every single line_. Case in point:
|
| 67 |
+
|
| 68 |
+
```python
|
| 69 |
+
model.eval() # eval mode disables dropout
|
| 70 |
+
# Prohibit gradient computation explicitly
|
| 71 |
+
|
| 72 |
+
with torch.no_grad():
|
| 73 |
+
|
| 74 |
+
```
|
| 75 |
+
|
| 76 |
+
- Oh my goodness this guy made his own "sequenceloader" which _doesn't inherit from dataloader_ and forgot utility methods like `__len__`. I guess I'll add them so tqdm actually works!
|
| 77 |
+
- We're looking at 13h epochs. Only 24gb ram used, I'll turn that up and crank overnight. _This is why tqdm is useful. We can look at how long things like this take and react._
|
| 78 |
+
- Added caffeinate
|
| 79 |
+
- Something is weird. Actmon says 81 gb used, 8gb real and 478gb virtual. Shared and private are either small or negative. <img src="readme-imgs/weird_mem_usage.png" width="200">
|
| 80 |
+
- What do you know, MPS backend got OOM. Rather than debug this, I'm going to pivot and try to get https://github.com/tunz/transformer-pytorch working. Repo is clean and concise. Author seems like they know the paper inside and out, given they wrote https://tunz.kr/post/4.
|
| 81 |
+
|
| 82 |
+
- Side-quest 2: Getting https://github.com/tunz/transformer-pytorch to work
|
| 83 |
+
- Idiosyncratic #1: Bro uses his own library for custom pytorch operations _implemented in c++_
|
| 84 |
+
- Cool, kind of a pain for the rest of us for one function. https://github.com/tunz/tcop-pytorch/tree/master
|
| 85 |
+
- So only uses tcop in fast_transformer.py, that's also the only difference. https://tunz.kr/post/5 says that it was only 2% faster, so whatever. Kind of sad he went on this whole quest to rewrite a torch op in c++ and it only got 2% faster. Why? He analyzed one area: MHA. I'm absolutely sure that the main time crunch is backprop and Linear. Also when rewriting a couple operators into one vs using pytorch's very optimized ones in succession, you will get similar results
|
| 86 |
+
- It's also designed for autoregression.
|
| 87 |
+
- Dataloading is a bit convoluted, but for now I will trust the process.
|
| 88 |
+
- Quickfixes of recasting to bool bc mps, did weights_only for security
|
| 89 |
+
- Looking up! 20 min epochs, and I will analyze via tensorboard.
|
| 90 |
+
- Did a few fixes with deserialization, and started training. Tensorboard is very fun. Mem usage is actually perfect. TBH, this is how I would've structured the project. Good job tunz 5 years ago! You've earned yourself a star.
|
| 91 |
+
|
| 92 |
+
- 17 nov
|
| 93 |
+
|
| 94 |
+
- We are still working on our fork at https://github.com/jblitzar/transformer-pytorch to work.
|
| 95 |
+
- Loss is around 3.5
|
| 96 |
+
- After training overnight and doing some decoding, we get for the result `[This is] one of the world 's most successful companies . <eos>` (prompt in brackets).
|
| 97 |
+
- Pretty great!
|
| 98 |
+
- Says some interesting things.
|
| 99 |
+
- `[I am not] the only person who has been arrested . <eos>`
|
| 100 |
+
- `[The trees] have been destroyed in a blaze at a house in <unk> . <eos>` (Does this on anything related to trees)
|
| 101 |
+
- `[He is] one of the world 's most successful companies . <eos>` Indicates overfitting on that phrase
|
| 102 |
+
- `[I am trying to] find a solution to the problem . <eos>`
|
| 103 |
+
- `[She is a person who] has a lot to learn from . <eos>` Finally not a company completion. Using "she" might lead the model away from overfitting male-gendered pronouns to stereotypical business-related completions. Compare with `[He is a person who] has a lot of experience and experience . <eos> . . <eos>`
|
| 104 |
+
- `[It is an example of] the <unk> of <unk> . <eos>` Lots of unk
|
| 105 |
+
- `[The idea is to] create a " <unk> " system that allows people to use the internet to communicate with friends . <eos>` Interesting. News articles seem very company and social-media focused.
|
| 106 |
+
- `[The meaning of life is] not the same . <eos>`
|
| 107 |
+
- `[The secret is] one of the world 's most popular <unk> . <eos>`
|
| 108 |
+
- `[Success is] one of the world 's most successful companies . <eos>` Broke the streak!
|
| 109 |
+
- `[A person is] not the only person who has been arrested . <eos>` The arrested one again.
|
| 110 |
+
- `[An animal is] one of the world 's most endangered species . <eos>` Makes sense, quite vague
|
| 111 |
+
- `[He is not] the only one of the most popular <unk> in the world . <eos>` It was going for the company one, wasnt it.
|
| 112 |
+
- I generated some more after a bit, and it's interesting.
|
| 113 |
+
- `[illegal] immigration is a major problem in the united states . <eos>` The news is showing :\
|
| 114 |
+
- `[aawoipehaweio apiouhguivcnxn] , a spokesman for <unk> , said the company had not yet been notified . <eos>`. Does this for all unknown tokens I'm pretty sure. See `[Interesting] <unk> , a spokesman for <unk> , said : " we are pleased to announce that we have reached a new agreement with <unk> . <eos>`. `[Example] <unk> , a spokesman for <unk> , said : " we are pleased to announce that we have reached a new agreement with <unk> . <eos>`
|
| 115 |
+
- `[Climate change is] a good thing . <eos>` Extremely worrying
|
| 116 |
+
- `[The internet is] one of the world 's largest internet companies . <eos> <eos>` ok.
|
| 117 |
+
- `[What is the] leading provider of <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , and <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk> , <unk>` The unknown tokens.
|
| 118 |
+
- `[The economy] , which has been hit hard by the credit crunch , has been hit hard by the recession . <eos>` Very news-related.
|
| 119 |
+
- What's with the violence: `[This man] , who has been in custody for more than a year , was arrested on suspicion of causing grievous bodily harm . <eos>`
|
| 120 |
+
- `[You are] not the only ones that have been affected . <eos> <eos>`
|
| 121 |
+
- `[Welcome to] be the first person to win a gold medal at the beijing olympics . <eos>`
|
| 122 |
+
- `[This is the only] person who has been charged in the case . <eos>` so much legal/arrest stuff
|
| 123 |
+
- `[There is hope that] this will be the end of the year . <eos>`
|
| 124 |
+
- `[h] & r block , which owns the company , said it had agreed to pay $ <unk> million to settle the lawsuit . <eos>`. Google searches reveal that there was a lawsuit in nov 2024, but also a bunch previously.
|
| 125 |
+
- At least `[war] is not the answer . <eos>`
|
| 126 |
+
- `[I'm wondering if] it 's a good thing . <eos>`
|
| 127 |
+
- `[The question is] , what do you think ? <eos>`
|
| 128 |
+
- `[google] , the world 's largest internet search engine , said it was looking at ways to make it easier for people to access the internet . <eos>` Actually good, prob overfit, but google search result for that exact sentence doesnt show anything.
|
| 129 |
+
- `[He was] the first person to be killed in the attack . <eos>` Always violence. Same with [She was]
|
| 130 |
+
- `[The trees in the rainforest are] also being planted . <eos>`
|
| 131 |
+
- `[I am sure that] this will be the case . <eos>`
|
| 132 |
+
- Loss is 3.9. Now its 3.5, 3.9 on val.
|
| 133 |
+
- According to chatgpt: "Your findings are fascinating and provide a clear picture of how the lm1b dataset has influenced your model's behavior. The outputs show a distinct news-style bias with a strong leaning toward legal, corporate, and environmental themes, as well as frequent reliance on `<unk>` tokens when specificity fails."
|
| 134 |
+
- To be added to. Might consider top-p sampling or increasing temperature or something from the current method of beam search, whatever that is. Somehow get rid of `<unk>`. Keep training for now.
|
| 135 |
+
- So what are the things we might change? Perhaps different sampling method, or continue generating past one sentence. Use `youtokentome` for tokenizing: The data preprocessing took suspiciously long, and `torchtext` is really jank.
|
| 136 |
+
- I ended the run at epoch 100, as val loss tipped up and no change in train loss.
|
| 137 |
+
|
| 138 |
+
- All right, so first of all we can see that they are tokenizing at the word level.
|
| 139 |
+
- I'm going to try to get wikitext working. It's small, factual, and neutral. https://huggingface.co/datasets/Salesforce/wikitext.
|
| 140 |
+
|
| 141 |
+
- Nov 19
|
| 142 |
+
- Nick thoughts:
|
| 143 |
+
- Sometimes just a stack of decoders, no XA just SA. Look into it.
|
| 144 |
+
- Mini autoencoder in feedforward.
|
| 145 |
+
- Look through https://github.com/hyunwoongko/transformer
|
| 146 |
+
- Preprocess code dataset by formatting or something.
|
| 147 |
+
- Prob black. Tokenization might be interesting
|
| 148 |
+
- Next steps: Research decoder stack, get proper tokenization, port tunz code over here, decide what to run it on (code completion probably)
|
| 149 |
+
- Decoder only transformers are just a stack of decoders (no XA ofc)
|
| 150 |
+
- https://datascience.stackexchange.com/questions/104179/is-the-transformer-decoder-an-autoregressive-model
|
| 151 |
+
- Teacher forcing at training, just use actual token not predicted token, duh.
|
| 152 |
+
- Decoder-only architecture opens it up as a classifier. Just knowing that it can be used for non-sequence stuff.
|
| 153 |
+
- Nov 21
|
| 154 |
+
- 75 min session.
|
| 155 |
+
- Just got decoder-only xformer implemented.
|
| 156 |
+
- Also got dataset working!
|
| 157 |
+
- Nov 22:
|
| 158 |
+
- Getting dataset working for more code-specific things, also reformatted all the files.
|
| 159 |
+
- removed "T" hack (from future self: I basically added it back in because YTTM tried to do whitespace norm :\ )
|
| 160 |
+
- I adjusted coverage, because someone put like a unicode dump and there were 14k unique chars
|
| 161 |
+
- After some more investigation, it isnt unicode dumps, its lots of comments in chinese, cuz so many chinese characters.
|
| 162 |
+
- AHAHAWHAfpesao9hasperugha me when I forget to set a flag that I created and then my code doesnt run removing chinese characters
|
| 163 |
+
- Nevermind, I'll just use coverage variable like I initially planned. Silly me with no flag.
|
| 164 |
+
- Switched to autopep8 because black was being weird
|
| 165 |
+
- Got it to work, and realized that YTTM does whitespace normalization. Fine usually, not fine rn.
|
| 166 |
+
- Autopep8 needs to ignore E402, duh, which moves imports to the top.
|
| 167 |
+
- Stopped caching chunked file and instead cached pre-chunked, pytorch loads it faster. idk, prob overhead with delimiters or something.
|
| 168 |
+
- But it's actually crazy. 76 vs 0.05 seconds to load.
|
| 169 |
+
- Loading data finally works, just did a quickfix on tensor-ifying attn_mask also.
|
| 170 |
+
- Train.py runs! Never have I ever spent so many hours on dataset.py (like 3 or 4. crazy.). I suppose also first time I used custom dataset.
|
| 171 |
+
- Nov 23:
|
| 172 |
+
- Ahaa so I thought I was good and I made the rookie error AGAIN of not setting vocab_size. I added a check if its default to print a warning.
|
| 173 |
+
- Reworked saving so that it saves best only.
|
| 174 |
+
- Maybe examine loss before letting it go overnight.
|
| 175 |
+
- In the spirit of testing, I'm going to run it on a minibatch and make sure val stuff works.
|
| 176 |
+
- Done, now retrain.
|
| 177 |
+
- Looking through https://github.com/hyunwoongko/transformer/blob/master/models/blocks/decoder_layer.py, the only other thing I missed was dropout.
|
| 178 |
+
- Made a decision to do only 70 chars for that 99%. Should be fine.
|
| 179 |
+
- Running train.py code-decoder-v2-smallchar. Train loss is a lot smaller than when I don't reduce chars.
|
| 180 |
+
- Loss is small, but it still has a ways to go: `for i in range(in s[i]) s[i]) s[i]) s[i]) s[i]) s[i]) _F _F .append( s[i]) s[i]) s[2] in range( in in in in s[i]) s[i]) s[i]) _func in in s[i]) s[i]) s[i]) s[i]) s[i]) s[i]) ] s[i]) s[i]) s[i]) s[i]) s[i]) s[i]) RIT s[i]) s[i]) tmp s[i]) s[i]) s[i]) s[i]) s[i]) s[i]) tmp s[i]) s[i]) s[i]) s[i]) s[i]) s[i]) tmp s[i]) s[i]) s[i]) s[i]) s[i]) tmp s[i]) tmp tmp s[i]) rgb .append( tmp tmp s[i]) s[i]) s[i]) _test, .append( tmp tmp s[i]) s[i]) s[i]) s[i]) NIST hy tmp tmp s[i]) s.to( s.to( s.to( tmp tmp s[i]) s[i]) NE NE hy s.to( 4 uidance s[i])`
|
| 181 |
+
- Stopped training, changed to smaller context window. Retraining. Perhaps 99% was too restrictive. Later retry with 99.5%, but should be fine I guess for now.
|
| 182 |
+
- Nov 24
|
| 183 |
+
- Ugh, more NaNs in the loss overnight.
|
| 184 |
+
- Time for some ✨debugging✨ - Nans in loss come from nans in results - But there are no nans in labels or batch. - `torch.isnan(batch).any()` - Removing layernorm didnt help. - I'm going to use `torch.autograd.set_detect_anomaly(True)` - Did some funny
|
| 185 |
+
|
| 186 |
+
```python
|
| 187 |
+
def forward_hook(module, input, output):
|
| 188 |
+
if isinstance(output, tuple):
|
| 189 |
+
return
|
| 190 |
+
if torch.isnan(output).any() or torch.isinf(output).any():
|
| 191 |
+
print(f"NaNs/Infs detected in {module}")
|
| 192 |
+
|
| 193 |
+
for module in net.modules():
|
| 194 |
+
module.register_forward_hook(forward_hook)
|
| 195 |
+
```
|
| 196 |
+
|
| 197 |
+
- Continuing
|
| 198 |
+
- So looks like MHA is the issue.
|
| 199 |
+
- And what do you know, https://github.com/pytorch/pytorch/issues/21518
|
| 200 |
+
- So yeah. Lets give that fix a try
|
| 201 |
+
- Nope, it seems that NaNs in x are occurring before they even are in MHA_selfattn.
|
| 202 |
+
- Ok, so it seems to be occurring when it comes out of the MultiHeadAttention.
|
| 203 |
+
- The problem is I forgot to cast mask to float 🤦 (I think?)
|
| 204 |
+
- Ran through val and no NaN.
|
| 205 |
+
- All right, going to retrain. Also lowered lr by a factor of 10 and it doesnt explode anymore, so good sign.
|
| 206 |
+
- Its been 2 epochs, avg loss went from 11 to 35, weird spikes. I'll put screenshot. <img src="readme-imgs/code-decoder-v2-loss-curve-1.png" width="200">
|
| 207 |
+
- Weird spikes: Perhaps do some sort of weight initialization? See https://github.com/pytorch/examples/blob/main/word_language_model/model.py
|
| 208 |
+
- Maybe LR is too low now that we actually diagnosed the nans as coming from my MHA.
|
| 209 |
+
- https://github.com/hyunwoongko/transformer/blob/master/train.py Uses Xavier initialization and clips gradients, so I will.
|
| 210 |
+
- Starting up runs/code-decoder-v3-regularized
|
| 211 |
+
- Training, sample: (bracketed prompt from dataset)
|
| 212 |
+
|
| 213 |
+
```python
|
| 214 |
+
[
|
| 215 |
+
loss_ = self.gwta_loss(out, yss, m, grid_factor=np.power(2, idx))
|
| 216 |
+
else:
|
| 217 |
+
loss_ = m(out, yss)
|
| 218 |
+
loss += loss_
|
| 219 |
+
losses.append(loss_.item())
|
| 220 |
+
|
| 221 |
+
loss.backward()
|
| 222 |
+
self.optimizers.step()
|
| 223 |
+
]
|
| 224 |
+
|
| 225 |
+
# -- Histd_ +', b as do ed single This to to update is update
|
| 226 |
+
# continue last LR ed this ") ', training
|
| 227 |
+
# if output if args.local_rank not arg e:
|
| 228 |
+
```
|
| 229 |
+
|
| 230 |
+
- Not quite there yet, but looking like code. optimizer.step() is indeed to update parameters, and it does involve training, so it's getting there.
|
| 231 |
+
- Nov 25
|
| 232 |
+
|
| 233 |
+
- So loss went up after a bit. It's kind of weird. <img src="readme-imgs/code-decoder-v3-loss-curve.png" width="500">
|
| 234 |
+
- Loss went wayy up after it seemed like it was going well going down.
|
| 235 |
+
- Maybe over-regularization? I'm going to implement the lr schedule that https://github.com/hyunwoongko/transformer does and I adjusted gradient clipping to a lower threshold (1.0 vs 10.0)
|
| 236 |
+
- https://github.com/hyunwoongko/transformer also just trained it a lot longer
|
| 237 |
+
- Learning is slower, loss is at 1.3, but its stable, which is good.
|
| 238 |
+
- Loss is literally the same within 2 decimal places as it was 2 hrs ago. 3.22 or whatever. Sometimes with projects, after you've done all the learning and coding, the rest is like fine-tuning and training 1 million times, and that isn't super fulfilling. Transformers was really cool though, and I totally want to run this on wikitext.
|
| 239 |
+
- So this is (maybe) a classic case of too-low learning rate and so it got stuck in local minimum. Fine line between overfitting and over-regularization.
|
| 240 |
+
|
| 241 |
+
- Rerunning with higher LR.
|
| 242 |
+
- `v5-enabled`.
|
| 243 |
+
- aaand a factor of 10 on the lr was too much (loss of 7k after a few steps). Split the difference and go 0.0005
|
| 244 |
+
- That was also too much and resulted in exploding gradients.
|
| 245 |
+
- Hyperparameter tuning is _the worst._
|
| 246 |
+
- 0.0003 seems to be converging faster but not exploding.
|
| 247 |
+
- Aw man! It was looking good but it was not to last.
|
| 248 |
+
- 0.0002?
|
| 249 |
+
|
| 250 |
+
- Converging faster, which is good. Loss is 1.33 after 42 mins.
|
| 251 |
+
- It seems to have leveled off at 1.32 again, just quicker this time.
|
| 252 |
+
|
| 253 |
+
- It's entirely possible that model is underpowered. Reports 19,711,760 params, but model size is tiny: 80 mb. Something going on?
|
| 254 |
+
|
| 255 |
+
- Just for fun, let's train on wikitext.
|
| 256 |
+
- Loss is 8 (now 7.6, now 7.2), yeah model is underpowered.
|
| 257 |
+
- What are standard model sizes? Looks like 12 decoders and 768-sized embedding.
|
| 258 |
+
- Previous was 6 decoders and 512 embedding.
|
| 259 |
+
- Ok, so wikitext was a good source of inspiration. Let's rerun code decoder with bigger. This will inevitably come with more hparam tuning
|
| 260 |
+
- Training `v6-big`
|
| 261 |
+
- 57911056 params
|
| 262 |
+
- Loss is only going up after first 100 steps. It hit 11k. This isnt right. LR down?
|
| 263 |
+
- Lr is down, now after 2 hrs its converged on loss of 6, its also only epoch 2. Unclear.
|
| 264 |
+
- Weird, so it didn't work. Just converged on 5.7. Ugh, minor architecture changes after already having a good model are the worst.
|
| 265 |
+
|
| 266 |
+
- Nov 27
|
| 267 |
+
- Nick thoughts:
|
| 268 |
+
- Simple. Scale down before scaling up.
|
| 269 |
+
- You get one line and just predict the next token?
|
| 270 |
+
- Look at it differently at how you want to evaluate it.
|
| 271 |
+
- Tweak optimizers etc. AdamW?
|
| 272 |
+
- The problem is not data scarcity. Is the model overpowered instead?
|
| 273 |
+
- Model might be overpowered given that its the same size as lm1b.
|
| 274 |
+
- 3 blocks, reduced dim, less heads.
|
| 275 |
+
- _really_ simplify
|
| 276 |
+
- Is it training?
|
| 277 |
+
- Maybe, doesn't really look too good. Markov-chain level, barely above random.
|
| 278 |
+
- How to prove that we can train something? Having a baseline of markov chain, rnn, really small xformer?
|
| 279 |
+
- How can we on the code task, convince ourselves that we can run something simple?
|
| 280 |
+
- Do a subset train, handpicked, micro.
|
| 281 |
+
- Train it on that, _get it to overfit._
|
| 282 |
+
- Grep for train.py, subset of that even
|
| 283 |
+
- 2 blocks, small dim, less heads.
|
| 284 |
+
- if still not working, consider another model
|
| 285 |
+
- rnn
|
| 286 |
+
- markov
|
| 287 |
+
- yikes
|
| 288 |
+
- Understand that it is challenging, do something you want
|
| 289 |
+
- "convince yourself that this works"
|
| 290 |
+
- How to figure out what isn't working, test and iterate quickly on small homogenous subset
|
| 291 |
+
- Trained it super small, all train.py files, num_heads small, low dim, low vocab size.
|
| 292 |
+
- Got loss of 1.234493 after 100 epochs. Go for 1k epochs? its 1.5s per epoch so.
|
| 293 |
+
- Nov 29
|
| 294 |
+
|
| 295 |
+
- Running eval on v7-small shows that it's still pretty bad though.
|
| 296 |
+
- Lets get a baseline markov chain, cuz why not?
|
| 297 |
+
- Also pro tokenization strategy: Strip comments and docstrings. Split by punctuation etc, within variable names split by capitalization and underscore.
|
| 298 |
+
- That was great except for the fact that it caused 186904 unique tokens.
|
| 299 |
+
|
| 300 |
+
- Retrained on _tiny_ dataset, and it did not overfit.
|
| 301 |
+
|
| 302 |
+
- December 9.
|
| 303 |
+
|
| 304 |
+
- Yeah. It's been a long week.
|
| 305 |
+
- Research allegedly shows that nn.Transformer accepts shapes of (seq_len, batch_size) for some reason instead of the other way around??
|
| 306 |
+
- Super non-debuggable error. See [stack trace](readme-imgs/funny_stack_trace.txt)
|
| 307 |
+
- - ok so it was that and then also just removing the trace.
|
| 308 |
+
- Here's what we get:
|
| 309 |
+
|
| 310 |
+
```
|
| 311 |
+
|
| 312 |
+
import numpy
|
| 313 |
+
numpy add
|
| 314 |
+
np np\_ numpy np
|
| 315 |
+
|
| 316 |
+
de(num1, def
|
| 317 |
+
numpy np npWorld!") print_hello_world(): defzzbuzz(ni num2): num2): n returnor num2): numpy np npD \* def def
|
| 318 |
+
|
| 319 |
+
def num2): num2):
|
| 320 |
+
multiply(num1, num2): np def num1 elsell %zzb num2):
|
| 321 |
+
cy(num1, def num1
|
| 322 |
+
3 numpy np np num2):
|
| 323 |
+
el def np % / 3 % num2 5r num1 == n + % n %
|
| 324 |
+
deforld!") num1ubtract(num1,
|
| 325 |
+
num1 1
|
| 326 |
+
= num2 num2): print(" if def 0:zzbuzz % % def %/ 0:,tubtract(num1, ==T num1
|
| 327 |
+
|
| 328 |
+
npizzbuzz(n): World!") def np defW - % def np def "Buzz" + % %
|
| 329 |
+
zzbu def def
|
| 330 |
+
multiply(num1,N if
|
| 331 |
+
%
|
| 332 |
+
num2INE % %<EOS>ply(num1,ellWorld!") % f a % def return eliforld!"b == / zzbuzz(n num2): <IND num1 _ % n 3(num np 0: ad -NT> num2 % == /NT return %orld!" ): %r! \* num2 return return+ % <IND tr(n nEzzbuDiply(num1, % 0:mport3 print_ numpy
|
| 333 |
+
np(num1
|
| 334 |
+
def
|
| 335 |
+
np print_hello_world(): == num2): % 5l num1 print("Hellotr(n) % num2 %izzbuzz(n): def def num1,
|
| 336 |
+
|
| 337 |
+
% "Fizz" num2 num2 % num2): def deftiply(num1, 0: s % def def num2 num2 5 % <UNK> multiply(num1, % % return num2): " def 0: divide(num1, r %, num2 r 3o add
|
| 338 |
+
|
| 339 |
+
"FNEW:% % ly(num1, return / 0: 0: def %or(n return (nfWorld!") num2! % 1World!") % 0:ello
|
| 340 |
+
num2): 0: divide(num1,T % % % return returnn
|
| 341 |
+
% %ltiply(num1, return def print(" num2 %a n "Buzz" divide(num1,z deftrael
|
| 342 |
+
)p 5 re "Buzz" returnvturn % defell divide(num1,mpor 5
|
| 343 |
+
multiply(num1, % print*hello* ide(num1, divide(num1, num2 ==mpor % = 0: num2): % % def s num2 % % ==n
|
| 344 |
+
divide(num1,izzbuzz(n): ! 0:y
|
| 345 |
+
|
| 346 |
+
rin
|
| 347 |
+
% divide(num1,a return (n divide(num1,ltiply(num1,
|
| 348 |
+
|
| 349 |
+
```
|
| 350 |
+
|
| 351 |
+
- Not ideal.
|
| 352 |
+
- It's sad when you make stuff and it doesnt work.
|
| 353 |
+
- Perhaps even try pytorch trainer on our code, but that's really product over process
|
| 354 |
+
- Where do you draw the line? what are the goals here? Transformers are cool. I hope I can make one myself.
|
| 355 |
+
- So there you go. We can stick with this for a while.
|
| 356 |
+
- At the same time, where's the innovation? LLMs exist, people already do transformers. It would be cool to either do something new and do a write-up on it or use AI/ML to solve a real-world problem
|
| 357 |
+
- We'll see I guess.
|
| 358 |
+
|
| 359 |
+
- Dec 12
|
| 360 |
+
- Debugged stuff, transpose is the way to go, compared w/ pytorch example
|
| 361 |
+
- Made a scuffed accuracy metric for next-token I guess, but it actually converged really well. Problem is elsewhere?
|
| 362 |
+
- 0.984 after 1000 epochs on the tiny dataset
|
| 363 |
+
- Take more time with eval file. Problem is probably there.
|
| 364 |
+
- _try not to rush_
|
| 365 |
+
- "faster than you can understand"
|
| 366 |
+
- Bias towards things? mask out, beam search
|
| 367 |
+
- So I just did the default code. Outputs
|
| 368 |
+
|
| 369 |
+
```
|
| 370 |
+
print_h 2 3 n de(num1, <INDE % np a "Buzz" ultiply(num1, mport tu else "Fizz" orl add(num1, <IND % 1
|
| 371 |
+
fizzbuzz(n): else zzbuzz(n): "Fizz" if y(num1, d tiply(num1,
|
| 372 |
+
T World!") p NEW
|
| 373 |
+
c " _ 15 add
|
| 374 |
+
<EOS> el + num2 "Fizzbuzz" def add(num1, nump print(" f as ultiply(num1, nump <NEW numpy or def num2 ltiply(num1, =
|
| 375 |
+
f a return - tr(n World!") print_hello_ orld ply(num1, as num print zzbuzz(n): orl subtract(num1, "F I izzbuzz(n): % ply(num1,
|
| 376 |
+
print("Hello, btract(num1,
|
| 377 |
+
np "Fizzbuzz" INE re l 5
|
| 378 |
+
izzbuzz(n): tract(num1, v tract vide(num1, f (num1, i
|
| 379 |
+
|
| 380 |
+
```
|
| 381 |
+
|
| 382 |
+
- WHY CANT I JUST HAVE THE MODEL WORK???
|
| 383 |
+
- I even just prompted with the dataset.
|
| 384 |
+
- I'm not sure how accuracy was up before, because this isnt matching.
|
| 385 |
+
|
| 386 |
+
```
|
| 387 |
+
|
| 388 |
+
MOST = torch.argmax(output.view(-1, output.size(-1)), dim=1)
|
| 389 |
+
print(MOST)
|
| 390 |
+
print(word)
|
| 391 |
+
print(word_idx)
|
| 392 |
+
print(T[-1])
|
| 393 |
+
exit()
|
| 394 |
+
|
| 395 |
+
```
|
| 396 |
+
|
| 397 |
+
- None of them match T[-1]. What's going on?
|
| 398 |
+
- I made train_dataset dataset. Now acc stays at .3?
|
| 399 |
+
- Maybe its better. Was it cheating before somehow?
|
| 400 |
+
- Listen, I gotta go to bed. I'll figure this out tomorrow, or not.
|
| 401 |
+
- 167 freaking commits, code borrowed from 3 separate codebases, and this thing doesnt even work when I give it all the answers
|
| 402 |
+
- Dec 13
|
| 403 |
+
- More compute doesnt help
|
| 404 |
+
- Dec 14
|
| 405 |
+
|
| 406 |
+
- We are going even smaller with smaller-er test data. Its just the alphabet over and over. It better overfit is all im saying.
|
| 407 |
+
- Acc at 47% looks like. Thats weird. Now 0.485. Some sort of weird limit? Like previously it stopped somewhere near 1/3, now its stopping somewhere near 1/2? Something weird going on. 0.489 accuracy.
|
| 408 |
+
- Ran again with another line of abcs in the data and it converged on 0.3. This must be a weird padding token thing.
|
| 409 |
+
- now THIS is depression:
|
| 410 |
+
|
| 411 |
+
```
|
| 412 |
+
m f d j I b <PAD> r z p h w W v A r d
|
| 413 |
+
<PAD> n g I y e o e x t <PAD> d I t b NE L W
|
| 414 |
+
h n n z p m l c m o <PAD> e o y g I j x A
|
| 415 |
+
s d h NE I <PAD> s t NE o n h d v NE A m o
|
| 416 |
+
L NE I u <PAD> l I NE o h e d t w e y x
|
| 417 |
+
|
| 418 |
+
```
|
| 419 |
+
|
| 420 |
+
- Yeah.
|
| 421 |
+
|
| 422 |
+
- Dec 16 - All right, so here's what we are going to do. - Lets set up a "sequential" dummy dataset that literally loads numbers 1,2,3,4,5, etc _as tokens_.
|
| 423 |
+
- Set up.
|
| 424 |
+
- Here's what batches are looking like before they get passed into the model:
|
| 425 |
+
`tensor([[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
|
| 426 |
+
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
|
| 427 |
+
[30, 31, 32, 33, 34, 35, 36, 37, 38, 39],
|
| 428 |
+
[40, 41, 42, 43, 44, 45, 46, 47, 48, 49],
|
| 429 |
+
[20, 21, 22, 23, 24, 25, 26, 27, 28, 29]], device='mps:0',
|
| 430 |
+
dtype=torch.int32)
|
| 431 |
+
`
|
| 432 |
+
- Cool. Looks right. Shape is [5,10], what it should be I think. Labels _should_ indeed be like that.
|
| 433 |
+
- Whatever that did, accuracy is back in the high nineties.
|
| 434 |
+
- 99%.
|
| 435 |
+
- `results = results.transpose(0, 1) # average bug` goes into the hall of fame
|
| 436 |
+
- Now lets pick apart eval.py.
|
| 437 |
+
- So I got it to work after some pain and reshaping. Refactored eval to be in a function, and just did simple multinomial. It works with next token, and I'm assuming that its just so darn overfit that other stuff doesnt really work.
|
| 438 |
+
- This is cool, lets end on a win for now.
|
| 439 |
+
- Dec 17
|
| 440 |
+
- Scaling back up to smaller (fizzbuzz) data
|
| 441 |
+
- Small data tweaks, used custom tokenizer, 96% acc 500 epochs.
|
| 442 |
+
- switched to argmax, got `import numpy <NEWLINE> <NEWLINE> <NEWLINE> return <NEWLINE> return <NEWLINE> return " <NEWLINE>` and yet 96% acc???
|
| 443 |
+
- Nick thoughts
|
| 444 |
+
- Training _works._ Works on numerical data, can work on simple tokenized code.
|
| 445 |
+
- Eval and generation is the issue.
|
| 446 |
+
- Recommend: take some time to go through eval.py, what is it that works in acc on training that doesn't work in eval?
|
| 447 |
+
-
|
| 448 |
+
- Dec 18
|
| 449 |
+
|
| 450 |
+
- Now it's only generating `<PAD>`
|
| 451 |
+
- So we made `tester_exactly_like_trainingmanager_please_please_work` and ran the same metric and acc is only 0.4013. Far cry from 0.96
|
| 452 |
+
- OH NAR
|
| 453 |
+
- We were loading `best.pt` from experiment folder, but that runs on val loss. Since I trashed the val dataset and it only overfits on train, best val loss was not best train loss. Ok acc is at 0.9934, but now we figure out why it doesn't work.
|
| 454 |
+
- "question everything from line 40-60" the bug on line 23:
|
| 455 |
+
- This is what happens when your code has too many moving parts. You forget stuff.
|
| 456 |
+
- alr we have the `tester_exactly_like_trainingmanager_only_last_please_work`
|
| 457 |
+
- Guys I think including "please please work" in the function name made it work
|
| 458 |
+
- which also works.
|
| 459 |
+
- Now we have `tester_exactly_like_trainingmanager_just_next_given_seq_pls`
|
| 460 |
+
- which works a bit less well
|
| 461 |
+
- It's time to commit, push, and revisit.
|
| 462 |
+
|
| 463 |
+
- Dec 19
|
| 464 |
+
|
| 465 |
+
- Evaluation works on small small code dataset
|
| 466 |
+
- Full output, clean later.
|
| 467 |
+
|
| 468 |
+
```python
|
| 469 |
+
tensor(0.9868)
|
| 470 |
+
(tensor([26, 25, 12, 5, 5, 6, 27, 8, 28, 12, 13, 5, 29, 28, 30, 31, 32, 32,
|
| 471 |
+
33, 35, 6, 16, 8, 9, 10, 11, 12, 13, 5, 14, 9, 17, 11, 5, 5, 6,
|
| 472 |
+
18, 8, 5, 14, 25, 34, 25, 5, 35, 28, 30, 36, 32, 32, 33, 13, 5, 14,
|
| 473 |
+
25, 37, 25, 35, 9, 21, 11, 5, 5, 6, 22, 23, 24, 8, 12, 13, 5, 22,
|
| 474 |
+
8, 25, 23, 10, 28, 12, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
| 475 |
+
0, 0, 0, 0, 0, 35, 28, 30, 38, 32, 32, 33, 13, 5, 14, 25, 27, 25,
|
| 476 |
+
5, 39, 13, 5, 14, 40, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10, 11,
|
| 477 |
+
12, 13, 5, 14, 9, 15, 11, 10, 11, 12, 13, 5, 14, 9, 19, 11, 5, 5,
|
| 478 |
+
6, 20, 8, 9, 10, 11, 12, 13]), tensor([26, 25, 12, 5, 5, 6, 27, 8, 28, 12, 13, 5, 29, 28, 30, 31, 32, 32,
|
| 479 |
+
33, 5, 6, 16, 8, 9, 10, 11, 12, 13, 5, 14, 9, 17, 11, 5, 5, 6,
|
| 480 |
+
18, 8, 5, 14, 25, 34, 25, 5, 35, 28, 30, 36, 32, 32, 33, 13, 5, 14,
|
| 481 |
+
25, 37, 25, 14, 9, 21, 11, 5, 5, 6, 22, 23, 24, 8, 12, 13, 5, 22,
|
| 482 |
+
8, 25, 23, 10, 28, 12, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
| 483 |
+
0, 0, 0, 0, 0, 35, 28, 30, 38, 32, 32, 33, 13, 5, 14, 25, 27, 25,
|
| 484 |
+
5, 39, 13, 5, 14, 40, 2, 3, 4, 5, 5, 5, 6, 7, 8, 9, 10, 11,
|
| 485 |
+
12, 13, 5, 14, 9, 15, 11, 10, 11, 12, 13, 5, 14, 9, 19, 11, 5, 5,
|
| 486 |
+
6, 20, 8, 9, 10, 11, 12, 13], dtype=torch.int32))
|
| 487 |
+
pretty please
|
| 488 |
+
tensor(1.)
|
| 489 |
+
(tensor([10, 11, 12, 13, 5, 14, 9, 19, 11, 5, 5, 6, 20, 8, 9, 10, 11, 12,
|
| 490 |
+
13]), tensor([10, 11, 12, 13, 5, 14, 9, 19, 11, 5, 5, 6, 20, 8, 9, 10, 11, 12,
|
| 491 |
+
13], dtype=torch.int32))
|
| 492 |
+
please please please
|
| 493 |
+
tensor(5)
|
| 494 |
+
Answer was 13
|
| 495 |
+
please please please
|
| 496 |
+
tensor(5)
|
| 497 |
+
Answer was 5
|
| 498 |
+
please please please
|
| 499 |
+
tensor([24, 26, 25, 12, 5, 5, 6, 27, 8, 28, 12, 13, 5, 29, 28, 30, 31, 32,
|
| 500 |
+
32], dtype=torch.int32)
|
| 501 |
+
world ! " ) <NEWLINE> <NEWLINE> def fizzbuzz ( n ) : <NEWLINE> if n % 3 = =
|
| 502 |
+
! " ) <NEWLINE> <NEWLINE> def fizzbuzz ( n ) : <NEWLINE> if n % 3 = = 0
|
| 503 |
+
that's inp I guess ^^
|
| 504 |
+
tensor([[24, 26, 25, 12, 5, 5, 6, 27, 8, 28, 12, 13, 5, 29, 28, 30, 31, 32,
|
| 505 |
+
32, 33, 13, 5, 14, 40, 5, 14, 25, 37, 25]])
|
| 506 |
+
world ! " ) <NEWLINE> <NEWLINE> def fizzbuzz ( n ) : <NEWLINE> if n % 3 = = 0 : <NEWLINE> return str <NEWLINE> return " buzz "
|
| 507 |
+
```
|
| 508 |
+
|
| 509 |
+
- ye.
|
| 510 |
+
|
| 511 |
+
- Dec 20
|
| 512 |
+
|
| 513 |
+
- Tried out sliding window, works really well.
|
| 514 |
+
- Tried to upscale, didn't work
|
| 515 |
+
|
| 516 |
+
- Gonna BPE instead maybe.
|
| 517 |
+
- also crank up batch size so faster
|
| 518 |
+
- except that was slower so it was the same speed
|
| 519 |
+
|
| 520 |
+
```
|
| 521 |
+
------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------
|
| 522 |
+
Name Self CPU % Self CPU CPU total % CPU total CPU time avg # of Calls
|
| 523 |
+
------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------
|
| 524 |
+
train_step 13.67% 63.885ms 87.14% 407.336ms 407.336ms 1
|
| 525 |
+
Optimizer.step#Adam.step 0.36% 1.674ms 16.75% 78.287ms 78.287ms 1
|
| 526 |
+
aten::scaled_dot_product_attention 0.00% 15.125us 10.99% 51.394ms 8.566ms 6
|
| 527 |
+
aten::_scaled_dot_product_attention_math 0.02% 87.170us 10.99% 51.379ms 8.563ms 6
|
| 528 |
+
aten::cross_entropy_loss 0.01% 65.377us 8.42% 39.360ms 39.360ms 1
|
| 529 |
+
aten::embedding 0.01% 30.419us 7.22% 33.771ms 33.771ms 1
|
| 530 |
+
aten::index_select 7.21% 33.701ms 7.21% 33.704ms 33.704ms 1
|
| 531 |
+
aten::item 0.03% 145.810us 6.63% 31.006ms 83.799us 370
|
| 532 |
+
aten::_local_scalar_dense 6.60% 30.843ms 6.60% 30.860ms 83.405us 370
|
| 533 |
+
aten::is_nonzero 0.00% 1.875us 5.94% 27.783ms 27.783ms 1
|
| 534 |
+
------------------------------------------------------- ------------ ------------ ------------ ------------ ------------ ------------
|
| 535 |
+
Self CPU time total: 467.476ms
|
| 536 |
+
|
| 537 |
+
```
|
| 538 |
+
|
| 539 |
+
- Just going to let it train and hope that something terrible doesn't happen. Well, that's why we save best.pt, innit?
|
| 540 |
+
|
| 541 |
+
- Dec 22
|
| 542 |
+
- Training didn't go well, I stopped it like thirty minutes later.
|
| 543 |
+
- Seems like it's spamming token 81
|
| 544 |
+
|
| 545 |
+
```
|
| 546 |
+
tensor([ 166, 2077, 109, 28, 20, 614, 2301, 917, 32, 755, 32, 3654,
|
| 547 |
+
293, 723, 3034, 978, 88, 88, 81, 166, 2077, 109, 28, 20,
|
| 548 |
+
614, 2301, 917, 32, 755, 15, 3654, 293, 723, 3034, 978, 88,
|
| 549 |
+
88, 81, 166, 2077, 109, 28, 20, 614, 2301, 917, 32, 755,
|
| 550 |
+
15], dtype=torch.int32)
|
| 551 |
+
def write_flat(f, name, ar, np.array([0, 0, 0])))
|
| 552 |
+
|
| 553 |
+
def write_flat(f, name, arr np.array([0, 0, 0])))
|
| 554 |
+
|
| 555 |
+
def write_flat(f, name, arr
|
| 556 |
+
batch ^ labels v
|
| 557 |
+
write_flat(f, name, ar, np.array([0, 0, 0])))
|
| 558 |
+
|
| 559 |
+
def write_flat(f, name, arr np.array([0, 0, 0])))
|
| 560 |
+
|
| 561 |
+
def write_flat(f, name, arr):
|
| 562 |
+
that's inp I guess ^^
|
| 563 |
+
tensor([[ 166, 2077, 109, 28, 20, 614, 2301, 917, 32, 755, 32, 3654,
|
| 564 |
+
293, 723, 3034, 978, 88, 88, 81, 166, 2077, 109, 28, 20,
|
| 565 |
+
614, 2301, 917, 32, 755, 15, 3654, 293, 723, 3034, 978, 88,
|
| 566 |
+
88, 81, 166, 2077, 109, 28, 20, 614, 2301, 917, 32, 755,
|
| 567 |
+
15, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81,
|
| 568 |
+
81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81,
|
| 569 |
+
81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81,
|
| 570 |
+
81, 81, 81, 81, 81]])
|
| 571 |
+
def write_flat(f, name, ar, np.array([0, 0, 0])))
|
| 572 |
+
|
| 573 |
+
def write_flat(f, name, arr np.array([0, 0, 0])))
|
| 574 |
+
|
| 575 |
+
def write_flat(f, name, arr
|
| 576 |
+
```
|
| 577 |
+
|
| 578 |
+
- What is 81?
|
| 579 |
+
- "<INDENT>".
|
| 580 |
+
|
| 581 |
+
- Jan 7
|
| 582 |
+
|
| 583 |
+
- Revisiting after a break with Nick.
|
| 584 |
+
- It works on fizzbuzz, which is a major win! - Scaling to more data with BPE, it struggles to learn. - Nick thoughts: Many things change. - Interesting idea to test BPE on fizzbuzz data - BPE is messing up
|
| 585 |
+
|
| 586 |
+
````
|
| 587 |
+
--- sep batch ---
|
| 588 |
+
|
| 589 |
+
if n % 3 == 0:
|
| 590 |
+
NEWLINE> if n % 3 == 0:
|
| 591 |
+
r
|
| 592 |
+
|
| 593 |
+
--- sep batch ---
|
| 594 |
+
DENT> elif n % 15 == 0:
|
| 595 |
+
<INz"
|
| 596 |
+
elif n % 15 == 0:
|
| 597 |
+
<IND"
|
| 598 |
+
<INDE
|
| 599 |
+
--- sep batch ---
|
| 600 |
+
def subtract(num1, num2):
|
| 601 |
+
um2
|
| 602 |
+
```
|
| 603 |
+
|
| 604 |
+
def subtract(num1, num2):
|
| 605 |
+
<m2
|
| 606 |
+
|
| 607 |
+
d
|
| 608 |
+
--- sep batch ---
|
| 609 |
+
|
| 610 |
+
````
|
| 611 |
+
|
| 612 |
+
- Search the literature for what others use for tokenization.
|
| 613 |
+
- Code custom tokenization seems good and not buggy and retaining information.
|
| 614 |
+
- If our issue is too many tokens, just make tokens not in the top 99% of usage or whatever thrown away
|
| 615 |
+
- if it throws away all variable names, revisit.
|
| 616 |
+
- lets code it up
|
| 617 |
+
- basically just did that. Used np.percentile, lowercase bug in that you need to split by caps before doing case normalization
|
| 618 |
+
- Perhaps train it up
|
| 619 |
+
- Nick says mod architecture: Don't jump _too much._
|
| 620 |
+
- Not much was removed. 4306 tokens.
|
| 621 |
+
- This code isn't actually very complex.
|
| 622 |
+
- Current config:
|
| 623 |
+
```
|
| 624 |
+
vocab_size = 4306
|
| 625 |
+
embed_dim = 256
|
| 626 |
+
heads = 4
|
| 627 |
+
ff_dim = 256
|
| 628 |
+
layers = 4
|
| 629 |
+
drop = 0
|
| 630 |
+
```
|
| 631 |
+
|
| 632 |
+
- Jan 8
|
| 633 |
+
- Here's results from eval, looking pretty good!
|
| 634 |
+
```
|
| 635 |
+
Getting first 20 tokens for batch and labels
|
| 636 |
+
tensor([ 18, 167, 19, 38, 22, 2312, 24, 2386, 19, 3, 3, 14,
|
| 637 |
+
421, 527, 18, 2283, 19, 20, 3, 59], dtype=torch.int32)
|
| 638 |
+
( i ) ] , stderr = fnull ) <newline> <newline> def process arguments ( argv ) : <newline> if
|
| 639 |
+
batch ^ labels v
|
| 640 |
+
i ) ] , stderr = fnull ) <newline> <newline> def process arguments ( argv ) : <newline> if len
|
| 641 |
+
that's inp I guess ^^
|
| 642 |
+
tensor([[ 18, 167, 19, 38, 22, 2312, 24, 2386, 19, 3, 3, 14,
|
| 643 |
+
421, 527, 18, 2283, 19, 20, 3, 59, 52, 18, 2283, 19,
|
| 644 |
+
249, 24, 119, 20, 3, 316, 18, 19, 3, 86, 20, 3,
|
| 645 |
+
2181, 162, 24, 2283, 37, 39, 38, 3, 3, 40, 2181, 162,
|
| 646 |
+
3, 3, 14, 316, 18, 19, 20, 3, 87, 18, 45, 2300]])
|
| 647 |
+
( i ) ] , stderr = fnull ) <newline> <newline> def process arguments ( argv ) : <newline> if len ( argv ) ! = 2 : <newline> help ( ) <newline> else : <newline> iteration num = argv [ 1 ] <newline> <newline> return iteration num <newline> <newline> def help ( ) : <newline> print ( ' usage
|
| 648 |
+
```
|
| 649 |
+
- So what are next steps? Maybe try it out on the big boy dataset?
|
| 650 |
+
- Implemented stride with sliding window because otherwise its 27,365,313 samples.
|
| 651 |
+
- Stride of 10 to a much more manageable 2m.
|
| 652 |
+
- unfortunately, a vocab size of 186906 is too much. Change thresh to 0.99 instead of 0.995 perhaps?
|
| 653 |
+
- Jan 13
|
| 654 |
+
- weird. Changing cutoff to 0.1 makes 186278 tokens, still a lot.
|
| 655 |
+
- I split by hyphen, also figured out how to cope with hex.
|
| 656 |
+
- Too many tokens, just strict count instead of trying to do percentages. There's some UNK, but that's life.
|
| 657 |
+
- `<newline> def <UNK> _ tv _ fn ( x , measurement = none ) : <newline> with torch . no _ grad ( ) : <newline> <UNK> = _ at ( measurement ) <newline> x , x _ mean = cs _ <UNK> ( x , <UNK> , niter = 1 ) <newline> return x , x _ mean <newline> return <UNK> _ tv _ fn <newline> <newline> predictor _ denoise _ update _ fn = get _ update _ fn ( predictor _ update _ fn ) <newline> corrector _ denoise _ update _ fn = get _ update _ fn ( corrector _ update _ fn ) <newline> mc _ update _ fn = get _ <UNK> _ tv _ fn ( ) <newline> <newline> def pc _ <UNK> ( model , data , measurement = none ) : <newline> with torch . no _ grad ( ) : <newline> x = sde . prior _ sampling ( data . shape ) . to ( data . device ) <newline> <newline> ones = torch . ones _ like ( x ) . to ( data . device ) <newline> norm _ const = _ at ( _ a ( ones ) ) <newline> timesteps = torch . linspace ( sde . t , eps , sde . n ) <newline> for i in tqdm ( range ( sde . n ) ) : <newline> t = timesteps [ i ] <newline> <newline> x _ batch = <UNK> ( x , 1 2 ) <newline> `
|
| 658 |
+
- 0345793 -> tokenizes each number
|
| 659 |
+
-
|
| 660 |
+
- Looking pretty good, going to train
|
| 661 |
+
- Jan 14
|
| 662 |
+
|
| 663 |
+
- 60% acc, not-good val loss again. Cranking up model size might do it.
|
| 664 |
+
- Trying eval. Just figured out that vocab size was misconfigured.
|
| 665 |
+
- maybe try with a big vocab size?
|
| 666 |
+
- Maybe 153k is feasible, just takes a while.
|
| 667 |
+
- Lesson: Try something out before assuming, "your computer can handle a lot more than you'd think"
|
| 668 |
+
- It produced recognizable code. First hurdle: it trains. Is it overfitting? Is it generalizable?
|
| 669 |
+
- We've gone through a lot. Almost hyperparameter tune. A big challenge because it takes a while to train. How to downscale and get the perf you are looking for?
|
| 670 |
+
- model size is important, optimizer is important.
|
| 671 |
+
- Metrics: lots of different things to see if your model is training well, lots of trade offs. Loss is good, Accuracy is kind of good (used for debugging).
|
| 672 |
+
- If it does work, start evaluating it. Is it generalizing or memorizing? Use f1 score perhaps. Also see if code fragments are copied and pasted. (check-memorization). Consider hyperparameter tuning, try scaling down or something? Will take a while.
|
| 673 |
+
- What if it doesn't work?
|
| 674 |
+
- No bug, at least it can learn.
|
| 675 |
+
- If it's just memorizing, then we need to generalize. Train loss _and_ val loss need to go down
|
| 676 |
+
- Does that mean that the model is too big? Could mean not enough data. Too restrictive with data collection? I need updated code.
|
| 677 |
+
- Experimented with many datasets: Code datasets, sequential numbers.
|
| 678 |
+
- Perhaps go back to the old dataset (only train.py). Stop training this, it won't do much. idk -nick
|
| 679 |
+
- Add in scheduler maybe. But it won't work unless you have more metrics more frequently.
|
| 680 |
+
- ⭐Action items
|
| 681 |
+
- Maybe let it run, maybe stop it. Nick says stop it. Incremental testing all the way.
|
| 682 |
+
- ⭐ Experiment from train.py dataset. From there, check for overfitting. Hyperparam tune. Smallest model possible.
|
| 683 |
+
- Model size first, then all other hyperparams. Consider adding f1 score. Isolated envs.
|
| 684 |
+
- Some libraries you need that are unupdated. Learn the hard way that it's nice to have isolated envs. Maybe? I'm not sure.
|
| 685 |
+
|
| 686 |
+
- Jan 16
|
| 687 |
+
|
| 688 |
+
- Running it on the small set, loss down, acc up, val loss up. the classic.
|
| 689 |
+
- Train loss down to 2, val loss up to 9
|
| 690 |
+
|
| 691 |
+
- Jan 21
|
| 692 |
+
|
| 693 |
+
- It's overfit, ran check-memorization with output from eval. Val loss is much higher than train loss
|
| 694 |
+
- <img src="readme-imgs/val-loss-v21.png" width="600px">
|
| 695 |
+
- So it's overfitting right away. Strange?
|
| 696 |
+
- List of things to mitigate overfitting. It's working in terms of overfitting, which is good. Dataset splitting is truly random, which is good.
|
| 697 |
+
- ⭐Is it actually overparameterized? Usually mitigates overfitting
|
| 698 |
+
- X Data augmentation? Very hard with code dataset.
|
| 699 |
+
- ~X Tweaking parameters or something?
|
| 700 |
+
- (?) Discrepancy btwn train and test dataset, but we think this is not true.
|
| 701 |
+
- Often a headache. Good to double check etc. It's the most obvious one, but it's bad if you get it wrong
|
| 702 |
+
- ⭐Regularization (dropout, gradient clipping)
|
| 703 |
+
- X Early stopping? but not because val loss is never good
|
| 704 |
+
- Which to pursue first? regularization or changing model size.
|
| 705 |
+
- going to builtin_architecture to tune.
|
| 706 |
+
- So these params (halved) still caused overfitting. `vocab_size = 3646 embed_dim = 128 heads = 2 ff_dim = 128 layers = 4 drop = 0`
|
| 707 |
+
- Even more extreme reduction and still no results. `vocab_size = 3646 embed_dim = 64 heads = 2 ff_dim = 64 layers = 2 drop = 0`
|
| 708 |
+
- Smaller model size isn't helping.
|
| 709 |
+
- Trying dropout at 0.3, crossing our fingers that no numerical instability. Dropout isn't working. Loss still going up on val.
|
| 710 |
+
- Decrease window size? Smaller context -> more similarity between train and test.
|
| 711 |
+
- Because of sliding window. train data is expressed in test. The discrepancy is therefore extremely worrying.
|
| 712 |
+
- testing out by making test dataset = train dataset.
|
| 713 |
+
- okay so val loss still went up, which means that our data pipeline is at fault
|
| 714 |
+
- 3 lines in line 269. **So it's _actually_ the same bug as before when it wasn't transposed, but this time I forgot to copy over the changes to valstep**
|
| 715 |
+
- So I guess have dataloaders that are different now and see what happens
|
| 716 |
+
- Changing something and having it in multiple places is the worst. I tried to have good OOP with TrainingManager, ~~but perhaps it just muddled it more.~~ Investigate a framework where call_model is a function and then trainstep and valstep just do different logging. Always the code reuse.
|
| 717 |
+
- great. So val loss actually goes down now with train loss with independent datasets. Even though it's slightly cheating because of sliding window idk. But it's not straight-ahead memorization.
|
| 718 |
+
- Closing thoughts:
|
| 719 |
+
- amazing bugfix
|
| 720 |
+
- model is really learning with train -> val
|
| 721 |
+
- Investigate sliding window, try to have truly independent dsets.
|
| 722 |
+
- Now all the code works, we want a model that works the best. Try dropout, regularization, etc, to prevent overfitting and induce real machine learning.
|
| 723 |
+
- toy with hyperparams, only when you have it perfect scale up. Penalize large weights with adamw or something? Lots of techniques
|
| 724 |
+
- oh also commit better oop
|
| 725 |
+
|
| 726 |
+
- Jan 24
|
| 727 |
+
- I did better OOP
|
| 728 |
+
- Allegedly perplexity is good loss?
|
| 729 |
+
- Jan 28
|
| 730 |
+
- Did some talking with Nick.
|
| 731 |
+
- We eliminated train/test pollution with sliding windows with the somewhat suspicious Datasplit_chunker.
|
| 732 |
+
- Considering different datasets, such as stackoverflow or python packages themselves, but maybe not. Perhaps cast the net wider and get a larger dataset? 50k files seems fine for now though.
|
| 733 |
+
- Actual normal training curve, signifying overfitting (check val loss)
|
| 734 |
+
- <img src="readme-imgs/normal-training-curve.png" width="30%">
|
| 735 |
+
- This shows us that data is independent but the model is still learning and generalizing.
|
| 736 |
+
- Check mem is still returning true, which is suboptimal to say the least.
|
| 737 |
+
- But sometimes not?? (seems like mostly not now, which is good)
|
| 738 |
+
- Sample: `<newline> dataset = [ ] <newline> for i in xrange ( batch _ size ) : <newline> point = np . random . randn ( 2 ) * 0 . 0 5 <newline> center = random . choice ( centers ) <newline> point [ 0 ] + = center [ 0 ] <newline> point [ 1 ] + = center` (not memorized.)
|
| 739 |
+
- Sample: `<newline> logger . info ( f " initial validation samples in first step . . . " ) <newline> model . eval ( ) <newline> <newline> gen _ validation _ samples ( validation _ pipeline , args , wandb , samples _ dir , train _ ts , train _ steps ) <newline> <newline> model . train ( ) <newline>` (not memorized)
|
| 740 |
+
- But at the same time, this looks pretty good.
|
| 741 |
+
- The logger.info was the part included in the dset.
|
| 742 |
+
- Lets train it on the big one again, just for funsies.
|
| 743 |
+
- Jan 29
|
| 744 |
+
|
| 745 |
+
- Trained, stagnated. It's pretty good.
|
| 746 |
+
- NOO NOT THE NEWLINE MEMORIZATION
|
| 747 |
+
- `= c . result <newline> <newline> if self . verbose : <newline> print ( ' \ texception : % s ' % self . result ) <newline> return <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline> <newline>`
|
| 748 |
+
- um `. v _ proj " , " p _ bg 3 9 4 " , " p _ bg 3 9 " , " p _ bg 3 9 " , " p _ bg 3 9 " , " p _ bg 3 9 " , " p _ bg 3 9 " , " p _ bg 3 `
|
| 749 |
+
- This one looks good: `dir ( absolute _ path ) <newline> dataset _ to _ create = os . path . join ( get _ dir , ' dataset _ name ' ) ) <newline> dataset _ to _ create = os . path . join ( get _ dir , ' dataset _ name ' ) ) <newline> dataset _ to _ create` ... until you realize that its repetition
|
| 750 |
+
- A weird one: `' nonpayable ' , ' type ' : ' function ' } , { ' inputs ' : [ { ' internaltype ' : ' uint 2 5 6 ' , ' name ' : ' ' , ' type ' : ' uint 2 5 6 ' } ] , ' name ' : ' ' , ' outputs`
|
| 751 |
+
- No direct mem at least.
|
| 752 |
+
|
| 753 |
+
- Feb 4
|
| 754 |
+
- Ran token analysis
|
| 755 |
+
```
|
| 756 |
+
Number of tokens that appear only once: 36431. Percentage: 0.0007120090008913647
|
| 757 |
+
Mean token count: 493.84688441047024
|
| 758 |
+
Median token count: 2.0
|
| 759 |
+
Standard deviation of token count: 40239.56310773193
|
| 760 |
+
Min token count: 0
|
| 761 |
+
Max token count: 7655766
|
| 762 |
+
Top 10 most frequent tokens:
|
| 763 |
+
> : 7655766
|
| 764 |
+
<: 7633073
|
| 765 |
+
tab: 4785963
|
| 766 |
+
newline: 2818005
|
| 767 |
+
\_: 2420050
|
| 768 |
+
.: 1680181
|
| 769 |
+
,: 1428405
|
| 770 |
+
(: 1374695
|
| 771 |
+
): 1371692
|
| 772 |
+
> =: 1294978
|
| 773 |
+
```
|
| 774 |
+
- Mean/median differential is crazy
|
| 775 |
+
- Most tokens appear twice.
|
| 776 |
+
- So only keeping tokens that appear >10 times leaves us with 22809, very reasonable.
|
| 777 |
+
- Training it up! So far it's looking optimistic
|
| 778 |
+
- Feb 5
|
| 779 |
+
- `tab > self . avg = self . sum / self . count < newline > < newline > < tab > def _ _ call _ _ ( self , x ) : < newline > < tab > < tab > return self . sum ( x ) < newline > < newline > < tab > def | PREFIX FROM TRAIN DSET: tab > self . avg = self . sum / self . count < newline > < newline > <`
|
| 780 |
+
- Still some rep. : `. md ' , rst _ file = filename + ' . rst ' < newline > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < | PREFIX FROM TRAIN DSET: . md ' , rst _ file = filename + ' . rst ' < newline > < tab >`
|
| 781 |
+
- :( `< newline > < tab > < newline > < tab > evaluating knn accuracy in feature space . < newline > < tab > < tab > < newline > < tab > < tab > < newline > < tab > < tab > < newline > < tab > < tab > < newline > < tab | PREFIX FROM TRAIN DSET: < newline > < tab > < newline > < tab > evaluating knn accuracy in feature space . <`
|
| 782 |
+
- Its actually just tabspamming: `dependencies automatically for any language . < newline > < newline > fabricate is a build tool that finds dependencies . < newline > < newline > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > < tab > | PREFIX FROM TRAIN DSET: dependencies automatically for any language . < newline > < newline > fabricate is a build tool that finds dependencies`
|
| 783 |
+
- I'm going to train a bigger model.
|
| 784 |
+
- Sad: `> problem ( ' abc ' , ' abd ' , ' <UNK> ' , < tab > iterations ) > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > | PREFIX FROM TRAIN DSET: > problem ( ' abc ' , ' abd ' , ' <UNK> ' , < tab > iterations )`
|
| 785 |
+
- Um so it just spams `>`: `in result < newline > < newline > < newline > def test _ get auto track target ( ) > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > | PREFIX FROM TRAIN DSET: in result < newline > < newline > < newline > def test _ get auto track target ( )`
|
| 786 |
+
- Maybe it's time for dropout. And label smoothing. oh wait label smoothing is already there. Ok, rerunning with dropout ig.
|
| 787 |
+
- Screenshot for posterity: <img src="readme-imgs/v22-another-run.png" width = "30%">
|
| 788 |
+
- You can see the 0.15 acc. Ok, so dropout now.
|
| 789 |
+
- A bit into training, it doesnt seem to be helping: `0 ] ) , < newline > < tab > < tab > < tab > < tab > nn > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > | PREFIX FROM TRAIN DSET: 0 ] ) , < newline > < tab > < tab > < tab > < tab > nn`
|
| 790 |
+
- Acc stuck at 0.15, probably because it's just constantly doing `>`
|
| 791 |
+
- So this sucks.
|
| 792 |
+
- I'm going to make a better, less skill-issued tokenizer:
|
| 793 |
+
```python
|
| 794 |
+
def split_token(token):
|
| 795 |
+
if token.startswith("<") and token.endswith(">"): # preserve ✨special✨ tokens
|
| 796 |
+
return [token.lower()]
|
| 797 |
+
result = re.sub(r"([a-z])([A-Z])", r"\1 \2", token)
|
| 798 |
+
result = re.sub(r"([_-])", r" \1 ", result)
|
| 799 |
+
result = re.sub(r"([^a-zA-Z])", r" \1 ", result)
|
| 800 |
+
return [part.lower() for part in result.split() if part.strip()]
|
| 801 |
+
```
|
| 802 |
+
- Let's see what this does. New datasample: `<tab> f ' acc @ 1 { acc 1 _ meter . val : . 3 f } ( { acc 1 _ meter . avg : . 3 f } ) \ t ' <newline> <tab> <tab> <tab> <tab> f ' acc @ 5 { acc 5 _ meter . val : . 3 f } ( { acc 5 _ meter . avg : . 3 f } ) \ t ' <newline> <tab> <tab> <tab> <tab> f ' mem { memory _ used : . 0 f } mb ' ) <newline> <tab> logger . info ( f ' * acc @ 1 { acc 1 _ meter . avg : . 3 f } acc @ 5 { acc 5 _ meter . avg : . 3 f } ' ) <newline> <tab> return acc 1 _ meter . avg , acc 5 _ meter . avg , loss _ meter . avg <newline> <newline> <newline> @ torch . no _ grad ( ) <newline> def throughput ( data _ loader , model , logger ) : <newline> <tab> model . eval ( ) <newline> <newline> <tab> for idx , ( images , _ ) in enumerate ( data _ loader ) : <newline> <tab> <tab> images = images . cuda ( non _ blocking = true ) <newline> <tab> <tab> batch _ size = images . shape [ 0 ] <newline> <tab> <tab> for i in range ( 5 0 ) : <newline> <tab> <tab> <tab> model ( images ) <newline>`
|
| 803 |
+
- Aand hit train yet again.
|
| 804 |
+
- Its just traded that for tab mem. `. exclusion _ file : <newline> <tab> <tab> with open ( args . exclusion _ file , " r " <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> | PREFIX FROM TRAIN DSET: . exclusion _ file : <newline> <tab> <tab> with open ( args . exclusion _ file , " r "`
|
| 805 |
+
- `_ format ) : <newline> <tab> previewer = none <newline> <tab> method = args . preview _ method <newline> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> <tab> | PREFIX FROM TRAIN DSET: _ format ) : <newline> <tab> previewer = none <newline> <tab> method = args . preview _ method <newline> <tab>`
|
| 806 |
+
- :\\
|
| 807 |
+
- 800 lines of notes, 2605 lines of code, 4 months of work, 241 commits, dozens of hours, and we get `<tab> <tab> <tab> <tab> <tab> <tab> <tab>`.
|
| 808 |
+
- You know what? You know what we're going to do? Train on a dataset where `<tab>` and `<newline>` doesnt exist. It looks like this: `coalesced = _ flatten _ dense _ tensors ( grads ) dist . all _ reduce ( coalesced ) coalesced / = dist . get _ world _ size ( ) for buf , synced in zip ( grads , _ unflatten _ dense _ tensors ( coalesced , grads ) ) :`
|
| 809 |
+
- brody `) : parser = argparse . argument parser ( description = " " ) parser . add _ argument ( _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | PREFIX FROM TRAIN DSET: ) : parser = argparse . argument parser ( description = " " ) parser . add _ argument (`
|
| 810 |
+
- It actually just memorizes the most common one.
|
| 811 |
+
- Feb 7
|
| 812 |
+
|
| 813 |
+
- apparently weight decay works? Also dropout in the embedding
|
| 814 |
+
```python
|
| 815 |
+
src = self.input_emb(src) * math.sqrt(self.ninp)
|
| 816 |
+
src = self.embedding_dropout(src)
|
| 817 |
+
src = self.pos_encoder(src)
|
| 818 |
+
```
|
| 819 |
+
- `self.optimizer = torch.optim.Adam(self.net.parameters(), lr=learning_rate, weight_decay=1e-5)`
|
| 820 |
+
- Actual average hyperparameter tuning.
|
| 821 |
+
- No change — it's almost ironic
|
| 822 |
+
- <img src="readme-imgs/no_change.png">
|
| 823 |
+
- Less compute is all you need? But also more training time.
|
| 824 |
+
|
| 825 |
+
```python
|
| 826 |
+
vocab_size = 22812
|
| 827 |
+
embed_dim = 256
|
| 828 |
+
heads = 4
|
| 829 |
+
ff_dim = 256
|
| 830 |
+
layers = 4
|
| 831 |
+
drop = 0.1
|
| 832 |
+
embedding_drop = 0.1
|
| 833 |
+
```
|
| 834 |
+
|
| 835 |
+
- `results [ index ] = [ rec ] results [ index ] . append ( text ) return results def get _ results ( results , results ) : results = results [ index ] results [ index ] results [ index ] results [ index ] results [ index ] results [ index ] results [ index ] results | PREFIX FROM TRAIN DSET: results [ index ] = [ rec ] results [ index ] . append ( text ) return results def`
|
| 836 |
+
- Upon further inspection, mostly repetition: `get ( ' image ' , ' ' ) if tag : image _ with _ tag = f ' { tag } ' else : image _ with _ tag = f ' { tag } ' else : image _ with _ tag = f ' { tag } ' else : image _ with _ tag = | PREFIX FROM TRAIN DSET: get ( ' image ' , ' ' ) if tag : image _ with _ tag = f '`
|
| 837 |
+
- It just seems to be copying: `= data _ loader . load _ data ( ) dataset _ size = len ( data _ loader ) dataset _ size = len ( data _ loader ) dataset _ size = len ( data _ loader ) dataset _ size = len ( data _ loader ) dataset _ size = len ( data _ loader ) | PREFIX FROM TRAIN DSET: = data _ loader . load _ data ( ) dataset _ size = len ( data _ loader )`
|
| 838 |
+
- Maybe a larger context window to test is all you need? I'm going to do 100 and 100 now.
|
| 839 |
+
<details>
|
| 840 |
+
<summary>Outputs</summary>
|
| 841 |
+
|
| 842 |
+
- No, it just "computes the score" `it does make it easy to mix different scores together . converts an event into the format needed by the <UNK> module , http : / / pp . com . mx / python / <UNK> the type of track ( opus or score ) is <UNK> . returns a " score " containing only the channels specified converts the " score " to midi , and feeds it into ' <UNK> - ' returns a " score " shifted in time by " shift " ticks , or shifted so that the first event starts at " start _ time " . returns a " score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute the score " to compute | PREFIX FROM TRAIN DSET: it does make it easy to mix different scores together . converts an event into the format needed by the <UNK> module , http : / / pp . com . mx / python / <UNK> the type of track ( opus or score ) is <UNK> . returns a " score " containing only the channels specified converts the " score " to midi , and feeds it into ' <UNK> - ' returns a " score " shifted in time by " shift " ticks , or shifted so that the first event starts at " start _ `
|
| 843 |
+
- Almost but not complete repetition: `s ) is not correctly formatted - - please read the documentation . " % path ) else : self . model = qt gui . qstandard item model ( parent = self ) self . model . set horizontal header labels ( self . field names ) self . freq _ table . set model ( self . model ) header = self . freq _ table . horizontal header ( ) for n , _ in enumerate ( self . field names ) : header . set section resize mode ( qheader view . stretch ) for record in self . field names : if record . is _ valid ( ) : self . data = self . data [ 0 ] self . data [ 1 ] = self . data [ 1 ] self . data [ 1 ] = self . data [ 1 ] self . data [ 1 ] = self . data [ 1 ] self . data [ 1 ] = self . data [ 1 ] self . data [ 1 ] = self . data [ 1 ] self . data [ 1 ] = self . data | PREFIX FROM TRAIN DSET: s ) is not correctly formatted - - please read the documentation . " % path ) else : self . model = qt gui . qstandard item model ( parent = self ) self . model . set horizontal header labels ( self . field names ) self . freq _ table . set model ( self . model ) header = self . freq _ table . horizontal header ( ) for n , _ in enumerate ( self . field names ) : header . set section resize mode ( qheader view . stretch ) for record`
|
| 844 |
+
- "code t" `i > seconds < / i > requested by : { message . from _ user . mention } < / b > < b > h { } , ts s s . < / b > < b > w g fs . g fs s s s s . < / b > a s : / <UNK> - < code > t . < / code > / <UNK> - < code > t s . < / code > / <UNK> - < code > t . < / code > / <UNK> - < code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . < / code > t . | PREFIX FROM TRAIN DSET: i > seconds < / i > requested by : { message . from _ user . mention } < / b > < b > h { } , ts s s . < / b > < b > w g fs . g fs s s s s . < / b > a s : / <UNK> - < code > t . < / code > / <UNK> - < code > t s . < / code > / <UNK> - < code > t . < / code > / <UNK> - < `
|
| 845 |
+
- After train: `. sort ( list ( v . keys ( ) ) ) all _ clips = all _ clips [ : num _ seq * ( len ( all _ clips ) / / num _ seq ) ] . reshape ( len ( all _ clips ) / / num _ seq , num _ seq ) for i in range ( all _ clips . shape [ 0 ] ) : self . subclip _ seqs . append ( ( all _ clips [ i ] , k , i ) ) torch . save ( ( self . clips [ i ] , self . clips [ i ] ) , self . clips [ i ] ) def _ _ getitem _ _ ( self , index ) : if index = = 0 : return self . clips [ index ] else : return self . clips [ index ] def _ _ getitem _ _ ( self , index ) : return self . clips [ index ] def _ _ getitem _ _ ( self , index ) : return self . clips [ index ] def _ _ getitem _ _ ( | PREFIX FROM TRAIN DSET: . sort ( list ( v . keys ( ) ) ) all _ clips = all _ clips [ : num _ seq * ( len ( all _ clips ) / / num _ seq ) ] . reshape ( len ( all _ clips ) / / num _ seq , num _ seq ) for i in range ( all _ clips . shape [ 0 ] ) : self . subclip _ seqs . append ( ( all _ clips [ i ] , k , i ) ) torch . save ( ( self `
|
| 846 |
+
</details>
|
| 847 |
+
|
| 848 |
+
- Like, it's something, but it's not great.
|
| 849 |
+
|
| 850 |
+
- Feb 8
|
| 851 |
+
- wait, the problem might be eval. Because just selecting the max prob is equivalent to temperature of 0.
|
| 852 |
+
- At least it thought of (or memorized) "save folder": `default = 0 . 9 , help = ' momentum ' ) parser . add _ argument ( ' - - weight _ decay ' , type = float , default = 1 e - 4 , help = ' weight decay ' ) parser . add _ argument ( ' - - num _ class ' , type = int , default = 1 0 , help = ' number of classes ' ) parser . add _ argument ( ' - - cuda ' , type = int , default = 1 ) parser . add _ argument ( ' - - save _ folder ' , type = str , default = ' data / save ' , help = ' save folder ' ) parser . add _ argument ( ' - - save _ folder ' , type = str , default = ' data / save ' , help = ' save folder ' ) parser . add _ argument ( ' - - save _ folder ' , type = str , default = ' data / save ' , help = ' save folder ' ) parser . add _ argument ( | PREFIX FROM TRAIN DSET: default = 0 . 9 , help = ' momentum ' ) parser . add _ argument ( ' - - weight _ decay ' , type = float , default = 1 e - 4 , help = ' weight decay ' ) parser . add _ argument ( ' - - num _ class ' , type = int , default = 1 0 , help = ' number of classes ' ) parser . add _ argument ( ' - - cuda ' , type = int , default = 1 ) parser . add _ argument`
|
| 853 |
+
- It's actually thinking. Granted, the data said get_llm_suggestions, but llm_model is close enough:
|
| 854 |
+
- `. cos _ sim ( query _ embedding , semantic _ search . corpus _ embeddings ) [ 0 ] top _ results = torch . topk ( cos _ scores , k = semantic _ search . top _ k ) final = [ ] for _ , idx in zip ( top _ results [ 0 ] , top _ results [ 1 ] ) : final . append ( { ' text ' : semantic _ search . corpus [ idx ] } ) return final model = none tokenizer = none def get _ llm _ model ( model _ name , tokenizer , tokenizer ) : model _ name = model _ name . split ( " , " ) [ 0 ] model _ name = model _ name . split ( " , " ) [ 1 ] tokenizer = tokenizer ( tokenizer _ name , tokenizer = tokenizer , max _ length = tokenizer . max _ length , return _ tensors = model _ name , truncation = true ) if model _ name : model _ name = [ ] model _ name = model _ name . split ( | PREFIX FROM TRAIN DSET: . cos _ sim ( query _ embedding , semantic _ search . corpus _ embeddings ) [ 0 ] top _ results = torch . topk ( cos _ scores , k = semantic _ search . top _ k ) final = [ ] for _ , idx in zip ( top _ results [ 0 ] , top _ results [ 1 ] ) : final . append ( { ' text ' : semantic _ search . corpus [ idx ] } ) return final model = none tokenizer = none def get _ llm _ `
|
| 855 |
+
- This makes me very happy. Running black and signing off for the day.
|
| 856 |
+
- Feb 13
|
| 857 |
+
- What in the sussy data? `" + wi + " id : " + yl + <UNK> + wi ) except exception : err msg ( " please check your victim ' s profile url " ) sys . exit ( 1 ) def login ( self , target , password ) : try : self . br . open ( " https : / / facebook . com " ) self . br . select _ form ( nr = 0 ) self . br . form [ ' email ' ] = target self . br . form [ ' pass ' ]`
|
| 858 |
+
- Kind of learning? `_ pool = [ ' conv net w 3 2 ' , ' conv net w 6 4 ' , ' conv net w 1 2 8 ' , ' conv net w 2 5 6 ' ] elif eval _ mode = = ' d ' : model _ eval _ pool = [ ' conv net d 1 ' , ' conv net d 2 ' , ' conv net d 3 ' , ' conv net d 4 ' ] elif eval _ mode = = ' a ' : model _ eval _ pool = [ ' conv net d 1 ' , ' conv net d 3 ' , ' conv net d 3 ' , ' conv net d 5 ' , ' conv net d 6 ' , ' conv net d 6 ' , ' conv net d 6 ' , ' conv net d 6 ' , ' conv net d 6 ' ] elif eval _ mode = = ' d ' : model _ eval _ pool = [ ' conv net d 6 ' , ' conv net d 6 ' , ' conv net d 6 ' | PREFIX FROM TRAIN DSET: _ pool = [ ' conv net w 3 2 ' , ' conv net w 6 4 ' , ' conv net w 1 2 8 ' , ' conv net w 2 5 6 ' ] elif eval _ mode = = ' d ' : model _ eval _ pool = [ ' conv net d 1 ' , ' conv net d 2 ' , ' conv net d 3 ' , ' conv net d 4 ' ] elif eval _ mode = = ' a ' : model _ eval _ pool = [ `
|
| 859 |
+
- Now it will log generations.
|
| 860 |
+
- Feb 14
|
| 861 |
+
- So it's doing pretty well, especially considering we are now running eval on valset. Some repetition. Also it looks like I should've trained longer all along. Still continuous improvement.
|
| 862 |
+
- Repetition is decreasing but still present.
|
| 863 |
+
- So it's pretty good but not great. Time to hyperparam search model sizes.
|
| 864 |
+
- fixed a quick bug with the logging. Now running overnight.
|
| 865 |
+
- Here are the results!
|
| 866 |
+
- <img src="readme-imgs/comparison.png" width="40%">
|
| 867 |
+
-
|
| 868 |
+
- Feb 18
|
| 869 |
+
|
| 870 |
+
- Mostly hitting the ceiling
|
| 871 |
+
- Maybe more data?
|
| 872 |
+
- **Make a writeup**
|
| 873 |
+
|
| 874 |
+
- For yourself
|
| 875 |
+
- As a medium
|
| 876 |
+
- Peer-reviewed thing for high schoolers
|
| 877 |
+
|
| 878 |
+
- Journal of emerging investigators
|
| 879 |
+
- $35 fee, high acceptance rate.
|
| 880 |
+
- 7-8 months
|
| 881 |
+
- https://emerginginvestigators.org
|
| 882 |
+
|
| 883 |
+
- Theses: Effect of dataset on results, or hyperparameters. Tokenization. Etc.
|
| 884 |
+
|
| 885 |
+
- Will take a bit. Nick is happy to advise.
|
| 886 |
+
|
| 887 |
+
- Some sort of other metric?
|
| 888 |
+
- Possible metrics:
|
| 889 |
+
- Top k accuracy
|
| 890 |
+
- (not yet) AST parse (possibly too high of a bar)
|
| 891 |
+
- Longest common subsequence **between generated and actual** (kind of a scuffed metric)
|
| 892 |
+
- (x) Bleu score might actually work. investigate.
|
| 893 |
+
|
| 894 |
+
- Feb 20
|
| 895 |
+
- It's been a bit. Continuously scraping all python repos with >10 stars since 2015 now. Tricky to get around those pesky github rate limits
|
| 896 |
+
- Pretty crazy amounts. From 35k to 123k repos, from 52k to 186k files
|
| 897 |
+
- I scraped it all and am now starting to train. `v23-mega`
|
| 898 |
+
- Feb 21
|
| 899 |
+
- Wait, generations are getting better.
|
| 900 |
+
- `K=10,T=0.8: <data>all _ input _ nodes [ 0 ] for i in group ] <newline> <tab> <tab> <tab> <newline> <tab> <tab> <tab> out _ flat = [ <newline> <tab> <tab> <tab> <tab> graph . call _ function ( torch . ops . aten . view . default , args = ( i , [ i . meta [ " val " ] . numel ( ) ] ) ) for i in inputs <newline> <tab> <tab> <tab> ] <newline> <tab> <tab> <tab> out _ cat = graph . call _ function ( torch . ops . aten . cat . default </data>, args = ( i , [ i . meta [ " val " ] . numel ( ) ] ) ) <newline> <tab> <tab> <tab> out _ cat . append ( torch . ops . ops . impl . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . dynamic . view . `
|
| 901 |
+
- `x _ interp = machine . interpolate ( imgs [ 0 ] , imgs [ 1 ] , n _ interp ) <newline> <newline> <tab> opt </data>= torch . zeros ( len ( imgs ) ) <newline> <tab> for x _ interp in range ( 1 , args . batch _ size ) : <newline> <tab> <tab> x _ interp `
|
| 902 |
+
- So it's actually quite good until it generates too far out.
|
| 903 |
+
- And it's not memorized!
|
| 904 |
+
- More data is all you need??
|
| 905 |
+
- With regards to repetition, it always repeats a line. Like `nout = nin <newline> <tab> <tab> nout = nin <newline> <tab> <tab> nout = nin <newline> <tab> <tab>` or `<newline> options . add _ argument ( ' - - port ' , default = ' localhost ' , help = ' port ' ) <newline> options . add _ argument ( ' - - port ' , default = ' localhost ' , help = ' port ' ) <newline> options . add _ argument ( ' - - port ' , default = ' localhost ' , help = ' port ' ) <newline> options . add _ argument ( ' - - port ' , default = ' localhost ' , help = ' port ' ) <newline> options . `
|
| 906 |
+
- Feb 22
|
| 907 |
+
- Pretty good. `<data>0 " , <newline> " mozilla / 5 . 0 ( windows nt 5 . 1 ; wow 6 4 ) apple web kit / 5 3 7 . 3 6 ( khtml , like gecko ) firefox / 5 3 . 0 " <newline> ] <newline> <tab> return user _ agents [ int ( hashlib . md 5 ( str ( time . time ( ) ) . encode ( ) ) . hexdigest ( ) , 1 6 ) % len ( user _ agents ) ] <newline> <newline> def get _ integ _ hash ( query ) </data> : <newline> <tab> return user _ agents [ int ( k ) ] <newline> <newline> def extract _ urls ( query ) : <newline> <tab> response = request . read ( ) <newline> <tab> soup = beautiful soup ( response , " lxml " ) <newline> <tab> return response . get ( " html " ) <newline> <newline> def find _ html _ by _ token ( query ) : <newline> <tab> soup = beautiful soup ( response , " html " ) <newline> <tab> return soup . find _ all ( " tr " ) [ 0 ] ! `
|
| 908 |
+
- Feb 25
|
| 909 |
+
|
| 910 |
+
- Experimenting with minimizing repetition
|
| 911 |
+
- Thinking of hypotheses:
|
| 912 |
+
- Effect of `<something>` on `<metric>`
|
| 913 |
+
- Rather than human verification, settle for top-k accuracy or something similar
|
| 914 |
+
- Number of decoder layers?
|
| 915 |
+
- Architecture as a whole.
|
| 916 |
+
- Data chunk size?
|
| 917 |
+
- Double descent
|
| 918 |
+
- change dataset or model size
|
| 919 |
+
- X Tokenization? Very interesting question
|
| 920 |
+
- (~) Decoding strategies (topk, beam, greedy, etc).
|
| 921 |
+
- (\*) Curriculum learning
|
| 922 |
+
- A curriculum in which you start with easy samples and then move to hard samples performs better on topk accuracy than normal learning
|
| 923 |
+
- Curriculum learning leads to improved convergence speed (more specific, good)
|
| 924 |
+
- Curriculum learning
|
| 925 |
+
- With loss (_harder_)
|
| 926 |
+
- with token rarity
|
| 927 |
+
- Not doing curriculum learning
|
| 928 |
+
- anti curriculum learning
|
| 929 |
+
- What are the next steps?
|
| 930 |
+
- brief related-work search
|
| 931 |
+
- https://arxiv.org/pdf/2101.10382
|
| 932 |
+
- NLP people who say curriculum learning doesn't help: https://arxiv.org/pdf/2108.02170
|
| 933 |
+
- For finetuning https://aclanthology.org/2020.acl-main.542.pdf
|
| 934 |
+
- Sequence completion increasing length: https://aclanthology.org/2023.acl-long.666.pdf
|
| 935 |
+
- IDEA: increase sequence length during training for this?
|
| 936 |
+
- (\*) Curriculum learning FOR CODE https://arxiv.org/pdf/2407.10194
|
| 937 |
+
- tinypy dataset: https://www.kaggle.com/datasets/kamelmohammedyamani/tinypy-for-curriculum-learning
|
| 938 |
+
|
| 939 |
+
- March 4
|
| 940 |
+
- One solid dataset: check!
|
| 941 |
+
- Generalizable architecture: check!
|
| 942 |
+
- Testable hypothesis: check
|
| 943 |
+
- Experimental groups: different curriculum learning strategies (but actually just only do with and without curriculum)
|
| 944 |
+
- How to implement?
|
| 945 |
+
- Assign some sort of difficulty score to each sample
|
| 946 |
+
- Sort by difficulty
|
| 947 |
+
- Every epoch, add a few harder ones.
|
| 948 |
+
- Dynamically do it by loss? If stagnates, increase difficulty
|
| 949 |
+
- Straightforward and good.
|
| 950 |
+
- Metric
|
| 951 |
+
- Just use accuracy
|
| 952 |
+
- trainsteps to reach best acc ± eps (or loss)
|
| 953 |
+
- Qualitative
|
| 954 |
+
- Perplexity? Literally e^loss, so easy implementation, and others use it.
|
| 955 |
+
- Great! implemented schedule and difficulty score based on rarity.
|
| 956 |
+
- Things to do:
|
| 957 |
+
- Perplexity (now)
|
| 958 |
+
- train on a real (small) dataset and compare
|
| 959 |
+
- Run hypothesis experiments
|
| 960 |
+
- Curriculum learning, easy to hard, based off of median token rarity
|
| 961 |
+
- Control
|
| 962 |
+
- Anti-curriculum (easy, sort descending)
|
| 963 |
+
- March 11
|
| 964 |
+
- No meeting, just doing stuff myself. Check for implemented, doublecheck for ran
|
| 965 |
+
- Perplexity (now)
|
| 966 |
+
- train on a real (small) dataset and compare
|
| 967 |
+
- Run hypothesis experiments
|
| 968 |
+
- Curriculum learning, easy to hard, based off of median token rarity ✅
|
| 969 |
+
- Control ✅
|
| 970 |
+
- Anti-curriculum (easy, sort descending) ✅
|
| 971 |
+
- March 18
|
| 972 |
+
|
| 973 |
+
- Looking again at https://arxiv.org/pdf/2407.10194. Possibly cite it in the paper.
|
| 974 |
+
- Fixed anticurriculum
|
| 975 |
+
- add edit similarity, but it's kind of hard at the moment
|
| 976 |
+
- added hybrid and sequential schedules as described in section 5.
|
| 977 |
+
- What experiments? What hyperparams?
|
| 978 |
+
- `def train_curriculum(self, epochs=None, dataloader=None, noop=True, curriculum=False, anticurriculum=False, sequential=False, hybrid=False, loss_based=False):`
|
| 979 |
+
- No curriculum: `noop`, or default
|
| 980 |
+
- Incremental curriculum | not loss-based vs loss-based: `curriculum=True`, `loss_based=False|True`
|
| 981 |
+
- incremental anticurriculum (implemented properly this time) | not loss-based vs loss-based: `anticurriculum=True`, `loss_based=False|True`
|
| 982 |
+
- sequential curriculum | not loss-based vs loss-based: `sequential=True`, `loss_based=False|True`
|
| 983 |
+
- hybrid curriculum | not loss-based vs loss-based: `hybrid=True`, `loss_based=False|True`
|
| 984 |
+
- How are we selecting "hard" samples?
|
| 985 |
+
- Implemented loss-based
|
| 986 |
+
- TBD copyright issues
|
| 987 |
+
- Should resolve: unlicensed bad, but MIT/GPL/CC0 good
|
| 988 |
+
|
| 989 |
+
- April 1
|
| 990 |
+
- A silly bug for a silly day.
|
| 991 |
+
- Checked in with nick, the losses are maybe a bit _too_ similar. Bug perhaps?
|
| 992 |
+
- "too coincidental should raise alarms"
|
| 993 |
+
- Ugh so in train.py, passing in \*\*kwargs doesn't override others, but `noop=True` by default, and it's the first in the `if`/`elif` chain, so all other logic is skipped.
|
| 994 |
+
- Also did the license stuff, and refactored the schedule thing to be an enum. Funny story: enums reinstantiated are not `==`.
|
| 995 |
+
- April 2
|
| 996 |
+
- After some head-banging, I got the copying to work. But only 86,616 files, even though all the urls are included. Who knows why haha. 86k is ok when compared to 90k.
|
| 997 |
+
- 11:51 later, it's done
|
| 998 |
+
- but also only 86616 out of 97711 went
|
| 999 |
+
- So 11,000 files just kind of disappeared. Stuff happens I guess
|
| 1000 |
+
- Line 1000 !
|
| 1001 |
+
- It's 767 mb of files, so still bigger than the smaller corpus of 450 ish mb. Also, let's check the results of training (from the 450mb data)
|
| 1002 |
+
|
| 1003 |
+
- So uhh
|
| 1004 |
+
- noop schedule model did best 💀📉👎😭😡😤💥💔😩💻🤯📊🔴❌🧠
|
| 1005 |
+
- A picture is worth a thousand words.
|
| 1006 |
+
- _Always remember, setbacks are just setups for comebacks! Keep pushing forward, the next run will be better._
|
| 1007 |
+
- <img src="readme-imgs/v30-results.png" style="width: 30%">
|
| 1008 |
+
- <img src="readme-imgs/v30-v2.png" style="width: 30%">
|
| 1009 |
+
- will it though? "better" is an unclear heuristic. I mean, the "best" run is when it overfit to memorizing the alphabet. Like, I don't know what to say here. 1000 lines of MD is crazy.
|
| 1010 |
+
- Maybe it will. Maybe you always learn something new, and that's what matters. I don't know. Why do we do this, anyways? To learn. To be intellectually stimulated. And here we are. So keep pushing forward towards that goal. Towards fulfillment, whatever that means.
|
| 1011 |
+
|
| 1012 |
+
- Tuesday, april 22
|
| 1013 |
+
|
| 1014 |
+
- So I wanted to sort based on entropy. While implementing this, I noticed that, uh, I never used the rarity scores. I really should intermediarily log things more. I swear.
|
| 1015 |
+
- Rerunning, also added in the entropy sorting. I should question my code more. But if this one works well, then it's nice. So many challenges. Also, I had a lot of things occurring. So that's why it's been twenty days.
|
| 1016 |
+
- well, after "fixing" it, results are identical, so I'm going to take a closer look.
|
| 1017 |
+
|
| 1018 |
+
- So it's not fixed.
|
| 1019 |
+
|
| 1020 |
+
- <img src="readme-imgs/sorted_indices.png" style="width: 50%">
|
| 1021 |
+
|
| 1022 |
+
- Run
|
| 1023 |
+
- Run with entropy
|
| 1024 |
+
- Fill in outline
|
| 1025 |
+
|
| 1026 |
+
- Wednesday apr 23
|
| 1027 |
+
|
| 1028 |
+
- Quick entry, line 1024 !
|
| 1029 |
+
- <img src="readme-imgs/v30-v3-v4.png" style="width: 30%">
|
| 1030 |
+
- <img src="readme-imgs/v30-v3-v4-topk.png" style="width: 30%">
|
| 1031 |
+
- The second one is interesting because there was some overtaking. Several other metrics looked like the first. Finally, noop did best. :\(
|
| 1032 |
+
|
| 1033 |
+
- Tue may 6
|
| 1034 |
+
- Something today and then there will be nothing for two weeks because I will be away. Mostly doing some work in the doc, actually writing the paper. I'll put the PDF here one day, of course. Maybe it's all just very silly. My results show that it doesn't work, and it's not even unique because that's what the last guy came across. What more can you do? I am trying, hoping, dreaming of contributing meaningfully to the world, and I thought that this might be a chance. It's a start. It's something. It's not nothing. Plus, it's interesting to write a paper, and I did make a transformer, if nothing else. It's all about learning. And I did learn today. One day it'll all come together. I can report on these results. And I really hope I get in because it would suck if I didn't, especially with the 75% acceptance rate. So I do that, and then what? Several months of review later, I have a paper under my name. That's cool, at least. I don't know, but I guess this is what I'm doing.
|
| 1035 |
+
- Wednesday may 7
|
| 1036 |
+
|
| 1037 |
+
- Updated readme.
|
| 1038 |
+
|
| 1039 |
+
- Tue Jun 10
|
| 1040 |
+
- haha, it's been a bit. Looking over it with Nick, it's not fully converged. Also, yes, noop did best. Perhaps we train longer? What does training longer even mean? Do you just show it all the data at that point?
|
| 1041 |
+
- Start training on the real data (mega licensed) because it's more data for a larger model.
|
| 1042 |
+
- Make some diagrams for curriculums. Easier to communicate for understanding.
|
| 1043 |
+
- "catastrophic forgetting": if you use sliding windows (sequential), gradient updates overwrite the hard ones, you forget the easy stuff. Maybe model isn't doing as well. And we have empirical results: the worst-performing one is the sequential one.
|
| 1044 |
+
- Train for longer to actually see what's going on. Train for all the data after the schedule is done.
|
| 1045 |
+
- Curriculum learning is usually used as a warm-up, not a full-time training schedule. You train normally for several epochs after.
|
| 1046 |
+
- Fri jun 13
|
| 1047 |
+
- Started using adamw. I came back from a three-day trip and 2.5 out of the eight experiments had been completed (haha, the difference between ten and thirty epochs from my prior experiment runs), so I'm investigating methods to speed up training. Hopefully AdamW helps, and then I've implemented some aggressive garbage collection after each experiment. I tried model compilation (using inductor and torchscript), but it didn't play nicely with MPS. I tried profiling to reduce python overhead, but again it said obvious things were taking the most time (train_step, Optimizer.step#AdamW.step, aten::scaled_dot_product_attention, etc). I still implemented caching for the token rarity scores, so hopefully that helps. What mystifies me is this: I made a quick function to print out memory usage, and it all reported values less than a gigabyte. And yet: the python process uses around 40(±10) gigabytes of memory (as reported by Activity Monitor). MPS Memory: 0.54 GB allocated / CPU RAM: 71.7% used / Model parameters: 0.12 GB / Estimated optimizer state: 0.25 GB. (CPU RAM is accurate to what Activity Monitor reports because it uses psutil). Especially MPS because that's supposed to be where my dataset and the model weights live (although maybe half a gigabyte of data compressed into tokens and 30m parameters is smaller than I think).
|
| 1048 |
+
- I made some diagrams describing the schedules. They look pretty good!
|
| 1049 |
+
- <img src="readme-imgs/schedule-diagrams.png">
|
| 1050 |
+
- Still training. Retraining. Let's see if AdamW + gc is faster. Patience. 1.15 ish s/it, same as 1.1 baseline, so idk. I'll take faster convergence times, though. Then again, it's with respect to the other ones. So idk.
|
| 1051 |
+
- <img src="readme-imgs/tb-31-curves.png" style="width: 30%">
|
| 1052 |
+
- Checking TB, it stabilizes after 20, but 30 looks like a good loss curve, I guess. I don't know. 88% RAM is pressing, but I can get away with one other app open, so I'll just let this cook. Tried pin memory, it was about the same. Better to not mess with that stuff, let the OS manage memory as needed since RAM is so high for some reason even though the model is 120mb.
|
| 1053 |
+
- added crazy graceful shutdown so it saves exactly upon ctrl+c. Cursed SIGINT handlers and `except KeyboardInterrupt` and `if self._interrupted` logic. It works. Did I use copilot? Of course. Such is programming these days. I can feel it. It's becoming real now. haha 360 commits later.
|
| 1054 |
+
- perhaps it's unoptimized `_generate_square_subsequent_mask`. Perhaps I should just let it train in peace and hop on something else.
|
| 1055 |
+
- average fluctuation:
|
| 1056 |
+
```
|
| 1057 |
+
Time: 2025-06-14 02:36:56 / MPS: 0.82 GB / RAM: 88.6% used / data: 0.25 GB / Params: 0.12 GB / Optim (est): 0.25 GB /
|
| 1058 |
+
Time: 2025-06-14 03:45:40 / MPS: 0.82 GB / RAM: 83.9% used / data: 0.25 GB / Params: 0.12 GB / Optim (est): 0.25 GB /
|
| 1059 |
+
Time: 2025-06-14 04:54:29 / MPS: 0.82 GB / RAM: 75.8% used / data: 0.25 GB / Params: 0.12 GB / Optim (est): 0.25 GB /
|
| 1060 |
+
Time: 2025-06-14 06:02:54 / MPS: 0.82 GB / RAM: 85.4% used / data: 0.25 GB / Params: 0.12 GB / Optim (est): 0.25 GB /
|
| 1061 |
+
Time: 2025-06-14 07:11:30 / MPS: 0.82 GB / RAM: 92.2% used / data: 0.25 GB / Params: 0.12 GB / Optim (est): 0.25 GB /
|
| 1062 |
+
```
|
| 1063 |
+
- 75 to 92% is a big range. That's between 48gb and 58 gb used. I guess it's all those activations. Haha quadratic-time-complexity attention mechanisms. Well, no OOM so this is as fast as it's going to get.
|
| 1064 |
+
- Looking at `vm_stat`, swap space is being used. First of all, all those code helper (renderer) and code helper (plugin) processes. Perhaps I need to just quit vsc and use terminal while training.
|
| 1065 |
+
- Jun 16
|
| 1066 |
+
- aaahaha still training. continuously. On run 3.8.
|
| 1067 |
+
- Strange loss curve.
|
| 1068 |
+
- <img src="readme-imgs/funny-curve.png" style="width: 30%">
|
| 1069 |
+
- Perhaps this is AdamW, but the loss fluctuates in almost a regular pattern. Maybe batches aren't randomly sorted?
|
| 1070 |
+
- Jun 21
|
| 1071 |
+
- Took a look at the runs. 5.3/8 done, each takes on average 1.5 days.
|
| 1072 |
+
- four days left?? FOUR DAYS??? Today is Saturday and the meeting is on Tuesday. That's 3.5 at best. Well then. I guess two weeks wasn't enough time for me to get it together. To be fair, that's 8\*1.5=12 days straight training, and I lost three days to restart because _someone_ had the brilliant idea to use AdamW. which wasn't w to restart and produced not-w loss curves (weird regular fluctuation which, by the way, is consistent across runs). So actually the estimate is accurate.
|
| 1073 |
+
- haha noop config does slightly better but they are all about the same. The exact opposite of my hypothesis. Of course it did. Very slick. Not like I didn't see it coming, though. All the preliminary experiments on the smaller subset said as much, after all.
|
| 1074 |
+
- "At this point the only thing these variants have optimized is your emotional resilience" -ChatGPT
|
| 1075 |
+
- And what of this project? So many extractable lessons, so much resilience learned over nine months now (crazy) and 368 commits. But not much in terms of results. A paper is cool, but a paper that presents new things as doing better is cooler. Oh well. I'll take what I can get. Because what else are you going to do? What else are you going to do. My god this has been a journey. "Oh haha. I know what to do! I'll code a transformer. It'll be fun and quick. Two month project." Little did they know. It has been fun (at times), but it has _not_ been quick.
|
| 1076 |
+
- That feeling when the transformer trains you (to be more resilient, I guess?)
|
| 1077 |
+
- Jun 22
|
| 1078 |
+
- 5.8 done.
|
| 1079 |
+
- Also I guess I knew even before the preliminary experiments, because it also didn't work in the other paper. But I guess I thought I would be different. And then I did the small experiments, and it didn't work. But I guess I thought I would be different. Now, after two weeks of continuous training, the results are almost in and the verdict is that it didn't work. So I can't say this is a sudden realization.
|
README.md
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Code Completion
|
| 2 |
+
|
| 3 |
+
## Take a look at [notes.md](https://github.com/JBlitzar/code-completion/blob/main/NOTES.md)
|
| 4 |
+
|
| 5 |
+
## **1. Overview**
|
| 6 |
+
|
| 7 |
+
This project aims to develop a code completion model for Python. My process involves scraping GitHub repositories, preprocessing the data, implementing and training a transformer, and refining its performance to improve generalization and avoid overfitting.
|
| 8 |
+
|
| 9 |
+
## **2. Data Collection & Processing**
|
| 10 |
+
|
| 11 |
+
- See [dataset.py](dataset.py)
|
| 12 |
+
- Scraped GitHub repositories with the following filters ([link](scraping)):
|
| 13 |
+
- More than 100 stars
|
| 14 |
+
- After 2015 (to ensure usage of modern python)
|
| 15 |
+
- `.py` files between 1KB and 100KB
|
| 16 |
+
- Processed around 30,000 repositories and filtered them based on SHA hashes to avoid duplicates, resulting in about 500MB of text data and 53,000 files.
|
| 17 |
+
- Files are then formatted using `autopep8`
|
| 18 |
+
- Tokenization experiments:
|
| 19 |
+
- Started with BERT-based tokenization.
|
| 20 |
+
- Explored Byte Pair Encoding (BPE) with `yttm` but decided against it because of strange tokenization issues. Processing code is a more nuanced problem than natural language (different use of punctuation and whitespace, in particular).
|
| 21 |
+
- Eventually settled on a custom tokenizer ([link](https://github.com/JBlitzar/code-completion/blob/main/dataset.py#L178)), aggressively subdividing the code by first removing docstrings and comments, and then splitting based off of capitalization, spaces, and underscores while preserving newlines and indentation.
|
| 22 |
+
- I discovered that, despite the aggressive tokenization, there were still many tokens that were used only once or twice. In the end, I only preserved tokens that appeared more than ten times.
|
| 23 |
+
|
| 24 |
+
## **3. Model Development & Training**
|
| 25 |
+
|
| 26 |
+
- After learning about attention mechanisms and reading through various [resources](resources.md), I implemented it myself in [architecture.py](architecture.py). The design is very modular, each component usually being composed of a few smaller components glued together in a `Sequential`. While this was an excellent learning opportunity, and it was really great to truly understand how attention mechanisms worked inside of a transformer, because this project has so many moving parts, as I continued debugging, I used pytorch's builtin implementation of transformers for iteration. The [source code itself](https://github.com/pytorch/pytorch/blob/v2.6.0/torch/nn/modules/transformer.py#L57) is actually surprisingly similar.
|
| 27 |
+
- I created my own [training framework](trainingmanager.py), which I've used in the past to quickly train other models. Building off of this, I made a quick script to run [hyperparameter searches](hyperparam_tune.py).
|
| 28 |
+
- I implemented gradient clipping, weight decay, and Xavier normalization.
|
| 29 |
+
- What's amazing is that I was at this stage of the project _in November_. Spoiler alert: it's not November anymore. In previous, less complex ML projects (such as the VAE), I would do a few weeks of training and finetuning, but usually finish not long after that.
|
| 30 |
+
|
| 31 |
+
## **4. Challenges, Takeaways, & Further Experimentation**
|
| 32 |
+
|
| 33 |
+
### **Challenges**
|
| 34 |
+
|
| 35 |
+
- Many challenges arose while training.
|
| 36 |
+
- First of all, I was getting NaNs in the loss, due to incorrect casting in the Multi-Head Attention. At this point, I decided to use the builtin implementation in order to isolate the problem and prevent future issues like this.
|
| 37 |
+
- This next bug is probably the most intense one I faced during this project. An interesting shape issue arose where the model expected data in the shape of (seq_len, batch_size), but was receiving and outputting the reverse. What was insane was that in the loss calculation, I flattened the outputs, leading to a lack of actual errors. Pytorch is usually good at catching shape errors and making debugging easy, but if you transpose and then flatten, it would have the same shape as if you didn't.
|
| 38 |
+
|
| 39 |
+
- I only discovered this after actual weeks of debugging and scrutinization of other parts of the code. Finally, I was able to isolate it to this after training on a [purposely undersized dataset](dummy-data-dir/data/corpus.txt) to get the model to overfit, and noticing an incorrect pattern in the outputs.
|
| 40 |
+
- While this fixed the core issue, the bug persisted in a couple of ways:
|
| 41 |
+
- I previously had two places where I evaluated the model in `trainingmanager.py`: The training step and the validation step. I didn't fix the issue in the validation step, which caused it to persist and validation loss to increase rather than decrease over time, creating misleading graphs that looked like overfitting.
|
| 42 |
+
- I also saved the best checkpoint based off of lowest validation loss. Then, when [evaluating the model](eval.py), I loaded in the best checkpoint. Unfortunately, this led to loading in of bad models because the validation loss was messed up.
|
| 43 |
+
- The lesson here is to make sure you don't have the same code in multiple places, and to ensure when you change a part, that it won't have unintended side effects.
|
| 44 |
+
|
| 45 |
+
- Analysis of data is important to understand it and know how to treat it.
|
| 46 |
+
|
| 47 |
+
- For example, it was important to realize that many tokens were used only a couple of times. This allowed me to cut down on the number of unique tokens, thus reducing model size, without disrupting the diversity of the dataset.
|
| 48 |
+
|
| 49 |
+
- After relatively extensive hyperparameter tuning, I had determined that I was plateauing on performance.
|
| 50 |
+
- I implemented additional inference strategies (top-k, top-p, beam search) and metrics (perplexity, top-k accuracy)
|
| 51 |
+
- I curated a larger dataset, with a threshold of 10 github stars rather than 100 and more lenient size restrictions. This resulted in a dataset approximately four times bigger. After some license filtering, I ended up with a dataset approximately two times bigger.
|
| 52 |
+
|
| 53 |
+
- Most recently, I've been working on experimenting with different curriculums to see if it leads to faster convergence speeds. There was [another paper](https://arxiv.org/pdf/2407.10194) on the same topic which concluded that curriculum learning has no effect on code generation, instead finding that it greatly assisted in code execution. They used procedurally-generated synthetic data to control for which samples were easy, medium, or hard, but I was curious how it would pan out on real data rather than synthetic data. I've been experimenting with several different curriculums, using several different metrics, namely median token rarity, entropy, and loss-based metrics, with the curriculums being a traditional sequential curriculum, sliding window curriculum, anticurriculum, and three-stage hybrid curriculum.
|
| 54 |
+
|
| 55 |
+
## **5. Conclusion**
|
| 56 |
+
|
| 57 |
+
This project has come a long way, from early scraping experiments to training a functioning code completion model. The focus now is on scaling up data collection and refining the model to produce high-quality completions that generalize well across unseen codebases.
|
architecture.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torch.nn.functional as F
|
| 4 |
+
import numpy as np
|
| 5 |
+
|
| 6 |
+
DIM = 128
|
| 7 |
+
print(f"DIM IS SET TO {DIM}")
|
| 8 |
+
|
| 9 |
+
DEVICE = "mps" if torch.backends.mps.is_available() else "cpu"
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class MHA_SelfAttention(nn.Module):
    """Multi-head self-attention over batch-first input (batch, seq, dim).

    Wraps nn.MultiheadAttention, building an optional causal and/or padding
    attention mask and handling the (seq, batch, dim) layout the builtin
    module expects.
    """

    def __init__(self, embed_dim=DIM, num_heads=1, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if num_heads != 8:
            # Loud reminder: head count was reduced for small-scale experiments.
            print(
                "Num heads is not 8. This is a reminder to change this back after experimenting with smaller architectures"
            )
        self.mha = nn.MultiheadAttention(embed_dim, num_heads)
        self.num_heads = num_heads

    def forward(self, x, mask=None, triangle_mask=False):
        """Run self-attention over x.

        Args:
            x: (batch, seq, embed_dim) input.
            mask: optional padding mask, combined with the causal mask when
                both are given. NOTE(review): the unsqueeze pattern below
                implies shape (batch, seq) — confirm against the caller.
            triangle_mask: if True, apply a causal (lower-triangular) mask.
        """
        # if torch.isnan(x).any():
        #     print("NAN ALERT!")
        attn_mask = None
        seq_len = x.size(1)

        if triangle_mask:
            # True on/below the diagonal: positions a query is meant to see.
            attn_mask = torch.triu(torch.ones(seq_len, seq_len), diagonal=1) == 0
            attn_mask = attn_mask.to(x.device)

        if mask is not None:
            if attn_mask is not None:
                # Broadcast-combine padding (batch, 1, seq) with causal (1, seq, seq).
                attn_mask = mask.unsqueeze(1) & attn_mask.unsqueeze(0)
            else:
                attn_mask = mask.unsqueeze(1).expand(-1, seq_len, -1)

        if attn_mask is not None:
            # nn.MultiheadAttention wants one mask per head: (batch*heads, L, L).
            attn_mask = attn_mask.repeat(self.num_heads, 1, 1).float()
            # Convert to an additive float mask: blocked positions get -1e9;
            # kept positions are left at 1.0, a uniform per-row shift that
            # softmax cancels out.
            attn_mask = attn_mask.masked_fill(
                ~attn_mask.bool(), -1e9
            )  # https://github.com/pytorch/pytorch/issues/21518 we don't talk about how long that took to know. Later it seems like they also support bool, but idk 🤷

        # print(f"attn_mask shape: {attn_mask.shape if attn_mask is not None else None}")
        # if attn_mask is not None:
        #     print(f"attn_mask stats: max={attn_mask.max()}, min={attn_mask.min()}, mean={attn_mask.mean()}")

        # Builtin module (without batch_first) expects (seq, batch, dim).
        x = x.transpose(0, 1)
        # if torch.isnan(x).any():
        #     print("NAN ALERT!")
        attn_output, _ = self.mha(x, x, x, attn_mask=attn_mask)
        attn_output = attn_output.transpose(0, 1)
        # if torch.isnan(x).any() or torch.isinf(x).any():
        #     print("NAN ALERT!")

        # if torch.isnan(attn_output).any() or torch.isinf(attn_output).any():
        #     print("NAN or INF detected in attn_output!")
        #     print(f"Output stats: max={attn_output.max()}, min={attn_output.min()}, mean={attn_output.mean()}")

        return attn_output
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class FeedForward(nn.Module):
    """Pre-norm position-wise MLP: LayerNorm -> Linear -> GELU -> Linear -> GELU."""

    def __init__(self, dim=DIM, hidden_dim=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dim = dim
        if hidden_dim is None:
            hidden_dim = dim
        self.hidden_dim = hidden_dim

        layers = [
            nn.LayerNorm(self.dim),
            nn.Linear(self.dim, self.hidden_dim),
            nn.GELU(),
            nn.Linear(self.hidden_dim, self.dim),
            nn.GELU(),
        ]
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        out = self.block(x)
        return out
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class DecoderBlock(nn.Module):
    """Decoder layer: causal self-attention then a feed-forward MLP, each with a residual skip."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.sa = MHA_SelfAttention()
        self.block = FeedForward()

    def forward(self, x, padding_mask=None):
        # Residual around the masked (causal) self-attention sub-layer.
        attn_out = self.sa(x, mask=padding_mask, triangle_mask=True)
        x = attn_out + x

        # Residual around the feed-forward sub-layer.
        ff_out = self.block(x)
        x = ff_out + x

        return x
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class PositionalEncoding(nn.Module):
    """Adds fixed sinusoidal position embeddings to a (batch, seq, DIM) input."""

    def __init__(self, max_len=5000):
        super().__init__()
        pos = torch.arange(0, max_len).unsqueeze(1)
        freqs = torch.exp(torch.arange(0, DIM, 2) * -(np.log(10000.0) / DIM))
        table = torch.zeros(max_len, DIM)
        table[:, 0::2] = torch.sin(pos * freqs)
        table[:, 1::2] = torch.cos(pos * freqs)
        # Buffer: moves with the module's device but is not a trainable parameter.
        self.register_buffer("pe", table.unsqueeze(0))

    def forward(self, x):
        n = x.size(1)
        return x + self.pe[:, :n, :].to(x.device)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class DecoderTransformer(nn.Module):
    """Decoder-only transformer language model: embedding + sinusoidal positions,
    a stack of causal DecoderBlocks, and a linear head producing vocab logits
    (softmax left to the loss).
    """

    def __init__(self, num_blocks=6, vocab_size=100, *args, **kwargs):
        super().__init__(*args, **kwargs)

        if vocab_size == 100:
            # Hard stop: the default vocab_size is a sentinel for "forgot to set it".
            print(
                "WARNING: vocab_size is set to 100. You probably mean to set it to something else. Comment out the exit line below if this was intentional"
            )
            exit()

        self.num_blocks = num_blocks
        self.decoders = nn.ModuleList([DecoderBlock() for _ in range(num_blocks)])
        self.pos_encoding = PositionalEncoding()
        self.enc_embedding = nn.Embedding(vocab_size, DIM)

        # Output head: hidden states -> vocabulary logits.
        self.oblock = nn.Sequential(
            nn.Linear(DIM, vocab_size),
            # nn.Softmax(dim=-1)
        )

        # Kaiming init for every weight matrix with >1 dim (biases/LayerNorm skipped).
        # https://github.com/hyunwoongko/transformer
        @torch.no_grad()
        def _initialize_weights(m):
            if hasattr(m, "weight") and m.weight.dim() > 1:
                nn.init.kaiming_uniform_(m.weight.data)

        self.apply(_initialize_weights)

        print(
            f"Model initialized with {sum(p.numel() for p in self.parameters() if p.requires_grad)} params."
        )

    def forward(self, x, padding_mask=None):
        """Map token ids (batch, seq) to logits (batch, seq, vocab_size).

        Accepts a (tokens, mask) tuple straight from the dataloader.
        """
        # if torch.isnan(x).any():
        #     print("NAN ALERT!")
        if isinstance(x, tuple):
            x, padding_mask = x

        if padding_mask is not None:
            # Flip polarity so True marks padding positions.
            # NOTE(review): assumes the incoming mask uses 1 = real token
            # (HF tokenizer attention_mask convention) — confirm at the caller.
            padding_mask = padding_mask == 0

        x = self.pos_encoding(self.enc_embedding(x))

        # if torch.isnan(x).any():
        #     print("NAN ALERT!")

        for didx, dblock in enumerate(self.decoders):

            x = dblock(x, padding_mask=padding_mask)

        x = self.oblock(x)

        return x
|
archive-misc/architecture-v1.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torch.nn.functional as F
|
| 4 |
+
import numpy as np
|
| 5 |
+
|
| 6 |
+
DIM = 512
|
| 7 |
+
|
| 8 |
+
DEVICE = "mps" if torch.backends.mps.is_available() else "cpu"
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class MHA_SelfAttention(nn.Module):
    """Multi-head self-attention over batch-first input (batch, seq, dim).

    Archived v1 wrapper around nn.MultiheadAttention.
    """

    def __init__(self, embed_dim=DIM, num_heads=8, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.mha = nn.MultiheadAttention(embed_dim, num_heads)
        self.num_heads = num_heads

    def forward(self, x, mask=None, triangle_mask=False):
        attn_mask = None
        seq_len = x.size(1)

        if triangle_mask:
            # True on/below the diagonal (positions intended to be visible).
            # NOTE(review): nn.MultiheadAttention treats True in a *bool*
            # attn_mask as "not allowed to attend"; passing this mask without
            # conversion likely inverts the intended causal masking. The
            # non-archived architecture.py converts to an additive float mask
            # instead — confirm before reusing this version.
            attn_mask = torch.triu(torch.ones(seq_len, seq_len), diagonal=1) == 0
            attn_mask = attn_mask.to(x.device)

        if mask is not None:
            if attn_mask is not None:
                # Combine padding (batch, 1, seq) with causal (1, seq, seq).
                attn_mask = mask.unsqueeze(1) & attn_mask.unsqueeze(0)
            else:
                attn_mask = mask.unsqueeze(1).expand(-1, seq_len, -1)

        if attn_mask is not None:
            # One mask per head: (batch * num_heads, seq, seq).
            attn_mask = attn_mask.repeat(self.num_heads, 1, 1)

        # Builtin module (without batch_first) expects (seq, batch, dim).
        x = x.transpose(0, 1)
        attn_output, _ = self.mha(x, x, x, attn_mask=attn_mask)
        attn_output = attn_output.transpose(0, 1)

        return attn_output
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class MHA_EncoderDecoderAttention(nn.Module):
    """Cross-attention: queries from the decoder stream, keys/values from the encoder output."""

    def __init__(self, embed_dim=DIM, num_heads=8, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.mha = nn.MultiheadAttention(embed_dim, num_heads)
        self.num_heads = num_heads

    def forward(self, x, encoded, mask=None):
        attn_mask = None
        seq_len_x = x.size(1)
        seq_len_encoded = encoded.size(1)

        if mask is not None:
            # Expand padding mask to (batch, tgt_len, src_len), then repeat per head.
            # NOTE(review): as with MHA_SelfAttention above, a bool attn_mask's
            # True means "blocked" for nn.MultiheadAttention — verify polarity.
            attn_mask = mask.unsqueeze(1).expand(-1, seq_len_x, seq_len_encoded)
            attn_mask = attn_mask.repeat(self.num_heads, 1, 1)

        # Builtin module expects (seq, batch, dim).
        x = x.transpose(0, 1)
        encoded = encoded.transpose(0, 1)

        attn_output, _ = self.mha(x, encoded, encoded, attn_mask=attn_mask)

        attn_output = attn_output.transpose(0, 1)

        return attn_output
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class FeedForward(nn.Module):
    """Position-wise MLP: LayerNorm, expand, GELU, project back, GELU."""

    def __init__(self, dim=DIM, hidden_dim=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dim = dim
        self.hidden_dim = dim if hidden_dim is None else hidden_dim

        norm = nn.LayerNorm(self.dim)
        expand = nn.Linear(self.dim, self.hidden_dim)
        contract = nn.Linear(self.hidden_dim, self.dim)
        self.block = nn.Sequential(norm, expand, nn.GELU(), contract, nn.GELU())

    def forward(self, x):
        return self.block(x)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
class EncoderBlock(nn.Module):
    """Encoder layer: self-attention and feed-forward MLP, each wrapped in a residual connection."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.sa = MHA_SelfAttention()
        self.block = FeedForward()

    def forward(self, x, padding_mask=None):
        # Residual around self-attention (mask passed positionally; no causal mask).
        x = self.sa(x, padding_mask) + x

        # Residual around the feed-forward sub-layer.
        x = self.block(x) + x

        return x
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class DecoderBlock(nn.Module):
    """Decoder layer: causal self-attention, encoder-decoder cross-attention,
    then a feed-forward MLP — each sub-layer wrapped in a residual skip."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.sa = MHA_SelfAttention()
        self.eda = MHA_EncoderDecoderAttention()
        self.block = FeedForward()

    def forward(self, x, encoded, padding_mask=None):
        # Causal self-attention with residual.
        x = self.sa(x, mask=padding_mask, triangle_mask=True) + x

        # Cross-attention over the encoder output with residual.
        x = self.eda(x, encoded, mask=padding_mask) + x

        # Position-wise feed-forward with residual.
        x = self.block(x) + x

        return x
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class PositionalEncoding(nn.Module):
    """Fixed sinusoidal position embeddings added to a (batch, seq, DIM) input."""

    def __init__(self, max_len=5000):
        super().__init__()
        idx = torch.arange(0, max_len).unsqueeze(1)
        angle_rates = torch.exp(torch.arange(0, DIM, 2) * -(np.log(10000.0) / DIM))
        enc = torch.zeros(max_len, DIM)
        enc[:, 0::2] = torch.sin(idx * angle_rates)
        enc[:, 1::2] = torch.cos(idx * angle_rates)
        # Registered as a buffer: not trained, but tracked by .to()/.state_dict().
        self.register_buffer("pe", enc.unsqueeze(0))

    def forward(self, x):
        length = x.size(1)
        return x + self.pe[:, :length, :].to(x.device)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class Transformer(nn.Module):
    """Archived v1 encoder-decoder transformer over token ids.

    NOTE(review): the decoder consumes the encoder output directly as its own
    input stream (there is no separate target sequence) — confirm this was
    intentional for the code-completion setup.
    """

    def __init__(self, num_blocks=6, vocab_size=30522, seq_len=100, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.num_blocks = num_blocks
        self.encoders = nn.ModuleList([EncoderBlock() for _ in range(num_blocks)])
        self.decoders = nn.ModuleList([DecoderBlock() for _ in range(num_blocks)])
        self.pos_encoding = PositionalEncoding()
        self.enc_embedding = nn.Embedding(vocab_size, DIM)

        # Output head: hidden states -> vocab logits (softmax left to the loss).
        self.oblock = nn.Sequential(
            nn.Linear(DIM, vocab_size),
            # nn.Softmax(dim=-1)
        )

    def forward(self, x, padding_mask=None):
        """Map token ids (batch, seq) to logits (batch, seq, vocab_size)."""
        # Accept a (tokens, mask) tuple straight from the dataloader.
        if isinstance(x, tuple):
            x, padding_mask = x

        if padding_mask is not None:
            # Flip polarity so True marks padding.
            # NOTE(review): assumes the incoming mask uses 1 = real token
            # (HF tokenizer attention_mask convention) — confirm at the caller.
            padding_mask = padding_mask == 0

        x = self.pos_encoding(self.enc_embedding(x))

        for eidx, eblock in enumerate(self.encoders):
            x = eblock(x, padding_mask=padding_mask)

        encoded = x  # No need to clone

        x = self.pos_encoding(x)

        for didx, dblock in enumerate(self.decoders):
            x = dblock(x, encoded, padding_mask=padding_mask)

        x = self.oblock(x)

        return x
|
archive-misc/ascii_percentage.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def calculate_ascii_percentage(file_path):
    """Return the percentage (0-100) of bytes in *file_path* that are ASCII.

    Returns 0 for an empty file, and None (after printing the error) if the
    file cannot be read.
    """
    try:
        # Only the I/O is inside the try; anything else failing should surface.
        with open(file_path, "rb") as f:
            data = f.read()
    except OSError as e:
        print(f"Error: {e}")
        return None

    total_chars = len(data)
    if total_chars == 0:
        return 0

    # Iterating bytes yields ints in [0, 255]; ASCII is the low half,
    # so the previous `0 <= c` lower bound was redundant.
    ascii_chars = sum(1 for c in data if c <= 127)
    return (ascii_chars / total_chars) * 100
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Measure the ASCII ratio of the processed training corpus at import time.
file_path = os.path.expanduser(
    "~/torch_datasets/github-python/corpus/data/corpus_processed.txt"
)
ascii_percentage = calculate_ascii_percentage(file_path)
if ascii_percentage is not None:
    print(f"Percentage of ASCII characters: {ascii_percentage:.2f}%")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def find_unicode_passages(file_path, threshold=0.5, min_length=20):
    """
    Prints passages with a high density of non-ASCII characters.

    Args:
        file_path (str): Path to the input file.
        threshold (float): Proportion of non-ASCII characters to flag a line.
        min_length (int): Minimum length of a line to be considered.
    """
    try:
        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
            for line_num, line in enumerate(f, start=1):
                stripped = line.strip()
                total_chars = len(stripped)
                if total_chars < min_length:
                    continue  # Skip short lines

                # Count over the same stripped text used for the denominator;
                # previously the count ran over the raw line, so stripped
                # non-ASCII whitespace could skew the density.
                non_ascii_count = sum(1 for c in stripped if ord(c) >= 128)
                if non_ascii_count / total_chars > threshold:
                    print(f"Line {line_num}: {stripped}")
                    print(
                        f" -> Non-ASCII Density: {non_ascii_count / total_chars:.2%}"
                    )
    except OSError as e:
        print(f"Error: {e}")
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Example usage
# Reports corpus lines that are mostly non-ASCII (likely mis-encoded text).
find_unicode_passages(file_path, threshold=0.5, min_length=20)
|
archive-misc/bpe_test.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import random

import youtokentome as yttm

# Smoke test for the yttm BPE tokenizer on random synthetic text.
# NOTE(review): this load of "e.model" is overwritten by the reload below and
# appears to be leftover scratch code.
bpe = yttm.BPE(model="e.model")

train_data_path = "train_data.txt"
model_path = "example.model"

# Generating random file with training data
# 10000 lines with 100 characters in each line
n_lines = 10000
n_characters = 100
with open(train_data_path, "w") as fout:
    for _ in range(n_lines):
        print("".join([random.choice("abcd ") for _ in range(n_characters)]), file=fout)

# Generating random text
# Note: includes "e", which never appears in the training alphabet.
test_text = "".join([random.choice("abcde ") for _ in range(100)])

# Training model
yttm.BPE.train(data=train_data_path, vocab_size=5000, model=model_path)

# Loading model
bpe = yttm.BPE(model=model_path)

# Two types of tokenization
print(bpe.encode([test_text], output_type=yttm.OutputType.ID))
print(bpe.encode([test_text], output_type=yttm.OutputType.SUBWORD))
|
archive-misc/check-memorization.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os

# Memorization probe: checks whether a model-generated snippet appears
# verbatim in the training corpus (ignoring spacing and case).
with open(
    os.path.expanduser(
        "~/torch_datasets/github-python/mega_corpus/data/corpus_processed.txt"
    ),
    "r",
) as f:
    data = f.read()
# NOTE(review): each assignment below overwrites the previous one — only the
# final `to_check` snippet is actually tested.
to_check = """<newline> logger . info ( f " initial validation samples in first step . . . " ) <newline> model . eval ( ) <newline> <newline> gen _ validation _ samples ( validation _ pipeline , args , wandb , samples _ dir , train _ ts , train _ steps ) <newline> <newline> model . train ( ) <newline>"""
to_check = """' nonpayable ' , ' type ' : ' function ' } , { ' inputs ' : [ { ' internaltype ' : ' uint 2 5 6 ' , ' name ' : ' ' , ' type ' : ' uint 2 5 6 ' } ] , ' name ' : ' ' , ' outputs"""

to_check = """parser . add _ argument ( ' - - save _ folder ' , type = str , default = ' data / save ' , help = ' save folder ' )"""
to_check = """= torch . zeros ( len ( imgs ) ) <newline> <tab> for x _ interp in range ( 1 , args . batch _ size ) :"""
# to_check = """x _ interp = machine . interpolate ( imgs [ 0 ] , imgs [ 1 ] , n _ interp )""" # should be true
# to_check = "<UNK>"
# Normalize both sides so tokenizer spacing/casing can't hide a match.
to_check = to_check.replace(" ", "").lower()
data = data.replace(" ", "").lower()

print(to_check in data)
|
archive-misc/concatenator.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
import glob
from tqdm import tqdm

# Concatenate every scraped .py file into one corpus file, with each file
# preceded by a "# <FILESEP>" marker line so boundaries survive downstream.
folder = os.path.expanduser("~/torch_datasets/github-python/mega_licensed_all_files")
output_file = os.path.expanduser(
    "~/torch_datasets/github-python/mega_licensed_corpus/concatenated.py"
)

with open(output_file, "w", encoding="utf-8") as out_f:
    for file in tqdm(glob.glob(os.path.join(folder, "*.py"))):
        out_f.write("\n# <FILESEP>\n")
        try:
            with open(file, "r", encoding="utf-8", errors="ignore") as in_f:
                out_f.write(in_f.read())
        except Exception as e:
            # Best-effort: record the failure in the corpus rather than abort the run.
            out_f.write(f"\n# Skipping {file} due to error: {e}\n")

print(f"Concatenation complete: {output_file}")
|
archive-misc/dataset.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torchvision.datasets as dset
|
| 2 |
+
from torch.utils.data import Dataset
|
| 3 |
+
import torch
|
| 4 |
+
from torch.utils.data import DataLoader
|
| 5 |
+
import glob
|
| 6 |
+
import os
|
| 7 |
+
from transformers import AutoTokenizer
|
| 8 |
+
from torch.utils.data import Dataset, DataLoader, random_split
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class GithubDataset(Dataset):
    """Dataset of scraped GitHub Python files, tokenized with the BERT tokenizer.

    Each item is one source file, truncated/padded to ``max_length`` tokens,
    returned as (input_ids, attention_mask).
    """

    def __init__(
        self,
        root_dir=os.path.expanduser("~/torch_datasets/github-python/corpus"),
        train=False,
        max_length=512,
    ):
        # NOTE(review): `train` is accepted but never used — confirm intent.
        self.root = root_dir
        self.file_list = glob.glob(os.path.join(root_dir, "*.*"))
        self.tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
        self.max_length = max_length

    def __len__(self):
        return len(self.file_list)

    def __getitem__(self, idx):
        # Read the idx-th file; decoding errors are ignored so odd encodings
        # don't crash loading.
        path = self.file_list[idx]

        with open(path, "r", encoding="utf-8", errors="ignore") as file:
            code = file.read()

        encoding = self.tokenizer(
            code,
            padding="max_length",
            truncation=True,
            max_length=self.max_length,
            return_tensors="pt",
        )

        # Drop the batch dimension added by return_tensors="pt".
        input_ids = encoding["input_ids"].squeeze(0)
        attention_mask = encoding["attention_mask"].squeeze(0)

        # print(encoding.keys)

        return input_ids, attention_mask
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
dataset = GithubDataset() # root_dir="./test-data/")
|
| 50 |
+
dataset = GithubDataset(root_dir="./test-data/")
|
| 51 |
+
train_size = int(0.8 * len(dataset))
|
| 52 |
+
test_size = len(dataset) - train_size
|
| 53 |
+
|
| 54 |
+
train_dataset, test_dataset = random_split(dataset, [train_size, test_size])
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def get_train_dataset():
|
| 58 |
+
return train_dataset
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def get_test_dataset():
|
| 62 |
+
|
| 63 |
+
return test_dataset
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def get_dataloader(dataset, batch_size=64):
    """Wrap ``dataset`` in a shuffling DataLoader with the given batch size."""
    loader = DataLoader(dataset, batch_size=batch_size, shuffle=True)
    return loader
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
if __name__ == "__main__":
    # Smoke test: decode one training sample back into text.
    train_split = get_train_dataset()
    print("Number of samples: ", len(train_split))

    token_ids, mask = train_split[4]
    tok = AutoTokenizer.from_pretrained("bert-base-uncased")
    for token_id in token_ids:
        print(tok.decode(token_id.item()), end=" ")
    print()
|
archive-misc/dset_splitter.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import tqdm
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
# get through all files in inputted path, put the first 80% in one file concatenated and the last 20% in another
def split_files(input_path, output_path1, output_path2):
    """Concatenate the .py files in ``input_path`` into two corpus files.

    The first 80% of the (alphabetically sorted) files go into
    ``output_path1``, the remaining 20% into ``output_path2``.  A runnable
    marker line is written after each file so the original file boundaries
    can be recovered later.
    """
    files = [
        f
        for f in os.listdir(input_path)
        if os.path.isfile(os.path.join(input_path, f)) and f.endswith(".py")
    ]

    # Sort files so the split is deterministic across runs/platforms.
    files.sort()

    split_index = int(len(files) * 0.8)
    first_80_files = files[:split_index]
    last_20_files = files[split_index:]

    # The two output files are built identically; the duplicated loop was
    # factored into a helper (a leftover debug print of the directory
    # listing was removed at the same time).
    _concat_files(input_path, first_80_files, output_path1)
    _concat_files(input_path, last_20_files, output_path2)


def _concat_files(input_path, filenames, output_path):
    """Append each named file to ``output_path``, followed by a separator line."""
    # Read and write as UTF-8 explicitly so the result does not depend on
    # the platform's default locale encoding.
    with open(output_path, "w", encoding="utf-8") as outfile:
        for fname in tqdm.tqdm(filenames):
            with open(
                os.path.join(input_path, fname), encoding="utf-8", errors="ignore"
            ) as infile:
                outfile.write(infile.read())
            outfile.write("\nprint('---FILESEP---')\n")
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
# Example usage — only runs when executed as a script, so importing this
# module (e.g. to reuse split_files elsewhere) has no side effects.
if __name__ == "__main__":
    input_path = os.path.expanduser("~/torch_datasets/github-python/all_trains")
    output_path1 = os.path.expanduser("~/torch_datasets/github-python/80")
    output_path2 = os.path.expanduser("~/torch_datasets/github-python/20")
    split_files(input_path, output_path1, output_path2)
|
archive-misc/entropy_upper_bound.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from collections import Counter
|
| 3 |
+
import math
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def calculate_topk_upper_bound(file_path, k=5):
    """
    Calculates the upper bound for top-k accuracy based on the tokenized text file.

    The bound is the summed empirical probability of the k most frequent
    tokens: a context-free unigram predictor that always proposes those k
    tokens achieves exactly this accuracy, and no context-free predictor
    can do better.

    Args:
        file_path (str): Path to the input text file.
        k (int): Top-k accuracy value to compute.

    Returns:
        float: The upper bound for top-k accuracy, 0 for an empty file,
        or None if the file could not be read.
    """
    try:
        # Read the file and tokenize by whitespace.
        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
            text = f.read()
    except OSError as e:
        # Narrowed from a blanket `except Exception`: only I/O can
        # legitimately fail here; anything else is a bug worth surfacing.
        print(f"Error: {e}")
        return None

    tokens = text.split()
    total_tokens = len(tokens)

    if total_tokens == 0:
        return 0

    # Token frequencies -> empirical probabilities.
    token_counts = Counter(tokens)
    token_probabilities = {
        token: count / total_tokens for token, count in token_counts.items()
    }

    # Shannon entropy of the unigram distribution (reported for context).
    entropy = -sum(p * math.log2(p) for p in token_probabilities.values())

    # Counter.most_common(k) replaces the previous full sort of the whole
    # vocabulary: O(n log k) instead of O(n log n), identical result.
    top_k_prob = sum(count for _, count in token_counts.most_common(k)) / total_tokens

    # Print entropy and top-k accuracy upper bound
    print(f"Entropy: {entropy:.4f} bits")
    print(f"Top-{k} Accuracy Upper Bound: {top_k_prob:.4f}")
    return top_k_prob
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# Example usage: score the processed corpus file.
_corpus = "~/torch_datasets/github-python/corpus/data/corpus_processed.txt"
file_path = os.path.expanduser(_corpus)

top_k_accuracy = calculate_topk_upper_bound(file_path, k=5)

# None signals the file could not be read; skip the summary line then.
if top_k_accuracy is not None:
    print(f"Upper Bound for Top-5 Accuracy: {top_k_accuracy:.4f}")
|
archive-misc/eval_old.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
|
| 3 |
+
# from architecture import DecoderTransformer
|
| 4 |
+
from builtin_architecture import make_model
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
import time
|
| 8 |
+
from dataset import dataset, get_train_dataset
|
| 9 |
+
import torch.nn.functional as F
|
| 10 |
+
|
| 11 |
+
# Checkpoint directory of the run being evaluated (earlier run names are
# kept in the trailing comment for reference).
EXPERIMENT_DIRECTORY = "runs/code-decoder-v10-vanilla-smaller-batchfirst"  # "runs/code-decoder-v9-vanilla-smaller"#"runs/code-decoder-v8-smaller" # "runs/code-decoder-v4-improved" # shakespeare-test, run1-python

device = "mps" if torch.backends.mps.is_available() else "cpu"

# Force CPU regardless of MPS availability — the line above is effectively
# dead. Presumably MPS inference was unreliable for this model; TODO confirm
# before removing the override.
device = "cpu"

# net = DecoderTransformer(vocab_size=199, num_blocks=1)
net = make_model()
net.to(device)

# Load the best checkpoint's weights; weights_only=True avoids unpickling
# arbitrary objects from the checkpoint file.
net.load_state_dict(
    torch.load(os.path.join(EXPERIMENT_DIRECTORY, "ckpt", "best.pt"), weights_only=True)
)


# Sanity check: report NaNs in any weight tensor...
for name, param in net.named_parameters():
    if torch.isnan(param).any():
        print(f"NaN found in {name}")
# ...and in gradients. After a plain state-dict load no backward pass has
# run, so param.grad is None and this loop is typically a no-op.
for name, param in net.named_parameters():
    if param.grad is not None and torch.isnan(param.grad).any():
        print(f"NaN found in gradients of {name}")
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
# Token id 0 is used as padding; this setup has no separator token.
pad_token_id = 0
sep_token_id = None

input_text = input("Prompt: ")
max_length = 100  # number of tokens to generate below


# Encode the prompt with the dataset's tokenizer manager.
input_ids = torch.tensor(dataset.manager.encode(input_text), dtype=int)
print(input_ids.shape)
attention_mask = dataset.manager.attention_mask(input_ids.squeeze(0)).to(device)


# Round-trip the prompt through decode (sanity check of the tokenizer).
generated_text = dataset.manager.decode(input_ids)

print(generated_text)
# NOTE(review): the prompt is discarded here — generation restarts below
# from a single random token, so input_text has no effect on the output.
generated_text = ""
input_ids = torch.randint(199, (1, 1), dtype=torch.long).to(device)
|
| 51 |
+
|
| 52 |
+
net.eval()  # Set model to evaluation mode
temp = 1.0  # Balanced temperature

# Autoregressive sampling loop: feed the running sequence back into the
# model and sample one token per iteration.
for _ in range(max_length):
    with torch.no_grad():
        output = net(input_ids)  # Model output
        logits = F.log_softmax(output[-1], dim=-1)  # Normalize logits
        word_weights = logits.div(temp).cpu()  # Scale by temperature
        # NOTE(review): word_weights are log-probabilities (all <= 0), but
        # torch.multinomial below requires non-negative weights — this
        # looks like it needs .exp(); confirm against the version that ran.

        # Top-k sampling
        top_k = 10  # Adjust based on your vocabulary size
        vocab_size = word_weights.size(0)
        top_k = min(top_k, vocab_size)  # Ensure top_k is valid

        top_probs, top_indices = torch.topk(word_weights, k=top_k)

        # Handle edge case: only one valid token
        if top_probs.size(0) == 1:
            word_idx = top_indices[0]  # Directly choose the only available token
        else:
            sampled_idx = torch.multinomial(top_probs, 1).item()
            word_idx = top_indices[sampled_idx]

        # Decode and append token
        print(word_idx)
        predicted_token = dataset.manager.decode(word_idx.item())
        print(predicted_token, end=" ")
        generated_text += predicted_token

        # Debug dumps of the sampling distribution for this step.
        print("Word Weights:", word_weights)
        print("Top Probabilities:", top_probs)
        print("Top Indices:", top_indices)

        # Update input sequence
        word_tensor = torch.tensor([[word_idx]], dtype=torch.long).to(device)
        input_ids = torch.cat([input_ids, word_tensor], dim=1)

print("\nGenerated text:", generated_text)
with open("output.txt", "w+") as f:
    f.write(generated_text)
|
archive-misc/f1_score.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sklearn.metrics import f1_score

# Import smoke test: confirm sklearn is importable, then bail out.
print("imported!")
# NOTE(review): exit() makes everything below unreachable; the commented
# F1 experiment is kept verbatim for reference.
exit()
from copy import deepcopy
import numpy as np

# y_true = [0] * 50 + [0] * 5 + [0] * 10 + [1] * 8 + [1] * 40 + [1] * 12+ [2] * 5 + [2] * 7 + [2] * 50
# y_pred = [0] * 50 + [1] * 5+ [2] * 10 + [0] * 8 + [1] * 40+ [2] * 12+ [0] * 5 + [1] * 7 + [2] * 50

# y_true = deepcopy(y_true)
# y_pred = deepcopy(y_pred)

# print(y_true)


# y_true = np.array(y_true)
# y_pred = np.array(y_pred)
# print(f1_score(y_true, y_pred, average='sample'))
|
archive-misc/plot_metrics.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import glob
|
| 3 |
+
import pandas as pd
|
| 4 |
+
import matplotlib.pyplot as plt
|
| 5 |
+
import numpy as np
|
| 6 |
+
import re
|
| 7 |
+
|
| 8 |
+
def extract_run_name(filename):
    """Extract the run name from a tensorboard CSV export filename."""
    basename = os.path.basename(filename)
    # Run name is the segment between the last '_' pair and the
    # '_tensorboard.csv' suffix.
    match = re.search(r'_([^_]+)(?:-loss)?_tensorboard\.csv$', basename)
    if match is not None:
        return match.group(1)
    # Fallback for filenames that don't match the expected pattern.
    return basename.split('_')[1].split('-')[0]
|
| 16 |
+
|
| 17 |
+
def setup_plot_style():
    """Apply publication-quality styling to plots."""
    style = {
        'font.family': 'serif',
        'font.size': 12,
        'axes.labelsize': 14,
        'axes.titlesize': 16,
        'legend.fontsize': 10,
        'figure.dpi': 300,
        'figure.figsize': (10, 6),
        'lines.linewidth': 2.5,
        'axes.grid': True,
        'grid.linestyle': '--',
        'grid.alpha': 0.6,
        'axes.spines.top': False,
        'axes.spines.right': False,
    }
    plt.rcParams.update(style)
|
| 34 |
+
|
| 35 |
+
def get_metric_label(metric_name):
    """Return a human-readable label for the metric."""
    labels = {
        'loss_epoch': 'Loss',
        'perplexityval_epoch': 'Validation Perplexity',
        'topkacc_epoch': 'Top-K Accuracy',
        'acc_trainstep': 'Training Accuracy',
    }
    if metric_name in labels:
        return labels[metric_name]
    # Unknown metrics fall back to title-casing the raw name.
    return metric_name.replace('_', ' ').title()
|
| 44 |
+
|
| 45 |
+
def get_color_mapping(run_names):
|
| 46 |
+
"""Create a consistent color mapping for all runs."""
|
| 47 |
+
# Define a color palette with distinct colors
|
| 48 |
+
# colors = [
|
| 49 |
+
# '#1f77b4', # Blue
|
| 50 |
+
# '#ff7f0e', # Orange
|
| 51 |
+
# '#2ca02c', # Green
|
| 52 |
+
# '#d62728', # Red
|
| 53 |
+
# '#9467bd', # Purple
|
| 54 |
+
# '#8c564b', # Brown
|
| 55 |
+
# '#e377c2', # Pink
|
| 56 |
+
# '#7f7f7f', # Gray
|
| 57 |
+
# '#bcbd22', # Yellow-green
|
| 58 |
+
# '#17becf', # Cyan
|
| 59 |
+
# ]
|
| 60 |
+
# colors = """#091717
|
| 61 |
+
|
| 62 |
+
# #13B3B9
|
| 63 |
+
|
| 64 |
+
# #265E5A
|
| 65 |
+
|
| 66 |
+
# #20808D
|
| 67 |
+
|
| 68 |
+
# #25E5A5
|
| 69 |
+
|
| 70 |
+
# #20808D
|
| 71 |
+
|
| 72 |
+
# #FBFAF4
|
| 73 |
+
|
| 74 |
+
# #E4E3D4
|
| 75 |
+
|
| 76 |
+
# #FFD2A6
|
| 77 |
+
|
| 78 |
+
# #A84B2F
|
| 79 |
+
|
| 80 |
+
# #944454""".lower().split("\n\n")
|
| 81 |
+
colors = [
|
| 82 |
+
"#e6194b", # Red
|
| 83 |
+
"#f58231", # Orange
|
| 84 |
+
"#ffe119", # Yellow
|
| 85 |
+
"#bfef45", # Lime
|
| 86 |
+
"#3cb44b", # Green
|
| 87 |
+
"#42d4f4", # Cyan
|
| 88 |
+
"#4363d8", # Blue
|
| 89 |
+
"#911eb4", # Purple
|
| 90 |
+
"#f032e6", # Magenta
|
| 91 |
+
"#a9a9a9" # Grey
|
| 92 |
+
]
|
| 93 |
+
|
| 94 |
+
# Create a mapping of run names to colors
|
| 95 |
+
return {name: colors[i % len(colors)] for i, name in enumerate(sorted(run_names))}
|
| 96 |
+
|
| 97 |
+
def plot_metric(metric_dir, color_mapping, output_dir):
    """Render one figure with every run's curve for a single metric."""
    metric_name = os.path.basename(metric_dir)
    csv_files = glob.glob(os.path.join(metric_dir, '*.csv'))

    if not csv_files:
        print(f"No CSV files found in {metric_dir}")
        return

    plt.figure(figsize=(12, 7))

    # One curve per exported run, in a stable (sorted) order.
    for csv_file in sorted(csv_files):
        try:
            df = pd.read_csv(csv_file)
            run_name = extract_run_name(csv_file)
            plt.plot(
                df['Step'],
                df['Value'],
                label=run_name,
                color=color_mapping.get(run_name, 'gray'),
                alpha=0.9,
            )
        except Exception as e:
            print(f"Error processing {csv_file}: {e}")

    plt.xlabel('Step')
    plt.ylabel(get_metric_label(metric_name))

    # Epoch-level metrics are titled against "Epoch", the rest against "Step".
    comparison = "Epoch" if "epoch" in metric_name else "Step"
    plt.title(f'{get_metric_label(metric_name)} vs. {comparison}', fontweight='bold')

    # Two legend columns once the run count gets large.
    plt.legend(loc='best', frameon=True, fancybox=True, framealpha=0.9,
               shadow=True, borderpad=1, ncol=2 if len(csv_files) > 5 else 1)
    plt.grid(True, linestyle='--', alpha=0.7)
    plt.tight_layout()

    os.makedirs(output_dir, exist_ok=True)
    output_path = os.path.join(output_dir, f'{metric_name}_plot.png')
    plt.savefig(output_path, bbox_inches='tight')
    print(f"Saved plot to {output_path}")

    plt.close()
|
| 149 |
+
|
| 150 |
+
def main():
    """Plot every metric directory under runs_jsons/ into plots/."""
    script_dir = os.path.dirname(os.path.abspath(__file__))

    # Input CSVs live in runs_jsons/<metric>/<run>.csv; images go to plots/.
    base_dir = os.path.join(script_dir, 'runs_jsons')
    output_dir = os.path.join(script_dir, 'plots')
    os.makedirs(output_dir, exist_ok=True)

    setup_plot_style()

    metric_dirs = [d for d in glob.glob(os.path.join(base_dir, '*')) if os.path.isdir(d)]

    # Collect every run name up front so each run keeps one color across
    # all metric plots.
    all_run_names = {
        extract_run_name(csv_file)
        for metric_dir in metric_dirs
        for csv_file in glob.glob(os.path.join(metric_dir, '*.csv'))
    }

    color_mapping = get_color_mapping(all_run_names)

    for metric_dir in metric_dirs:
        plot_metric(metric_dir, color_mapping, output_dir)

    print(f"All plots have been generated in {output_dir}")


if __name__ == '__main__':
    main()
|
archive-misc/plots/acc_trainstep_plot.png
ADDED
|
Git LFS Details
|
archive-misc/plots/loss_epoch_plot.png
ADDED
|
Git LFS Details
|
archive-misc/plots/perplexityval_epoch_plot.png
ADDED
|
Git LFS Details
|
archive-misc/plots/topkacc_epoch_plot.png
ADDED
|
Git LFS Details
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750391963.405016,99,0.28758272528648376
|
| 3 |
+
1750392086.168981,199,0.37307843565940857
|
| 4 |
+
1750392208.5869079,299,0.40630942583084106
|
| 5 |
+
1750392330.8717701,399,0.43048712611198425
|
| 6 |
+
1750392453.498474,499,0.45128124952316284
|
| 7 |
+
1750392585.924861,599,0.4640906751155853
|
| 8 |
+
1750394101.821525,722,0.4684717357158661
|
| 9 |
+
1750394232.5230699,822,0.47576409578323364
|
| 10 |
+
1750394358.10847,922,0.4853419065475464
|
| 11 |
+
1750394491.3217402,1022,0.4948982894420624
|
| 12 |
+
1750394625.2167208,1122,0.5040943622589111
|
| 13 |
+
1750394759.376929,1222,0.511553943157196
|
| 14 |
+
1750396346.573935,2591,0.523975670337677
|
| 15 |
+
1750396464.359958,2691,0.5316954851150513
|
| 16 |
+
1750396593.034543,2791,0.5362462997436523
|
| 17 |
+
1750396722.3297448,2891,0.5424111485481262
|
| 18 |
+
1750396843.487953,2991,0.5477138757705688
|
| 19 |
+
1750396964.5418801,3091,0.5496237874031067
|
| 20 |
+
1750397085.803721,3191,0.5543492436408997
|
| 21 |
+
1750397206.7307,3291,0.556705892086029
|
| 22 |
+
1750397327.7294452,3391,0.5581862926483154
|
| 23 |
+
1750397448.3722959,3491,0.564699113368988
|
| 24 |
+
1750397569.190787,3591,0.5667800307273865
|
| 25 |
+
1750397689.629411,3691,0.5721838474273682
|
| 26 |
+
1750399210.061806,3837,0.5721851587295532
|
| 27 |
+
1750399328.803469,3937,0.5768272280693054
|
| 28 |
+
1750399448.697824,4037,0.5804650783538818
|
| 29 |
+
1750399569.190957,4137,0.5802291631698608
|
| 30 |
+
1750399690.00176,4237,0.5834344625473022
|
| 31 |
+
1750399811.529087,4337,0.5861427783966064
|
| 32 |
+
1750399931.772715,4437,0.5860195755958557
|
| 33 |
+
1750400051.427737,4537,0.5869417786598206
|
| 34 |
+
1750400176.853729,4637,0.5894920229911804
|
| 35 |
+
1750400306.4569101,4737,0.5896464586257935
|
| 36 |
+
1750400431.092097,4837,0.5900257229804993
|
| 37 |
+
1750400550.723742,4937,0.5935091972351074
|
| 38 |
+
1750402065.168691,7571,0.594632625579834
|
| 39 |
+
1750402182.961608,7671,0.6000049114227295
|
| 40 |
+
1750402302.621356,7771,0.6004062294960022
|
| 41 |
+
1750402423.284728,7871,0.6009228229522705
|
| 42 |
+
1750402543.827255,7971,0.6010753512382507
|
| 43 |
+
1750402664.210165,8071,0.6000925302505493
|
| 44 |
+
1750402785.2010791,8171,0.6007965803146362
|
| 45 |
+
1750402905.502709,8271,0.6033070087432861
|
| 46 |
+
1750403025.571712,8371,0.6046096682548523
|
| 47 |
+
1750403145.5079029,8471,0.6046929955482483
|
| 48 |
+
1750403265.2297,8571,0.6046923995018005
|
| 49 |
+
1750403384.998875,8671,0.6068811416625977
|
| 50 |
+
1750403504.4862049,8771,0.6079203486442566
|
| 51 |
+
1750403623.7825549,8871,0.6097512245178223
|
| 52 |
+
1750403742.793798,8971,0.6084074974060059
|
| 53 |
+
1750403876.4235342,9071,0.6094215512275696
|
| 54 |
+
1750404003.115214,9171,0.6072763204574585
|
| 55 |
+
1750404121.304395,9271,0.6104883551597595
|
| 56 |
+
1750405642.011669,9439,0.6106861233711243
|
| 57 |
+
1750405761.730685,9539,0.612129271030426
|
| 58 |
+
1750405882.06578,9639,0.6121041774749756
|
| 59 |
+
1750406009.142996,9739,0.6116084456443787
|
| 60 |
+
1750406139.7740648,9839,0.6149724125862122
|
| 61 |
+
1750406266.147126,9939,0.6138026714324951
|
| 62 |
+
1750406386.9780512,10039,0.6122788190841675
|
| 63 |
+
1750406507.619675,10139,0.6135343313217163
|
| 64 |
+
1750406628.241678,10239,0.6149981617927551
|
| 65 |
+
1750406749.1838799,10339,0.6140103936195374
|
| 66 |
+
1750406870.2870731,10439,0.6159699559211731
|
| 67 |
+
1750406990.665215,10539,0.6164675354957581
|
| 68 |
+
1750407111.044385,10639,0.6145429015159607
|
| 69 |
+
1750407231.041761,10739,0.615381121635437
|
| 70 |
+
1750407350.530978,10839,0.6168584823608398
|
| 71 |
+
1750407473.886058,10939,0.6164289116859436
|
| 72 |
+
1750407599.025277,11039,0.6161783337593079
|
| 73 |
+
1750407718.2328582,11139,0.6174693703651428
|
| 74 |
+
1750409288.9252071,15045,0.6210026144981384
|
| 75 |
+
1750409406.185398,15145,0.6240753531455994
|
| 76 |
+
1750409525.7020748,15245,0.6224570870399475
|
| 77 |
+
1750409645.983714,15345,0.6200165152549744
|
| 78 |
+
1750409767.120149,15445,0.6223462224006653
|
| 79 |
+
1750409887.003285,15545,0.6240637302398682
|
| 80 |
+
1750410007.516577,15645,0.624666690826416
|
| 81 |
+
1750410126.859099,15745,0.6218351721763611
|
| 82 |
+
1750410246.7748039,15845,0.6223841905593872
|
| 83 |
+
1750410366.068408,15945,0.6235833168029785
|
| 84 |
+
1750410485.5163388,16045,0.623276948928833
|
| 85 |
+
1750410604.9105248,16145,0.6230471730232239
|
| 86 |
+
1750410723.908787,16245,0.6238241195678711
|
| 87 |
+
1750410843.0748658,16345,0.6252604126930237
|
| 88 |
+
1750410961.730179,16445,0.6225661635398865
|
| 89 |
+
1750411082.1992378,16545,0.624557614326477
|
| 90 |
+
1750411205.180066,16645,0.6239485144615173
|
| 91 |
+
1750411323.487138,16745,0.6227665543556213
|
| 92 |
+
1750411444.456806,16845,0.6249957084655762
|
| 93 |
+
1750411564.380202,16945,0.6248412728309631
|
| 94 |
+
1750411682.873339,17045,0.6236630082130432
|
| 95 |
+
1750411801.126118,17145,0.6250913143157959
|
| 96 |
+
1750411919.646475,17245,0.6253725290298462
|
| 97 |
+
1750412037.659429,17345,0.6242212057113647
|
| 98 |
+
1750413566.933687,17536,0.6268401741981506
|
| 99 |
+
1750413684.549922,17636,0.6275759935379028
|
| 100 |
+
1750413802.068516,17736,0.6274019479751587
|
| 101 |
+
1750413919.368851,17836,0.6269387006759644
|
| 102 |
+
1750414036.703834,17936,0.6291954517364502
|
| 103 |
+
1750414154.379905,18036,0.6291244029998779
|
| 104 |
+
1750414272.5889988,18136,0.6289613842964172
|
| 105 |
+
1750414391.175787,18236,0.6295741200447083
|
| 106 |
+
1750414509.7117262,18336,0.6275030374526978
|
| 107 |
+
1750414628.5175538,18436,0.6282285451889038
|
| 108 |
+
1750414749.2212071,18536,0.6290037035942078
|
| 109 |
+
1750414873.3800588,18636,0.6296923756599426
|
| 110 |
+
1750414991.208481,18736,0.6257708072662354
|
| 111 |
+
1750415108.545247,18836,0.630242645740509
|
| 112 |
+
1750415225.8621771,18936,0.627531886100769
|
| 113 |
+
1750415343.270394,19036,0.6264626383781433
|
| 114 |
+
1750415460.235528,19136,0.6282236576080322
|
| 115 |
+
1750415577.469412,19236,0.6287162899971008
|
| 116 |
+
1750415694.646375,19336,0.6296850442886353
|
| 117 |
+
1750415811.340872,19436,0.6315361261367798
|
| 118 |
+
1750415928.208522,19536,0.6279019713401794
|
| 119 |
+
1750416044.951065,19636,0.630811870098114
|
| 120 |
+
1750416161.7805922,19736,0.629878044128418
|
| 121 |
+
1750416279.460792,19836,0.6303988695144653
|
| 122 |
+
1750417804.136805,25003,0.63283771276474
|
| 123 |
+
1750417921.102397,25103,0.634482204914093
|
| 124 |
+
1750418038.5472991,25203,0.6336372494697571
|
| 125 |
+
1750418156.042629,25303,0.6343603134155273
|
| 126 |
+
1750418274.690233,25403,0.6347303986549377
|
| 127 |
+
1750418403.750201,25503,0.6332867741584778
|
| 128 |
+
1750418526.082923,25603,0.635621964931488
|
| 129 |
+
1750418642.563765,25703,0.6346629858016968
|
| 130 |
+
1750418759.2969239,25803,0.6334099173545837
|
| 131 |
+
1750418876.046859,25903,0.6345508694648743
|
| 132 |
+
1750418993.52926,26003,0.6354650855064392
|
| 133 |
+
1750419111.751643,26103,0.6344546675682068
|
| 134 |
+
1750419229.823759,26203,0.6361017227172852
|
| 135 |
+
1750419347.9499562,26303,0.6346011161804199
|
| 136 |
+
1750419466.2246022,26403,0.632273256778717
|
| 137 |
+
1750419585.835607,26503,0.6366924047470093
|
| 138 |
+
1750419704.616571,26603,0.6358627676963806
|
| 139 |
+
1750419824.766925,26703,0.6379485130310059
|
| 140 |
+
1750419942.537836,26803,0.6344301700592041
|
| 141 |
+
1750420060.6184142,26903,0.6359050273895264
|
| 142 |
+
1750420178.9125571,27003,0.6350361704826355
|
| 143 |
+
1750420297.1137972,27103,0.6330637335777283
|
| 144 |
+
1750420415.8522072,27203,0.6344761252403259
|
| 145 |
+
1750420534.734411,27303,0.6365269422531128
|
| 146 |
+
1750420653.425687,27403,0.6330030560493469
|
| 147 |
+
1750420772.227184,27503,0.6345784068107605
|
| 148 |
+
1750420890.5458748,27603,0.6361641883850098
|
| 149 |
+
1750421008.778699,27703,0.6378412842750549
|
| 150 |
+
1750421127.097027,27803,0.635398268699646
|
| 151 |
+
1750421245.309462,27903,0.6374136209487915
|
| 152 |
+
1750421363.5219321,28003,0.6376703381538391
|
| 153 |
+
1750422796.018083,28116,0.6390845775604248
|
| 154 |
+
1750422913.985208,28216,0.6385864019393921
|
| 155 |
+
1750423032.13856,28316,0.6375925540924072
|
| 156 |
+
1750423149.83459,28416,0.6396439671516418
|
| 157 |
+
1750423267.42682,28516,0.6380251049995422
|
| 158 |
+
1750423384.995723,28616,0.6388915181159973
|
| 159 |
+
1750423502.713706,28716,0.6386666893959045
|
| 160 |
+
1750423620.257157,28816,0.6381250023841858
|
| 161 |
+
1750423737.4605892,28916,0.639373779296875
|
| 162 |
+
1750423854.9022012,29016,0.6389166712760925
|
| 163 |
+
1750423973.02514,29116,0.6370416879653931
|
| 164 |
+
1750424103.93007,29216,0.6380478143692017
|
| 165 |
+
1750424237.379366,29316,0.6376948356628418
|
| 166 |
+
1750424355.43341,29416,0.6376280784606934
|
| 167 |
+
1750424473.1514509,29516,0.6373106837272644
|
| 168 |
+
1750424590.771557,29616,0.6380802989006042
|
| 169 |
+
1750424708.3291452,29716,0.6382542848587036
|
| 170 |
+
1750424825.7771049,29816,0.6359246373176575
|
| 171 |
+
1750424943.270345,29916,0.6378106474876404
|
| 172 |
+
1750425060.699276,30016,0.6363774538040161
|
| 173 |
+
1750425178.296514,30116,0.63671875
|
| 174 |
+
1750425295.891006,30216,0.6395600438117981
|
| 175 |
+
1750425414.143324,30316,0.639651358127594
|
| 176 |
+
1750425533.624413,30416,0.6388572454452515
|
| 177 |
+
1750425659.385945,30516,0.639094352722168
|
| 178 |
+
1750425783.19887,30616,0.637870728969574
|
| 179 |
+
1750425900.442911,30716,0.6388829946517944
|
| 180 |
+
1750426017.5772002,30816,0.6379086971282959
|
| 181 |
+
1750426134.723032,30916,0.6393657922744751
|
| 182 |
+
1750426251.728645,31016,0.6394289135932922
|
| 183 |
+
1750426370.172011,31116,0.6372193694114685
|
| 184 |
+
1750427110.586208,31229,0.6407451033592224
|
| 185 |
+
1750427226.9793642,31329,0.6399344205856323
|
| 186 |
+
1750427344.0258079,31429,0.6403363943099976
|
| 187 |
+
1750427461.017736,31529,0.6378547549247742
|
| 188 |
+
1750427580.1463559,31629,0.6400918960571289
|
| 189 |
+
1750427699.5624971,31729,0.6376268267631531
|
| 190 |
+
1750427816.693315,31829,0.6398045420646667
|
| 191 |
+
1750427933.695549,31929,0.6389558911323547
|
| 192 |
+
1750428051.752007,32029,0.6395710706710815
|
| 193 |
+
1750428168.643732,32129,0.6386721730232239
|
| 194 |
+
1750428285.899798,32229,0.6397224068641663
|
| 195 |
+
1750428402.7067468,32329,0.6359546780586243
|
| 196 |
+
1750428519.8862941,32429,0.6403406858444214
|
| 197 |
+
1750428636.643082,32529,0.6381691098213196
|
| 198 |
+
1750428754.3186119,32629,0.6397659182548523
|
| 199 |
+
1750428872.257874,32729,0.6390992403030396
|
| 200 |
+
1750428989.4772532,32829,0.6392248868942261
|
| 201 |
+
1750429107.404566,32929,0.64007967710495
|
| 202 |
+
1750429229.3547251,33029,0.640085756778717
|
| 203 |
+
1750429353.6662982,33129,0.6392549276351929
|
| 204 |
+
1750429470.9699478,33229,0.639375627040863
|
| 205 |
+
1750429588.0433788,33329,0.6410759687423706
|
| 206 |
+
1750429705.013918,33429,0.6397548913955688
|
| 207 |
+
1750429822.030163,33529,0.6397972106933594
|
| 208 |
+
1750429939.198564,33629,0.6389356851577759
|
| 209 |
+
1750430056.1450539,33729,0.6388548016548157
|
| 210 |
+
1750430172.9767108,33829,0.6392873525619507
|
| 211 |
+
1750430290.102144,33929,0.6401292681694031
|
| 212 |
+
1750430407.1287522,34029,0.6388995051383972
|
| 213 |
+
1750430524.0033422,34129,0.6401439905166626
|
| 214 |
+
1750430641.097196,34229,0.6416966915130615
|
| 215 |
+
1750431380.565137,34342,0.6415795683860779
|
| 216 |
+
1750431497.4990091,34442,0.6457396149635315
|
| 217 |
+
1750431616.972259,34542,0.6391391158103943
|
| 218 |
+
1750431737.77422,34642,0.6414675116539001
|
| 219 |
+
1750431855.345222,34742,0.6420894861221313
|
| 220 |
+
1750431971.797497,34842,0.6419129967689514
|
| 221 |
+
1750432092.013702,34942,0.6438413262367249
|
| 222 |
+
1750432212.7010539,35042,0.6427831053733826
|
| 223 |
+
1750432333.610374,35142,0.6404712200164795
|
| 224 |
+
1750432453.564967,35242,0.6409705877304077
|
| 225 |
+
1750432572.20208,35342,0.6433075666427612
|
| 226 |
+
1750432692.067829,35442,0.6417610049247742
|
| 227 |
+
1750432813.099978,35542,0.6427144408226013
|
| 228 |
+
1750432941.482049,35642,0.6400201916694641
|
| 229 |
+
1750433058.969825,35742,0.6435220837593079
|
| 230 |
+
1750433174.587164,35842,0.6416826248168945
|
| 231 |
+
1750433290.972367,35942,0.6424387097358704
|
| 232 |
+
1750433417.833635,36042,0.6415631175041199
|
| 233 |
+
1750433535.4006011,36142,0.6395079493522644
|
| 234 |
+
1750433651.833875,36242,0.6416727900505066
|
| 235 |
+
1750433767.894007,36342,0.6432923078536987
|
| 236 |
+
1750433886.0875251,36442,0.639417290687561
|
| 237 |
+
1750434003.356535,36542,0.6422218084335327
|
| 238 |
+
1750434121.536761,36642,0.6425012350082397
|
| 239 |
+
1750434245.319788,36742,0.6424558758735657
|
| 240 |
+
1750434364.3623009,36842,0.6420220732688904
|
| 241 |
+
1750434485.914399,36942,0.6411697268486023
|
| 242 |
+
1750434605.5461252,37042,0.642849862575531
|
| 243 |
+
1750434722.119754,37142,0.6430937647819519
|
| 244 |
+
1750434838.9487429,37242,0.6430661678314209
|
| 245 |
+
1750434955.2114959,37342,0.6417616605758667
|
| 246 |
+
1750435722.9776409,37455,0.6433213949203491
|
| 247 |
+
1750435843.0595179,37555,0.6440459489822388
|
| 248 |
+
1750435966.055239,37655,0.6429075002670288
|
| 249 |
+
1750436101.068255,37755,0.6435238718986511
|
| 250 |
+
1750436230.166233,37855,0.6430784463882446
|
| 251 |
+
1750436349.987224,37955,0.6424105167388916
|
| 252 |
+
1750436474.901657,38055,0.6450122594833374
|
| 253 |
+
1750436604.168363,38155,0.6424190998077393
|
| 254 |
+
1750436727.225525,38255,0.6437579393386841
|
| 255 |
+
1750436852.5823019,38355,0.6450024247169495
|
| 256 |
+
1750436981.771934,38455,0.6460079550743103
|
| 257 |
+
1750437106.833937,38555,0.6438449621200562
|
| 258 |
+
1750437233.7614038,38655,0.6450612545013428
|
| 259 |
+
1750437355.893782,38755,0.6443492770195007
|
| 260 |
+
1750437482.866613,38855,0.6453100442886353
|
| 261 |
+
1750437604.378762,38955,0.6427034139633179
|
| 262 |
+
1750437728.0891478,39055,0.643042266368866
|
| 263 |
+
1750437854.0641642,39155,0.6431452035903931
|
| 264 |
+
1750437979.616,39255,0.6405355334281921
|
| 265 |
+
1750438103.1675441,39355,0.6419589519500732
|
| 266 |
+
1750438227.017985,39455,0.6451151967048645
|
| 267 |
+
1750438352.80471,39555,0.642490804195404
|
| 268 |
+
1750438481.0750458,39655,0.6436923742294312
|
| 269 |
+
1750438607.4506419,39755,0.6426262259483337
|
| 270 |
+
1750438732.6586308,39855,0.6434859037399292
|
| 271 |
+
1750438850.041701,39955,0.6414963006973267
|
| 272 |
+
1750438969.573184,40055,0.6438449621200562
|
| 273 |
+
1750439094.455677,40155,0.642514705657959
|
| 274 |
+
1750439220.3763552,40255,0.6439902186393738
|
| 275 |
+
1750439346.111592,40355,0.6446102857589722
|
| 276 |
+
1750439473.4727318,40455,0.6444522142410278
|
| 277 |
+
1750440288.40138,40568,0.6467640399932861
|
| 278 |
+
1750440416.757272,40668,0.6456770896911621
|
| 279 |
+
1750440536.2355032,40768,0.6457671523094177
|
| 280 |
+
1750440656.526047,40868,0.6436586976051331
|
| 281 |
+
1750440777.034587,40968,0.6458407044410706
|
| 282 |
+
1750440904.930413,41068,0.6438198685646057
|
| 283 |
+
1750441027.7695491,41168,0.6463271975517273
|
| 284 |
+
1750441152.539819,41268,0.6448026895523071
|
| 285 |
+
1750441280.684284,41368,0.6460006237030029
|
| 286 |
+
1750441412.2877288,41468,0.6452224254608154
|
| 287 |
+
1750441539.305119,41568,0.6450667977333069
|
| 288 |
+
1750441663.1772,41668,0.6457414031028748
|
| 289 |
+
1750441786.5259461,41768,0.643865168094635
|
| 290 |
+
1750441909.4749608,41868,0.64384925365448
|
| 291 |
+
1750442031.882318,41968,0.6438081860542297
|
| 292 |
+
1750442150.156358,42068,0.6448112726211548
|
| 293 |
+
1750442267.424397,42168,0.643791675567627
|
| 294 |
+
1750442386.0849018,42268,0.6449528336524963
|
| 295 |
+
1750442502.673168,42368,0.6440759897232056
|
| 296 |
+
1750442620.647326,42468,0.6446268558502197
|
| 297 |
+
1750442738.017685,42568,0.643791675567627
|
| 298 |
+
1750442855.613087,42668,0.64360111951828
|
| 299 |
+
1750442972.90032,42768,0.6467530727386475
|
| 300 |
+
1750443090.365676,42868,0.6462420225143433
|
| 301 |
+
1750443208.134689,42968,0.6456483006477356
|
| 302 |
+
1750443327.7138891,43068,0.6449123620986938
|
| 303 |
+
1750443446.7773492,43168,0.6431586742401123
|
| 304 |
+
1750443565.0604858,43268,0.6423296332359314
|
| 305 |
+
1750443679.328869,43368,0.6451936364173889
|
| 306 |
+
1750443793.370452,43468,0.6438198685646057
|
| 307 |
+
1750443911.636725,43568,0.6434025764465332
|
| 308 |
+
1750444661.583831,43681,0.6461456418037415
|
| 309 |
+
1750444778.3801448,43781,0.6482426524162292
|
| 310 |
+
1750444896.266015,43881,0.6472058892250061
|
| 311 |
+
1750445014.5015402,43981,0.6458106637001038
|
| 312 |
+
1750445133.139083,44081,0.6457383632659912
|
| 313 |
+
1750445251.419277,44181,0.6467622518539429
|
| 314 |
+
1750445369.7066398,44281,0.6460594534873962
|
| 315 |
+
1750445487.8696551,44381,0.6467254757881165
|
| 316 |
+
1750445606.089956,44481,0.6473314762115479
|
| 317 |
+
1750445725.134633,44581,0.6449552774429321
|
| 318 |
+
1750445843.393945,44681,0.6455159187316895
|
| 319 |
+
1750445961.405494,44781,0.6454019546508789
|
| 320 |
+
1750446079.738482,44881,0.644723653793335
|
| 321 |
+
1750446197.922776,44981,0.6462389826774597
|
| 322 |
+
1750446315.902155,45081,0.644656240940094
|
| 323 |
+
1750446433.137455,45181,0.6433915495872498
|
| 324 |
+
1750446551.264723,45281,0.6457310318946838
|
| 325 |
+
1750446669.511483,45381,0.6454013586044312
|
| 326 |
+
1750446787.3911428,45481,0.6469889879226685
|
| 327 |
+
1750446905.443033,45581,0.6453155875205994
|
| 328 |
+
1750447023.4219332,45681,0.6450679898262024
|
| 329 |
+
1750447141.952272,45781,0.6430919170379639
|
| 330 |
+
1750447259.967958,45881,0.6451482772827148
|
| 331 |
+
1750447387.044731,45981,0.6459705829620361
|
| 332 |
+
1750447519.401525,46081,0.6457193493843079
|
| 333 |
+
1750447642.3075469,46181,0.6459993720054626
|
| 334 |
+
1750447782.880893,46281,0.6465814709663391
|
| 335 |
+
1750447910.034666,46381,0.6460391879081726
|
| 336 |
+
1750448034.677154,46481,0.6449068784713745
|
| 337 |
+
1750448154.9038641,46581,0.6472445130348206
|
| 338 |
+
1750448276.621535,46681,0.6447150707244873
|
| 339 |
+
1750449012.4246259,46794,0.6484406590461731
|
| 340 |
+
1750449130.954328,46894,0.6473743915557861
|
| 341 |
+
1750449250.7994518,46994,0.6473921537399292
|
| 342 |
+
1750449370.727267,47094,0.6469785571098328
|
| 343 |
+
1750449490.595724,47194,0.6494570970535278
|
| 344 |
+
1750449610.785643,47294,0.646339476108551
|
| 345 |
+
1750449730.751853,47394,0.6473553776741028
|
| 346 |
+
1750449850.567644,47494,0.6467083096504211
|
| 347 |
+
1750449970.2596061,47594,0.6475189924240112
|
| 348 |
+
1750450089.6937668,47694,0.6476299166679382
|
| 349 |
+
1750450212.124989,47794,0.64707350730896
|
| 350 |
+
1750450332.5156102,47894,0.6465030908584595
|
| 351 |
+
1750450453.031342,47994,0.6483076214790344
|
| 352 |
+
1750450572.6104681,48094,0.6435882449150085
|
| 353 |
+
1750450691.880259,48194,0.6470275521278381
|
| 354 |
+
1750450811.701461,48294,0.6456072330474854
|
| 355 |
+
1750450931.363285,48394,0.6477745175361633
|
| 356 |
+
1750451066.364867,48494,0.6458964347839355
|
| 357 |
+
1750451191.063326,48594,0.6483376026153564
|
| 358 |
+
1750451309.400269,48694,0.6470043063163757
|
| 359 |
+
1750451432.1649559,48794,0.6446225643157959
|
| 360 |
+
1750451557.380671,48894,0.645907461643219
|
| 361 |
+
1750451685.618946,48994,0.6463437676429749
|
| 362 |
+
1750451833.316152,49094,0.6467493772506714
|
| 363 |
+
1750451970.987531,49194,0.6453112959861755
|
| 364 |
+
1750452138.191225,49294,0.644321084022522
|
| 365 |
+
1750452272.043398,49394,0.6451544165611267
|
| 366 |
+
1750452401.273822,49494,0.6469804048538208
|
| 367 |
+
1750452522.711506,49594,0.6456249952316284
|
| 368 |
+
1750452640.381066,49694,0.6457083225250244
|
| 369 |
+
1750452757.507724,49794,0.6458749771118164
|
| 370 |
+
1750453526.169133,49907,0.6475443840026855
|
| 371 |
+
1750453644.4992352,50007,0.6485000252723694
|
| 372 |
+
1750453763.546672,50107,0.6503995060920715
|
| 373 |
+
1750453883.2695081,50207,0.6494748592376709
|
| 374 |
+
1750454004.7841148,50307,0.6482990384101868
|
| 375 |
+
1750454127.7688398,50407,0.6466936469078064
|
| 376 |
+
1750454251.3973289,50507,0.6460869908332825
|
| 377 |
+
1750454375.005162,50607,0.6486035585403442
|
| 378 |
+
1750454498.866722,50707,0.648090660572052
|
| 379 |
+
1750454629.7434108,50807,0.6454362869262695
|
| 380 |
+
1750454760.6950889,50907,0.6481470465660095
|
| 381 |
+
1750454884.496654,51007,0.6474632620811462
|
| 382 |
+
1750455008.0026429,51107,0.6468964219093323
|
| 383 |
+
1750455132.7498,51207,0.6478167772293091
|
| 384 |
+
1750455258.470098,51307,0.6466445922851562
|
| 385 |
+
1750455376.942017,51407,0.6477677822113037
|
| 386 |
+
1750455493.140887,51507,0.6462255120277405
|
| 387 |
+
1750455609.66634,51607,0.6452065110206604
|
| 388 |
+
1750455726.584801,51707,0.647035539150238
|
| 389 |
+
1750455844.3167229,51807,0.6464258432388306
|
| 390 |
+
1750455962.516587,51907,0.6472169160842896
|
| 391 |
+
1750456083.84438,52007,0.6468621492385864
|
| 392 |
+
1750456202.717559,52107,0.6477616429328918
|
| 393 |
+
1750456321.3288598,52207,0.6463045477867126
|
| 394 |
+
1750456440.5815392,52307,0.6483480334281921
|
| 395 |
+
1750456559.834651,52407,0.6464816331863403
|
| 396 |
+
1750456678.9792309,52507,0.6446556448936462
|
| 397 |
+
1750456798.6035829,52607,0.6461648344993591
|
| 398 |
+
1750456918.397055,52707,0.646740198135376
|
| 399 |
+
1750457038.93219,52807,0.646647036075592
|
| 400 |
+
1750457159.62396,52907,0.6466268301010132
|
| 401 |
+
1750457929.211782,53020,0.6496167182922363
|
| 402 |
+
1750458052.85809,53120,0.647800862789154
|
| 403 |
+
1750458176.928961,53220,0.6489019393920898
|
| 404 |
+
1750458303.048157,53320,0.6483253836631775
|
| 405 |
+
1750458444.2680588,53420,0.6491929888725281
|
| 406 |
+
1750458573.865906,53520,0.6491292715072632
|
| 407 |
+
1750458697.898096,53620,0.6475735306739807
|
| 408 |
+
1750458824.801345,53720,0.648048996925354
|
| 409 |
+
1750458953.433717,53820,0.6464105248451233
|
| 410 |
+
1750459075.569062,53920,0.6493860483169556
|
| 411 |
+
1750459200.4519942,54020,0.6479001045227051
|
| 412 |
+
1750459322.370071,54120,0.6457898020744324
|
| 413 |
+
1750459446.5373828,54220,0.6467236280441284
|
| 414 |
+
1750459573.650956,54320,0.6475441455841064
|
| 415 |
+
1750459704.177473,54420,0.6485986709594727
|
| 416 |
+
1750459841.2961438,54520,0.6472708582878113
|
| 417 |
+
1750459973.288098,54620,0.6489062309265137
|
| 418 |
+
1750460115.165309,54720,0.6476121544837952
|
| 419 |
+
1750460254.6476722,54820,0.6478878855705261
|
| 420 |
+
1750460391.936049,54920,0.6472965478897095
|
| 421 |
+
1750460529.7383418,55020,0.6481145620346069
|
| 422 |
+
1750460668.872111,55120,0.6473676562309265
|
| 423 |
+
1750460809.152336,55220,0.6492573618888855
|
| 424 |
+
1750460943.546547,55320,0.6486280560493469
|
| 425 |
+
1750461088.5272598,55420,0.645950973033905
|
| 426 |
+
1750461233.560725,55520,0.6471207141876221
|
| 427 |
+
1750461372.070115,55620,0.647911787033081
|
| 428 |
+
1750461516.1883001,55720,0.646936297416687
|
| 429 |
+
1750461647.543063,55820,0.6482353210449219
|
| 430 |
+
1750461798.818411,55920,0.6453725695610046
|
| 431 |
+
1750461957.278379,56020,0.6469908356666565
|
| 432 |
+
1750462705.7719848,56133,0.6504263877868652
|
| 433 |
+
1750462823.666765,56233,0.6493731737136841
|
| 434 |
+
1750462942.2239392,56333,0.6493461728096008
|
| 435 |
+
1750463060.770675,56433,0.6497120261192322
|
| 436 |
+
1750463179.4219909,56533,0.6489902138710022
|
| 437 |
+
1750463297.778173,56633,0.6487028002738953
|
| 438 |
+
1750463415.443796,56733,0.6492891907691956
|
| 439 |
+
1750463533.347379,56833,0.6493076086044312
|
| 440 |
+
1750463651.751297,56933,0.6492726802825928
|
| 441 |
+
1750463772.814304,57033,0.6496317386627197
|
| 442 |
+
1750463890.798354,57133,0.6484277248382568
|
| 443 |
+
1750464008.648691,57233,0.6509601473808289
|
| 444 |
+
1750464126.678251,57333,0.648615837097168
|
| 445 |
+
1750464243.702663,57433,0.6487248539924622
|
| 446 |
+
1750464360.734715,57533,0.6481225490570068
|
| 447 |
+
1750464477.643559,57633,0.6487144827842712
|
| 448 |
+
1750464594.534667,57733,0.6474853157997131
|
| 449 |
+
1750464711.748069,57833,0.6472322344779968
|
| 450 |
+
1750464828.272034,57933,0.6495900750160217
|
| 451 |
+
1750464944.863915,58033,0.6476734280586243
|
| 452 |
+
1750465061.6391091,58133,0.6475049257278442
|
| 453 |
+
1750465178.195018,58233,0.6494442224502563
|
| 454 |
+
1750465294.852366,58333,0.6471164226531982
|
| 455 |
+
1750465411.6870909,58433,0.6462622284889221
|
| 456 |
+
1750465528.4726589,58533,0.6461899280548096
|
| 457 |
+
1750465645.319412,58633,0.6483082175254822
|
| 458 |
+
1750465764.762222,58733,0.645909309387207
|
| 459 |
+
1750465883.438494,58833,0.6466617584228516
|
| 460 |
+
1750466000.2513611,58933,0.6474571228027344
|
| 461 |
+
1750466116.449578,59033,0.6476936340332031
|
| 462 |
+
1750466232.7494771,59133,0.6471899747848511
|
| 463 |
+
1750466954.025831,59246,0.6506918668746948
|
| 464 |
+
1750467069.798914,59346,0.6502310037612915
|
| 465 |
+
1750467185.7366168,59446,0.6510318517684937
|
| 466 |
+
1750467302.346245,59546,0.6469454765319824
|
| 467 |
+
1750467420.545009,59646,0.6488516926765442
|
| 468 |
+
1750467537.323788,59746,0.6477493643760681
|
| 469 |
+
1750467653.529954,59846,0.6498204469680786
|
| 470 |
+
1750467769.713964,59946,0.6488137245178223
|
| 471 |
+
1750467885.985452,60046,0.6511831879615784
|
| 472 |
+
1750468002.338573,60146,0.6487414240837097
|
| 473 |
+
1750468119.183342,60246,0.6491519808769226
|
| 474 |
+
1750468235.455771,60346,0.6486047506332397
|
| 475 |
+
1750468351.629285,60446,0.6491402983665466
|
| 476 |
+
1750468467.63536,60546,0.6479650735855103
|
| 477 |
+
1750468583.651401,60646,0.6487475633621216
|
| 478 |
+
1750468699.767885,60746,0.6470637321472168
|
| 479 |
+
1750468815.877007,60846,0.6506942510604858
|
| 480 |
+
1750468932.017609,60946,0.648102343082428
|
| 481 |
+
1750469047.9748092,61046,0.6479178667068481
|
| 482 |
+
1750469163.8783622,61146,0.6469007134437561
|
| 483 |
+
1750469279.843866,61246,0.6485098004341125
|
| 484 |
+
1750469395.6838698,61346,0.6492837071418762
|
| 485 |
+
1750469511.7545412,61446,0.6491458415985107
|
| 486 |
+
1750469630.049988,61546,0.6491427421569824
|
| 487 |
+
1750469747.596489,61646,0.6505208611488342
|
| 488 |
+
1750469863.2194428,61746,0.6458866596221924
|
| 489 |
+
1750469979.2194738,61846,0.6464736461639404
|
| 490 |
+
1750470095.122258,61946,0.6484969258308411
|
| 491 |
+
1750470210.562304,62046,0.6498504877090454
|
| 492 |
+
1750470326.1045911,62146,0.6460232734680176
|
| 493 |
+
1750470441.834761,62246,0.649115800857544
|
| 494 |
+
1750471163.36306,62359,0.6514950394630432
|
| 495 |
+
1750471279.086947,62459,0.6520612835884094
|
| 496 |
+
1750471395.0649018,62559,0.651094377040863
|
| 497 |
+
1750471511.038961,62659,0.6490924954414368
|
| 498 |
+
1750471627.1452959,62759,0.6493008732795715
|
| 499 |
+
1750471743.128916,62859,0.6492383480072021
|
| 500 |
+
1750471859.055058,62959,0.6501225233078003
|
| 501 |
+
1750471975.00283,63059,0.6503989100456238
|
| 502 |
+
1750472090.809558,63159,0.6498222947120667
|
| 503 |
+
1750472206.5698009,63259,0.6502689719200134
|
| 504 |
+
1750472322.232264,63359,0.6496697068214417
|
| 505 |
+
1750472437.989224,63459,0.65056312084198
|
| 506 |
+
1750472553.626755,63559,0.6496495008468628
|
| 507 |
+
1750472669.3234591,63659,0.6486868858337402
|
| 508 |
+
1750472785.0012438,63759,0.6508694887161255
|
| 509 |
+
1750472900.6525052,63859,0.6473259925842285
|
| 510 |
+
1750473016.274345,63959,0.6483345627784729
|
| 511 |
+
1750473132.548586,64059,0.6493498682975769
|
| 512 |
+
1750473248.225437,64159,0.6485306620597839
|
| 513 |
+
1750473364.321547,64259,0.6484577059745789
|
| 514 |
+
1750473480.225926,64359,0.6511145830154419
|
| 515 |
+
1750473600.990798,64459,0.6460729241371155
|
| 516 |
+
1750473725.0170329,64559,0.648897647857666
|
| 517 |
+
1750473852.577133,64659,0.650192379951477
|
| 518 |
+
1750473981.622684,64759,0.6487236618995667
|
| 519 |
+
1750474103.4737291,64859,0.649686872959137
|
| 520 |
+
1750474219.1391768,64959,0.6493664383888245
|
| 521 |
+
1750474333.681042,65059,0.6490416526794434
|
| 522 |
+
1750474448.671713,65159,0.6458510756492615
|
| 523 |
+
1750474563.9086561,65259,0.6487787961959839
|
| 524 |
+
1750474687.440989,65359,0.6486139893531799
|
| 525 |
+
1750475402.1149719,65472,0.6503641605377197
|
| 526 |
+
1750475517.346734,65572,0.6510857939720154
|
| 527 |
+
1750475632.709281,65672,0.6523467898368835
|
| 528 |
+
1750475748.152691,65772,0.6509374976158142
|
| 529 |
+
1750475863.632912,65872,0.6508296728134155
|
| 530 |
+
1750475979.121454,65972,0.6497567296028137
|
| 531 |
+
1750476094.56753,66072,0.6515582203865051
|
| 532 |
+
1750476209.744134,66172,0.6481899619102478
|
| 533 |
+
1750476324.7911448,66272,0.6485661864280701
|
| 534 |
+
1750476439.984586,66372,0.6494032144546509
|
| 535 |
+
1750476555.031313,66472,0.6489571332931519
|
| 536 |
+
1750476669.948531,66572,0.6506550312042236
|
| 537 |
+
1750476784.987467,66672,0.6500391960144043
|
| 538 |
+
1750476900.266967,66772,0.6504062414169312
|
| 539 |
+
1750477015.146752,66872,0.6476078629493713
|
| 540 |
+
1750477130.050998,66972,0.6488952040672302
|
| 541 |
+
1750477245.0096571,67072,0.648952841758728
|
| 542 |
+
1750477360.005547,67172,0.6485888361930847
|
| 543 |
+
1750477475.266708,67272,0.6496372818946838
|
| 544 |
+
1750477593.1183481,67372,0.6500251293182373
|
| 545 |
+
1750477708.7258828,67472,0.6484503746032715
|
| 546 |
+
1750477823.6328719,67572,0.6494423747062683
|
| 547 |
+
1750477938.5284438,67672,0.6505042910575867
|
| 548 |
+
1750478054.12744,67772,0.6498180031776428
|
| 549 |
+
1750478170.498666,67872,0.6492432355880737
|
| 550 |
+
1750478285.954317,67972,0.6491881012916565
|
| 551 |
+
1750478401.226903,68072,0.6486304998397827
|
| 552 |
+
1750478516.522924,68172,0.6480741500854492
|
| 553 |
+
1750478631.684822,68272,0.6496483087539673
|
| 554 |
+
1750478746.856767,68372,0.6496262550354004
|
| 555 |
+
1750478861.9361632,68472,0.6503976583480835
|
| 556 |
+
1750479575.806946,68585,0.6521384119987488
|
| 557 |
+
1750479690.32353,68685,0.6518860459327698
|
| 558 |
+
1750479805.1877701,68785,0.6505594253540039
|
| 559 |
+
1750479919.9629152,68885,0.6508958339691162
|
| 560 |
+
1750480034.8541162,68985,0.6496139764785767
|
| 561 |
+
1750480149.549331,69085,0.6507641077041626
|
| 562 |
+
1750480264.361679,69185,0.6501194834709167
|
| 563 |
+
1750480379.125248,69285,0.6503553986549377
|
| 564 |
+
1750480493.715519,69385,0.6503106355667114
|
| 565 |
+
1750480608.351685,69485,0.6501868963241577
|
| 566 |
+
1750480722.979964,69585,0.6506292819976807
|
| 567 |
+
1750480837.832553,69685,0.6509644389152527
|
| 568 |
+
1750480952.294443,69785,0.6499748826026917
|
| 569 |
+
1750481066.869496,69885,0.6507775783538818
|
| 570 |
+
1750481181.595307,69985,0.649661123752594
|
| 571 |
+
1750481296.238889,70085,0.6496807336807251
|
| 572 |
+
1750481411.1383739,70185,0.6505447030067444
|
| 573 |
+
1750481526.015284,70285,0.6501145958900452
|
| 574 |
+
1750481643.874416,70385,0.6485772132873535
|
| 575 |
+
1750481758.848754,70485,0.6489344239234924
|
| 576 |
+
1750481874.9191191,70585,0.6527248620986938
|
| 577 |
+
1750481989.3493938,70685,0.6468694806098938
|
| 578 |
+
1750482103.813694,70785,0.647337019443512
|
| 579 |
+
1750482218.308801,70885,0.6493272185325623
|
| 580 |
+
1750482332.447498,70985,0.6493443846702576
|
| 581 |
+
1750482446.494996,71085,0.6492156982421875
|
| 582 |
+
1750482560.851434,71185,0.6486396789550781
|
| 583 |
+
1750482675.167424,71285,0.6508437395095825
|
| 584 |
+
1750482789.7214952,71385,0.6502757668495178
|
| 585 |
+
1750482904.13637,71485,0.6490030884742737
|
| 586 |
+
1750483018.6129029,71585,0.6489736437797546
|
| 587 |
+
1750483731.0579789,71698,0.6515112519264221
|
| 588 |
+
1750483844.974041,71798,0.6536256074905396
|
| 589 |
+
1750483959.419848,71898,0.6514270901679993
|
| 590 |
+
1750484073.5282922,71998,0.6479448676109314
|
| 591 |
+
1750484187.93104,72098,0.651832103729248
|
| 592 |
+
1750484302.163946,72198,0.6511048078536987
|
| 593 |
+
1750484416.3585322,72298,0.65255206823349
|
| 594 |
+
1750484530.539971,72398,0.6504852771759033
|
| 595 |
+
1750484644.6875288,72498,0.651899516582489
|
| 596 |
+
1750484759.0703092,72598,0.6533210873603821
|
| 597 |
+
1750484873.4014359,72698,0.6514350175857544
|
| 598 |
+
1750484987.528637,72798,0.6496642231941223
|
| 599 |
+
1750485101.637622,72898,0.6514240503311157
|
| 600 |
+
1750485215.8779562,72998,0.6483572125434875
|
| 601 |
+
1750485330.404798,73098,0.650580883026123
|
| 602 |
+
1750485446.021368,73198,0.6504276990890503
|
| 603 |
+
1750485562.9548411,73298,0.6508204936981201
|
| 604 |
+
1750485678.785216,73398,0.6502867341041565
|
| 605 |
+
1750485793.054364,73498,0.6501164436340332
|
| 606 |
+
1750485907.37626,73598,0.6514963507652283
|
| 607 |
+
1750486021.8247418,73698,0.6492040157318115
|
| 608 |
+
1750486136.3473449,73798,0.6500563621520996
|
| 609 |
+
1750486250.709274,73898,0.6468339562416077
|
| 610 |
+
1750486364.8596091,73998,0.6491641998291016
|
| 611 |
+
1750486479.1755798,74098,0.648629903793335
|
| 612 |
+
1750486593.542068,74198,0.648883581161499
|
| 613 |
+
1750486707.85393,74298,0.6481935977935791
|
| 614 |
+
1750486822.1278121,74398,0.6489693522453308
|
| 615 |
+
1750486936.53057,74498,0.6512916684150696
|
| 616 |
+
1750487050.717908,74598,0.6485986709594727
|
| 617 |
+
1750487164.696138,74698,0.6498866677284241
|
| 618 |
+
1750487874.5115469,74811,0.6510941982269287
|
| 619 |
+
1750487987.8303921,74911,0.6525385975837708
|
| 620 |
+
1750488101.44348,75011,0.6542500257492065
|
| 621 |
+
1750488215.1874702,75111,0.6530171632766724
|
| 622 |
+
1750488328.995727,75211,0.6528841853141785
|
| 623 |
+
1750488442.779683,75311,0.6520790457725525
|
| 624 |
+
1750488556.755313,75411,0.6496041417121887
|
| 625 |
+
1750488670.543948,75511,0.649968147277832
|
| 626 |
+
1750488784.313604,75611,0.6517831087112427
|
| 627 |
+
1750488898.535667,75711,0.6522163152694702
|
| 628 |
+
1750489013.370904,75811,0.6525214314460754
|
| 629 |
+
1750489128.4487538,75911,0.6500673890113831
|
| 630 |
+
1750489242.233129,76011,0.6502261161804199
|
| 631 |
+
1750489356.006995,76111,0.6507285833358765
|
| 632 |
+
1750489469.953614,76211,0.6502076983451843
|
| 633 |
+
1750489584.488816,76311,0.6491323709487915
|
| 634 |
+
1750489700.788619,76411,0.6487775444984436
|
| 635 |
+
1750489815.4778082,76511,0.6494393348693848
|
| 636 |
+
1750489929.277719,76611,0.6519858837127686
|
| 637 |
+
1750490043.1183228,76711,0.6498388648033142
|
| 638 |
+
1750490157.016831,76811,0.6505600214004517
|
| 639 |
+
1750490270.614358,76911,0.6515839695930481
|
| 640 |
+
1750490383.968896,77011,0.6495447158813477
|
| 641 |
+
1750490497.365126,77111,0.6486427783966064
|
| 642 |
+
1750490610.886154,77211,0.648328423500061
|
| 643 |
+
1750490724.351153,77311,0.6492592096328735
|
| 644 |
+
1750490837.884931,77411,0.649756133556366
|
| 645 |
+
1750490951.459667,77511,0.6512389779090881
|
| 646 |
+
1750491064.937767,77611,0.6487855315208435
|
| 647 |
+
1750491178.461191,77711,0.6497395634651184
|
| 648 |
+
1750491292.0026398,77811,0.6497634649276733
|
| 649 |
+
1750491999.605982,77924,0.6508513689041138
|
| 650 |
+
1750492113.108762,78024,0.6516966819763184
|
| 651 |
+
1750492226.5848,78124,0.6520398259162903
|
| 652 |
+
1750492340.16163,78224,0.6514491438865662
|
| 653 |
+
1750492453.599425,78324,0.6523351669311523
|
| 654 |
+
1750492567.592646,78424,0.6511476635932922
|
| 655 |
+
1750492682.658302,78524,0.6523958444595337
|
| 656 |
+
1750492796.473689,78624,0.6519944667816162
|
| 657 |
+
1750492910.223047,78724,0.6528701186180115
|
| 658 |
+
1750493024.028586,78824,0.6531862616539001
|
| 659 |
+
1750493137.804054,78924,0.6519387364387512
|
| 660 |
+
1750493251.537948,79024,0.650509774684906
|
| 661 |
+
1750493365.2377431,79124,0.6504889726638794
|
| 662 |
+
1750493479.4787211,79224,0.6521164178848267
|
| 663 |
+
1750493594.934314,79324,0.6500012278556824
|
| 664 |
+
1750493708.180213,79424,0.6507683992385864
|
| 665 |
+
1750493821.614132,79524,0.6500869989395142
|
| 666 |
+
1750493935.008648,79624,0.6497972011566162
|
| 667 |
+
1750494048.377472,79724,0.6534552574157715
|
| 668 |
+
1750494161.808021,79824,0.6491623520851135
|
| 669 |
+
1750494275.22887,79924,0.6505827307701111
|
| 670 |
+
1750494388.272109,80024,0.6519761085510254
|
| 671 |
+
1750494501.45705,80124,0.6508719325065613
|
| 672 |
+
1750494614.844456,80224,0.6500875949859619
|
| 673 |
+
1750494728.3395991,80324,0.6502119898796082
|
| 674 |
+
1750494841.6911979,80424,0.648857831954956
|
| 675 |
+
1750494954.99737,80524,0.6491954922676086
|
| 676 |
+
1750495068.329165,80624,0.6493982672691345
|
| 677 |
+
1750495182.4239628,80724,0.6493633389472961
|
| 678 |
+
1750495295.763544,80824,0.6497040390968323
|
| 679 |
+
1750495409.068179,80924,0.6488284468650818
|
| 680 |
+
1750496116.170484,81037,0.6523707509040833
|
| 681 |
+
1750496229.2599418,81137,0.6523308753967285
|
| 682 |
+
1750496343.805463,81237,0.653846800327301
|
| 683 |
+
1750496457.285084,81337,0.6520637273788452
|
| 684 |
+
1750496570.7672498,81437,0.6530134677886963
|
| 685 |
+
1750496684.26799,81537,0.651968777179718
|
| 686 |
+
1750496797.7675068,81637,0.6531544327735901
|
| 687 |
+
1750496911.226403,81737,0.6504234075546265
|
| 688 |
+
1750497024.690944,81837,0.6540141105651855
|
| 689 |
+
1750497138.0140312,81937,0.652690589427948
|
| 690 |
+
1750497251.343064,82037,0.6488891243934631
|
| 691 |
+
1750497364.475938,82137,0.6499773263931274
|
| 692 |
+
1750497478.279586,82237,0.649258553981781
|
| 693 |
+
1750497593.521636,82337,0.64949631690979
|
| 694 |
+
1750497706.586865,82437,0.6508198380470276
|
| 695 |
+
1750497820.1206038,82537,0.6509448289871216
|
| 696 |
+
1750497933.514859,82637,0.649718165397644
|
| 697 |
+
1750498046.807204,82737,0.6509718298912048
|
| 698 |
+
1750498159.881542,82837,0.6509957313537598
|
| 699 |
+
1750498272.764572,82937,0.6518014669418335
|
| 700 |
+
1750498385.626694,83037,0.6505153179168701
|
| 701 |
+
1750498498.591943,83137,0.650303304195404
|
| 702 |
+
1750498611.842721,83237,0.6515465974807739
|
| 703 |
+
1750498725.1735182,83337,0.6495367884635925
|
| 704 |
+
1750498838.27373,83437,0.6506531834602356
|
| 705 |
+
1750498951.4907122,83537,0.6525073647499084
|
| 706 |
+
1750499064.703134,83637,0.6512150764465332
|
| 707 |
+
1750499177.862365,83737,0.6514528393745422
|
| 708 |
+
1750499291.159497,83837,0.651731014251709
|
| 709 |
+
1750499404.430131,83937,0.6489319801330566
|
| 710 |
+
1750499517.8266392,84037,0.6496292948722839
|
| 711 |
+
1750500225.367591,84150,0.6519051790237427
|
| 712 |
+
1750500338.434595,84250,0.6544748544692993
|
| 713 |
+
1750500451.6891,84350,0.6522947549819946
|
| 714 |
+
1750500565.096119,84450,0.6509847044944763
|
| 715 |
+
1750500678.472042,84550,0.6537775993347168
|
| 716 |
+
1750500791.614295,84650,0.651554524898529
|
| 717 |
+
1750500904.58726,84750,0.6504203677177429
|
| 718 |
+
1750501017.6459992,84850,0.651118278503418
|
| 719 |
+
1750501130.683456,84950,0.6531617641448975
|
| 720 |
+
1750501243.743277,85050,0.650214433670044
|
| 721 |
+
1750501357.616941,85150,0.6512022018432617
|
| 722 |
+
1750501472.321609,85250,0.6521586775779724
|
| 723 |
+
1750501585.380609,85350,0.6539999842643738
|
| 724 |
+
1750501698.436961,85450,0.6527628898620605
|
| 725 |
+
1750501811.4210482,85550,0.6517916917800903
|
| 726 |
+
1750501924.5078888,85650,0.6509864926338196
|
| 727 |
+
1750502037.468951,85750,0.6541029214859009
|
| 728 |
+
1750502150.494959,85850,0.6518939733505249
|
| 729 |
+
1750502263.440168,85950,0.6502745151519775
|
| 730 |
+
1750502376.411214,86050,0.6502175331115723
|
| 731 |
+
1750502489.262076,86150,0.6523351669311523
|
| 732 |
+
1750502601.9581828,86250,0.6517990231513977
|
| 733 |
+
1750502714.890722,86350,0.6500361561775208
|
| 734 |
+
1750502827.857285,86450,0.6501525640487671
|
| 735 |
+
1750502940.9028091,86550,0.649101734161377
|
| 736 |
+
1750503053.891348,86650,0.6505643129348755
|
| 737 |
+
1750503166.955696,86750,0.6500686407089233
|
| 738 |
+
1750503280.275767,86850,0.6483272314071655
|
| 739 |
+
1750503393.301862,86950,0.6497126221656799
|
| 740 |
+
1750503506.3926358,87050,0.6495159268379211
|
| 741 |
+
1750503620.4175518,87150,0.6491482853889465
|
| 742 |
+
1750504325.240918,87263,0.6508238911628723
|
| 743 |
+
1750504437.8989189,87363,0.653662383556366
|
| 744 |
+
1750504550.7523048,87463,0.653051495552063
|
| 745 |
+
1750504663.726395,87563,0.6535282135009766
|
| 746 |
+
1750504776.973186,87663,0.6524871587753296
|
| 747 |
+
1750504889.837835,87763,0.6525337100028992
|
| 748 |
+
1750505002.734218,87863,0.65145343542099
|
| 749 |
+
1750505115.522795,87963,0.6521084308624268
|
| 750 |
+
1750505228.403977,88063,0.6522983908653259
|
| 751 |
+
1750505342.702466,88163,0.6498529314994812
|
| 752 |
+
1750505460.6297479,88263,0.6525331139564514
|
| 753 |
+
1750505575.877179,88363,0.6522395610809326
|
| 754 |
+
1750505688.797272,88463,0.6516587138175964
|
| 755 |
+
1750505801.7769442,88563,0.653624415397644
|
| 756 |
+
1750505914.8972318,88663,0.650729775428772
|
| 757 |
+
1750506027.865003,88763,0.6519007086753845
|
| 758 |
+
1750506140.835049,88863,0.6503682732582092
|
| 759 |
+
1750506253.87393,88963,0.6514338254928589
|
| 760 |
+
1750506366.7786722,89063,0.6522561311721802
|
| 761 |
+
1750506479.536766,89163,0.652329683303833
|
| 762 |
+
1750506592.4483469,89263,0.6510680317878723
|
| 763 |
+
1750506705.31634,89363,0.6515679955482483
|
| 764 |
+
1750506818.388268,89463,0.6509608030319214
|
| 765 |
+
1750506931.425611,89563,0.6509301662445068
|
| 766 |
+
1750507044.422219,89663,0.6502536535263062
|
| 767 |
+
1750507158.00037,89763,0.650896430015564
|
| 768 |
+
1750507271.543029,89863,0.6493333578109741
|
| 769 |
+
1750507384.6583848,89963,0.650469958782196
|
| 770 |
+
1750507497.6100879,90063,0.6499258875846863
|
| 771 |
+
1750507610.53967,90163,0.6503970623016357
|
| 772 |
+
1750507723.682441,90263,0.6501384973526001
|
| 773 |
+
1750508426.5148978,90376,0.6525241136550903
|
| 774 |
+
1750508539.149127,90476,0.6531550288200378
|
| 775 |
+
1750508651.981962,90576,0.6542285680770874
|
| 776 |
+
1750508764.730967,90676,0.6539865136146545
|
| 777 |
+
1750508877.613912,90776,0.6513713002204895
|
| 778 |
+
1750508990.3952599,90876,0.6540612578392029
|
| 779 |
+
1750509103.1236968,90976,0.6530373692512512
|
| 780 |
+
1750509215.890075,91076,0.6506434082984924
|
| 781 |
+
1750509329.717459,91176,0.6504074931144714
|
| 782 |
+
1750509443.355032,91276,0.6535820960998535
|
| 783 |
+
1750509555.94369,91376,0.6515735387802124
|
| 784 |
+
1750509668.6442938,91476,0.6513652205467224
|
| 785 |
+
1750509781.362628,91576,0.6529951095581055
|
| 786 |
+
1750509894.194156,91676,0.652318000793457
|
| 787 |
+
1750510006.8420548,91776,0.6528327465057373
|
| 788 |
+
1750510119.5023942,91876,0.6515141129493713
|
| 789 |
+
1750510232.194077,91976,0.6515257358551025
|
| 790 |
+
1750510344.921349,92076,0.6537836790084839
|
| 791 |
+
1750510457.946561,92176,0.6527512073516846
|
| 792 |
+
1750510570.890898,92276,0.6509362459182739
|
| 793 |
+
1750510683.6737568,92376,0.6492291688919067
|
| 794 |
+
1750510797.512384,92476,0.6511721611022949
|
| 795 |
+
1750510910.6058948,92576,0.6513419151306152
|
| 796 |
+
1750511023.451675,92676,0.6500686407089233
|
| 797 |
+
1750511136.278432,92776,0.6467463374137878
|
| 798 |
+
1750511249.137708,92876,0.6520962119102478
|
| 799 |
+
1750511361.9892852,92976,0.6514883637428284
|
| 800 |
+
1750511474.7244558,93076,0.650035560131073
|
| 801 |
+
1750511587.554349,93176,0.6526182293891907
|
| 802 |
+
1750511700.419528,93276,0.6501151919364929
|
| 803 |
+
1750511813.14798,93376,0.6515588164329529
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750241682.491652,99,0.21416789293289185
|
| 3 |
+
1750241798.4296,199,0.30339154601097107
|
| 4 |
+
1750241914.892242,299,0.33487561345100403
|
| 5 |
+
1750242031.5578701,399,0.3558320999145508
|
| 6 |
+
1750242147.8598652,499,0.3743719458580017
|
| 7 |
+
1750242264.0177548,599,0.386802077293396
|
| 8 |
+
1750242996.703905,722,0.40653958916664124
|
| 9 |
+
1750243112.179178,822,0.4203621447086334
|
| 10 |
+
1750243227.960116,922,0.43047672510147095
|
| 11 |
+
1750243343.7493389,1022,0.44125857949256897
|
| 12 |
+
1750243459.611735,1122,0.44709068536758423
|
| 13 |
+
1750243575.247699,1222,0.45822978019714355
|
| 14 |
+
1750244307.1242821,2591,0.4886654019355774
|
| 15 |
+
1750244422.194462,2691,0.5010459423065186
|
| 16 |
+
1750244537.370603,2791,0.5089681148529053
|
| 17 |
+
1750244652.737849,2891,0.5138774514198303
|
| 18 |
+
1750244768.363161,2991,0.5198927521705627
|
| 19 |
+
1750244883.9648619,3091,0.5233259797096252
|
| 20 |
+
1750245001.597457,3191,0.530121922492981
|
| 21 |
+
1750245117.855197,3291,0.5302830934524536
|
| 22 |
+
1750245234.328162,3391,0.5343363881111145
|
| 23 |
+
1750245351.526498,3491,0.5387812256813049
|
| 24 |
+
1750245470.529834,3591,0.5402769446372986
|
| 25 |
+
1750245586.3119879,3691,0.5463467836380005
|
| 26 |
+
1750246343.139898,3837,0.5505648851394653
|
| 27 |
+
1750246458.41904,3937,0.5526918172836304
|
| 28 |
+
1750246573.937508,4037,0.5572022199630737
|
| 29 |
+
1750246689.4481301,4137,0.5602738857269287
|
| 30 |
+
1750246804.955596,4237,0.5618419051170349
|
| 31 |
+
1750246920.456448,4337,0.5658437609672546
|
| 32 |
+
1750247035.909068,4437,0.5643854141235352
|
| 33 |
+
1750247151.348026,4537,0.564669132232666
|
| 34 |
+
1750247266.8208692,4637,0.565129280090332
|
| 35 |
+
1750247382.343512,4737,0.5662316083908081
|
| 36 |
+
1750247497.887946,4837,0.5709999799728394
|
| 37 |
+
1750247613.2338188,4937,0.5710968375205994
|
| 38 |
+
1750248368.9772642,7571,0.5816092491149902
|
| 39 |
+
1750248483.672859,7671,0.5875716805458069
|
| 40 |
+
1750248599.305938,7771,0.5899681448936462
|
| 41 |
+
1750248714.951941,7871,0.5904393196105957
|
| 42 |
+
1750248830.486346,7971,0.5897787809371948
|
| 43 |
+
1750248946.585846,8071,0.5925152897834778
|
| 44 |
+
1750249062.2329412,8171,0.5895422697067261
|
| 45 |
+
1750249177.890665,8271,0.5945233106613159
|
| 46 |
+
1750249293.646674,8371,0.5957432389259338
|
| 47 |
+
1750249410.172457,8471,0.5967334508895874
|
| 48 |
+
1750249528.32505,8571,0.5945888757705688
|
| 49 |
+
1750249643.642487,8671,0.5959883332252502
|
| 50 |
+
1750249758.920171,8771,0.5973364114761353
|
| 51 |
+
1750249874.196299,8871,0.5975183844566345
|
| 52 |
+
1750249989.4330912,8971,0.5985827445983887
|
| 53 |
+
1750250104.598547,9071,0.5995110273361206
|
| 54 |
+
1750250219.699279,9171,0.5982469320297241
|
| 55 |
+
1750250334.860928,9271,0.5999650955200195
|
| 56 |
+
1750251131.926089,9439,0.6068640947341919
|
| 57 |
+
1750251246.9172008,9539,0.6070073246955872
|
| 58 |
+
1750251361.879851,9639,0.6051697134971619
|
| 59 |
+
1750251477.0158982,9739,0.6055692434310913
|
| 60 |
+
1750251592.2372851,9839,0.6048725247383118
|
| 61 |
+
1750251707.280406,9939,0.6078149676322937
|
| 62 |
+
1750251822.323809,10039,0.6038443446159363
|
| 63 |
+
1750251937.323814,10139,0.604400098323822
|
| 64 |
+
1750252052.29767,10239,0.6052733063697815
|
| 65 |
+
1750252167.3019571,10339,0.6070312261581421
|
| 66 |
+
1750252283.250794,10439,0.6061752438545227
|
| 67 |
+
1750252398.2591941,10539,0.6063510775566101
|
| 68 |
+
1750252513.236476,10639,0.608290433883667
|
| 69 |
+
1750252628.350452,10739,0.6082898378372192
|
| 70 |
+
1750252744.4092648,10839,0.6066213250160217
|
| 71 |
+
1750252860.372185,10939,0.6067665219306946
|
| 72 |
+
1750252976.300589,11039,0.6089038252830505
|
| 73 |
+
1750253090.9338,11139,0.6082156896591187
|
| 74 |
+
1750253883.5361578,15045,0.6134470105171204
|
| 75 |
+
1750253997.8850172,15145,0.616866409778595
|
| 76 |
+
1750254112.508249,15245,0.6153474450111389
|
| 77 |
+
1750254227.1968992,15345,0.6166777014732361
|
| 78 |
+
1750254341.829042,15445,0.6192064881324768
|
| 79 |
+
1750254456.525022,15545,0.6184356808662415
|
| 80 |
+
1750254571.190468,15645,0.618232250213623
|
| 81 |
+
1750254686.063215,15745,0.6188069581985474
|
| 82 |
+
1750254800.704541,15845,0.6163308620452881
|
| 83 |
+
1750254915.783387,15945,0.6184019446372986
|
| 84 |
+
1750255031.205911,16045,0.6191648244857788
|
| 85 |
+
1750255146.14499,16145,0.6177481412887573
|
| 86 |
+
1750255260.942156,16245,0.6192064881324768
|
| 87 |
+
1750255375.750377,16345,0.619608461856842
|
| 88 |
+
1750255490.58447,16445,0.6174852848052979
|
| 89 |
+
1750255605.413252,16545,0.6195269823074341
|
| 90 |
+
1750255720.2515159,16645,0.619271457195282
|
| 91 |
+
1750255835.268335,16745,0.6194338202476501
|
| 92 |
+
1750255949.566218,16845,0.6227101683616638
|
| 93 |
+
1750256063.789962,16945,0.6195894479751587
|
| 94 |
+
1750256178.188776,17045,0.6201948523521423
|
| 95 |
+
1750256293.1323211,17145,0.6186476945877075
|
| 96 |
+
1750256407.9495418,17245,0.6214761137962341
|
| 97 |
+
1750256522.734526,17345,0.6193719506263733
|
| 98 |
+
1750257331.2443368,17536,0.6209809184074402
|
| 99 |
+
1750257446.1104798,17636,0.625723659992218
|
| 100 |
+
1750257566.772247,17736,0.6239944696426392
|
| 101 |
+
1750257688.8950431,17836,0.6249521970748901
|
| 102 |
+
1750257807.802911,17936,0.6222965717315674
|
| 103 |
+
1750257922.6778638,18036,0.6244117617607117
|
| 104 |
+
1750258045.608006,18136,0.624750018119812
|
| 105 |
+
1750258160.8699641,18236,0.6236881017684937
|
| 106 |
+
1750258278.062032,18336,0.6239203214645386
|
| 107 |
+
1750258393.34393,18436,0.6234558820724487
|
| 108 |
+
1750258510.444354,18536,0.6236366629600525
|
| 109 |
+
1750258627.3465788,18636,0.6221133470535278
|
| 110 |
+
1750258741.962354,18736,0.6262475252151489
|
| 111 |
+
1750258855.813168,18836,0.6224766969680786
|
| 112 |
+
1750258969.392629,18936,0.6231942176818848
|
| 113 |
+
1750259083.114611,19036,0.6226966977119446
|
| 114 |
+
1750259197.602185,19136,0.6246813535690308
|
| 115 |
+
1750259312.669236,19236,0.6243590712547302
|
| 116 |
+
1750259428.028898,19336,0.6245042681694031
|
| 117 |
+
1750259544.013591,19436,0.6236856579780579
|
| 118 |
+
1750259660.2356632,19536,0.6241586804389954
|
| 119 |
+
1750259776.559098,19636,0.6265146732330322
|
| 120 |
+
1750259892.711581,19736,0.6263155341148376
|
| 121 |
+
1750260008.9627142,19836,0.6259657144546509
|
| 122 |
+
1750260834.1109152,25003,0.6294178366661072
|
| 123 |
+
1750260956.0177531,25103,0.6341978907585144
|
| 124 |
+
1750261074.854673,25203,0.6363382339477539
|
| 125 |
+
1750261194.548888,25303,0.6368590593338013
|
| 126 |
+
1750261319.6682591,25403,0.6354871392250061
|
| 127 |
+
1750261436.8573089,25503,0.6349025964736938
|
| 128 |
+
1750261560.118855,25603,0.6350404620170593
|
| 129 |
+
1750261678.9360929,25703,0.6320496201515198
|
| 130 |
+
1750261799.2243829,25803,0.6345171332359314
|
| 131 |
+
1750261918.739115,25903,0.6353327035903931
|
| 132 |
+
1750262033.6455271,26003,0.6329522132873535
|
| 133 |
+
1750262150.3304498,26103,0.6368737816810608
|
| 134 |
+
1750262266.430183,26203,0.6347763538360596
|
| 135 |
+
1750262383.351136,26303,0.6333602666854858
|
| 136 |
+
1750262502.0331829,26403,0.6335974335670471
|
| 137 |
+
1750262621.292386,26503,0.636484682559967
|
| 138 |
+
1750262740.728746,26603,0.63404780626297
|
| 139 |
+
1750262859.735296,26703,0.6348559856414795
|
| 140 |
+
1750262978.902925,26803,0.6339277029037476
|
| 141 |
+
1750263098.5881002,26903,0.6347910761833191
|
| 142 |
+
1750263218.1136699,27003,0.6350955963134766
|
| 143 |
+
1750263338.9563498,27103,0.63692706823349
|
| 144 |
+
1750263459.964499,27203,0.6359436511993408
|
| 145 |
+
1750263581.618609,27303,0.6360576152801514
|
| 146 |
+
1750263702.820439,27403,0.63558030128479
|
| 147 |
+
1750263824.62831,27503,0.6354460716247559
|
| 148 |
+
1750263946.3345978,27603,0.6366746425628662
|
| 149 |
+
1750264068.107608,27703,0.6340281963348389
|
| 150 |
+
1750264190.982796,27803,0.6362739205360413
|
| 151 |
+
1750264314.090821,27903,0.6344356536865234
|
| 152 |
+
1750264436.92172,28003,0.6335122585296631
|
| 153 |
+
1750265205.919146,28116,0.6398958563804626
|
| 154 |
+
1750265325.289276,28216,0.6380012035369873
|
| 155 |
+
1750265445.558454,28316,0.6381574869155884
|
| 156 |
+
1750265566.0940878,28416,0.6374521851539612
|
| 157 |
+
1750265690.059275,28516,0.6397303938865662
|
| 158 |
+
1750265812.507515,28616,0.638584554195404
|
| 159 |
+
1750265936.121522,28716,0.6386323571205139
|
| 160 |
+
1750266059.419531,28816,0.64011150598526
|
| 161 |
+
1750266182.449273,28916,0.6387775540351868
|
| 162 |
+
1750266305.094233,29016,0.6370355486869812
|
| 163 |
+
1750266428.7435389,29116,0.6374154090881348
|
| 164 |
+
1750266552.304211,29216,0.6376464366912842
|
| 165 |
+
1750266675.5758011,29316,0.6369785666465759
|
| 166 |
+
1750266805.9332962,29416,0.6378100514411926
|
| 167 |
+
1750266931.0352068,29516,0.6405612826347351
|
| 168 |
+
1750267053.373451,29616,0.6379374861717224
|
| 169 |
+
1750267177.051748,29716,0.6398143172264099
|
| 170 |
+
1750267299.0084312,29816,0.6366201043128967
|
| 171 |
+
1750267420.616881,29916,0.6380557417869568
|
| 172 |
+
1750267541.655123,30016,0.6373701095581055
|
| 173 |
+
1750267662.10873,30116,0.6375141143798828
|
| 174 |
+
1750267783.709685,30216,0.6374785304069519
|
| 175 |
+
1750267904.4887528,30316,0.6377861499786377
|
| 176 |
+
1750268026.647031,30416,0.6383756399154663
|
| 177 |
+
1750268148.171423,30516,0.6382659077644348
|
| 178 |
+
1750268271.2071621,30616,0.6402732729911804
|
| 179 |
+
1750268391.063318,30716,0.6381813883781433
|
| 180 |
+
1750268511.6307871,30816,0.6375147104263306
|
| 181 |
+
1750268636.107896,30916,0.6358823776245117
|
| 182 |
+
1750268769.003489,31016,0.6378302574157715
|
| 183 |
+
1750268901.972845,31116,0.6389013528823853
|
| 184 |
+
1750269714.6235719,31229,0.6395766139030457
|
| 185 |
+
1750269834.188169,31329,0.638219952583313
|
| 186 |
+
1750269950.9024792,31429,0.6388014554977417
|
| 187 |
+
1750270066.4540548,31529,0.6397640705108643
|
| 188 |
+
1750270182.799086,31629,0.6403087973594666
|
| 189 |
+
1750270299.3589602,31729,0.6392359137535095
|
| 190 |
+
1750270415.7364538,31829,0.6382027864456177
|
| 191 |
+
1750270530.978216,31929,0.638592541217804
|
| 192 |
+
1750270654.507533,32029,0.6403118968009949
|
| 193 |
+
1750270804.0131269,32129,0.6388529539108276
|
| 194 |
+
1750270926.15144,32229,0.6389497518539429
|
| 195 |
+
1750271042.976987,32329,0.6402603983879089
|
| 196 |
+
1750271158.2399921,32429,0.6365814805030823
|
| 197 |
+
1750271273.613036,32529,0.6382861733436584
|
| 198 |
+
1750271390.14693,32629,0.6389043927192688
|
| 199 |
+
1750271515.1493912,32729,0.6392359137535095
|
| 200 |
+
1750271634.016979,32829,0.6405753493309021
|
| 201 |
+
1750271755.758776,32929,0.6390416622161865
|
| 202 |
+
1750271874.3118231,33029,0.6402653455734253
|
| 203 |
+
1750271990.907077,33129,0.6378002762794495
|
| 204 |
+
1750272108.921928,33229,0.6391029357910156
|
| 205 |
+
1750272228.449949,33329,0.6386336088180542
|
| 206 |
+
1750272347.898315,33429,0.640414834022522
|
| 207 |
+
1750272475.2046368,33529,0.6403657793998718
|
| 208 |
+
1750272589.0914729,33629,0.6407641172409058
|
| 209 |
+
1750272705.6334841,33729,0.6396709680557251
|
| 210 |
+
1750272822.816761,33829,0.6394779682159424
|
| 211 |
+
1750272941.404253,33929,0.639205276966095
|
| 212 |
+
1750273057.228467,34029,0.6388596892356873
|
| 213 |
+
1750273173.229009,34129,0.6393554210662842
|
| 214 |
+
1750273288.4899561,34229,0.640123188495636
|
| 215 |
+
1750274030.213887,34342,0.6414807438850403
|
| 216 |
+
1750274147.662473,34442,0.6421213150024414
|
| 217 |
+
1750274266.3906848,34542,0.6436458230018616
|
| 218 |
+
1750274385.398832,34642,0.6416642069816589
|
| 219 |
+
1750274504.567491,34742,0.6416237950325012
|
| 220 |
+
1750274623.7267442,34842,0.6420839428901672
|
| 221 |
+
1750274743.5394151,34942,0.6417628526687622
|
| 222 |
+
1750274862.203712,35042,0.6417536735534668
|
| 223 |
+
1750274984.318391,35142,0.6422046422958374
|
| 224 |
+
1750275103.568575,35242,0.6413713097572327
|
| 225 |
+
1750275220.138966,35342,0.6425900459289551
|
| 226 |
+
1750275337.941765,35442,0.6412622332572937
|
| 227 |
+
1750275456.814306,35542,0.6400894522666931
|
| 228 |
+
1750275574.98401,35642,0.6420097947120667
|
| 229 |
+
1750275692.1130471,35742,0.642514705657959
|
| 230 |
+
1750275809.5709782,35842,0.6417150497436523
|
| 231 |
+
1750275926.2575302,35942,0.6393572092056274
|
| 232 |
+
1750276045.36431,36042,0.6414693593978882
|
| 233 |
+
1750276164.4712372,36142,0.6419368982315063
|
| 234 |
+
1750276284.077454,36242,0.6385643482208252
|
| 235 |
+
1750276402.926815,36342,0.6410619020462036
|
| 236 |
+
1750276524.168512,36442,0.6432456970214844
|
| 237 |
+
1750276642.803875,36542,0.6412659287452698
|
| 238 |
+
1750276760.664118,36642,0.6388565897941589
|
| 239 |
+
1750276877.8197088,36742,0.6447322368621826
|
| 240 |
+
1750276992.52645,36842,0.6426060199737549
|
| 241 |
+
1750277110.440595,36942,0.6412352919578552
|
| 242 |
+
1750277227.4103959,37042,0.641943633556366
|
| 243 |
+
1750277344.953378,37142,0.6418511271476746
|
| 244 |
+
1750277460.669297,37242,0.6425992846488953
|
| 245 |
+
1750277576.59105,37342,0.6419374942779541
|
| 246 |
+
1750278326.4555688,37455,0.6453002691268921
|
| 247 |
+
1750278442.072955,37555,0.6434295177459717
|
| 248 |
+
1750278558.4055018,37655,0.6435667872428894
|
| 249 |
+
1750278676.229068,37755,0.6441090703010559
|
| 250 |
+
1750278794.5198379,37855,0.642072319984436
|
| 251 |
+
1750278915.6727328,37955,0.645028829574585
|
| 252 |
+
1750279040.8716538,38055,0.6462420225143433
|
| 253 |
+
1750279161.820596,38155,0.6431207060813904
|
| 254 |
+
1750279280.475289,38255,0.6431145668029785
|
| 255 |
+
1750279398.834913,38355,0.6438578367233276
|
| 256 |
+
1750279516.474643,38455,0.6430367827415466
|
| 257 |
+
1750279633.829937,38555,0.6428572535514832
|
| 258 |
+
1750279750.60494,38655,0.6425576210021973
|
| 259 |
+
1750279868.3665621,38755,0.6414080858230591
|
| 260 |
+
1750279986.88538,38855,0.6441813707351685
|
| 261 |
+
1750280105.688865,38955,0.6419466733932495
|
| 262 |
+
1750280222.4142048,39055,0.6438627243041992
|
| 263 |
+
1750280338.3421881,39155,0.642498791217804
|
| 264 |
+
1750280457.493066,39255,0.6440134644508362
|
| 265 |
+
1750280576.4949849,39355,0.642962634563446
|
| 266 |
+
1750280694.2205691,39455,0.642806351184845
|
| 267 |
+
1750280812.301379,39555,0.644475519657135
|
| 268 |
+
1750280937.08998,39655,0.6416617631912231
|
| 269 |
+
1750281055.1720982,39755,0.6445490121841431
|
| 270 |
+
1750281171.948852,39855,0.6422959566116333
|
| 271 |
+
1750281289.418745,39955,0.64436274766922
|
| 272 |
+
1750281407.81874,40055,0.6438021063804626
|
| 273 |
+
1750281526.818319,40155,0.6413707137107849
|
| 274 |
+
1750281644.899203,40255,0.6412806510925293
|
| 275 |
+
1750281762.475206,40355,0.6419981718063354
|
| 276 |
+
1750281878.6185431,40455,0.6406317353248596
|
| 277 |
+
1750282626.3456159,40568,0.6449540853500366
|
| 278 |
+
1750282752.616928,40668,0.6459221839904785
|
| 279 |
+
1750282877.317843,40768,0.6436997652053833
|
| 280 |
+
1750282998.107455,40868,0.645617663860321
|
| 281 |
+
1750283122.800135,40968,0.6464546322822571
|
| 282 |
+
1750283262.3346539,41068,0.6452726721763611
|
| 283 |
+
1750283403.827907,41168,0.6459258794784546
|
| 284 |
+
1750283524.509494,41268,0.6452248692512512
|
| 285 |
+
1750283645.3601458,41368,0.6442880034446716
|
| 286 |
+
1750283768.575882,41468,0.6420226693153381
|
| 287 |
+
1750283893.432749,41568,0.6444313526153564
|
| 288 |
+
1750284018.6755168,41668,0.6438676714897156
|
| 289 |
+
1750284142.309102,41768,0.6434436440467834
|
| 290 |
+
1750284261.0508292,41868,0.6446678638458252
|
| 291 |
+
1750284381.139601,41968,0.6443517208099365
|
| 292 |
+
1750284502.250894,42068,0.6439411640167236
|
| 293 |
+
1750284623.458782,42168,0.6429129838943481
|
| 294 |
+
1750284744.15576,42268,0.64682537317276
|
| 295 |
+
1750284865.5974588,42368,0.6460331082344055
|
| 296 |
+
1750284987.773657,42468,0.6442880034446716
|
| 297 |
+
1750285110.2447739,42568,0.644547164440155
|
| 298 |
+
1750285232.50309,42668,0.6433596611022949
|
| 299 |
+
1750285354.828066,42768,0.6451323628425598
|
| 300 |
+
1750285476.54096,42868,0.6434479355812073
|
| 301 |
+
1750285597.926975,42968,0.643908679485321
|
| 302 |
+
1750285719.3653271,43068,0.6420814990997314
|
| 303 |
+
1750285840.557452,43168,0.6439160704612732
|
| 304 |
+
1750285962.101805,43268,0.6433719396591187
|
| 305 |
+
1750286083.735996,43368,0.6440735459327698
|
| 306 |
+
1750286204.703545,43468,0.6429314017295837
|
| 307 |
+
1750286327.76542,43568,0.6424681544303894
|
| 308 |
+
1750287089.93405,43681,0.6471216082572937
|
| 309 |
+
1750287210.040575,43781,0.6467695832252502
|
| 310 |
+
1750287330.680011,43881,0.6448443531990051
|
| 311 |
+
1750287452.9146292,43981,0.6471734046936035
|
| 312 |
+
1750287574.203907,44081,0.6451905369758606
|
| 313 |
+
1750287695.6190531,44181,0.6456066370010376
|
| 314 |
+
1750287834.084157,44281,0.6454748511314392
|
| 315 |
+
1750287954.796106,44381,0.6448712944984436
|
| 316 |
+
1750288079.650344,44481,0.6475992798805237
|
| 317 |
+
1750288202.913589,44581,0.6458455920219421
|
| 318 |
+
1750288328.292566,44681,0.645825982093811
|
| 319 |
+
1750288458.77768,44781,0.6448125243186951
|
| 320 |
+
1750288579.276587,44881,0.6433265805244446
|
| 321 |
+
1750288700.053348,44981,0.6453033089637756
|
| 322 |
+
1750288823.757793,45081,0.6435851454734802
|
| 323 |
+
1750288947.5033479,45181,0.6452512145042419
|
| 324 |
+
1750289071.1844292,45281,0.6451635956764221
|
| 325 |
+
1750289194.838445,45381,0.6456127166748047
|
| 326 |
+
1750289318.976344,45481,0.6473155617713928
|
| 327 |
+
1750289442.564545,45581,0.6429858803749084
|
| 328 |
+
1750289565.735361,45681,0.6448370218276978
|
| 329 |
+
1750289688.623064,45781,0.6460006237030029
|
| 330 |
+
1750289811.3699021,45881,0.6455036997795105
|
| 331 |
+
1750289943.0644841,45981,0.6461666822433472
|
| 332 |
+
1750290079.773664,46081,0.646716296672821
|
| 333 |
+
1750290201.8962939,46181,0.6448621153831482
|
| 334 |
+
1750290322.202647,46281,0.6431930065155029
|
| 335 |
+
1750290445.3588831,46381,0.6445759534835815
|
| 336 |
+
1750290568.215271,46481,0.6440943479537964
|
| 337 |
+
1750290688.385665,46581,0.6445428729057312
|
| 338 |
+
1750290808.880954,46681,0.6441415548324585
|
| 339 |
+
1750291578.4776902,46794,0.6457090973854065
|
| 340 |
+
1750291698.370927,46894,0.6475306153297424
|
| 341 |
+
1750291817.165154,46994,0.6468952298164368
|
| 342 |
+
1750291937.1691918,47094,0.6463872790336609
|
| 343 |
+
1750292057.929079,47194,0.6450998783111572
|
| 344 |
+
1750292178.0917802,47294,0.6454583406448364
|
| 345 |
+
1750292303.482858,47394,0.6467616558074951
|
| 346 |
+
1750292423.760905,47494,0.6482242345809937
|
| 347 |
+
1750292542.308008,47594,0.6451581120491028
|
| 348 |
+
1750292660.2191942,47694,0.6488652229309082
|
| 349 |
+
1750292777.5716531,47794,0.6461954712867737
|
| 350 |
+
1750292896.2476,47894,0.6477683782577515
|
| 351 |
+
1750293016.009115,47994,0.6469166874885559
|
| 352 |
+
1750293138.117504,48094,0.6451562643051147
|
| 353 |
+
1750293258.1256971,48194,0.6462162733078003
|
| 354 |
+
1750293377.4574468,48294,0.643597424030304
|
| 355 |
+
1750293496.099553,48394,0.6470036506652832
|
| 356 |
+
1750293618.36915,48494,0.6463841795921326
|
| 357 |
+
1750293752.436372,48594,0.6448590755462646
|
| 358 |
+
1750293917.4711082,48694,0.6454522013664246
|
| 359 |
+
1750294042.4210508,48794,0.6457377672195435
|
| 360 |
+
1750294162.099357,48894,0.6451789140701294
|
| 361 |
+
1750294280.357276,48994,0.6460698246955872
|
| 362 |
+
1750294398.708734,49094,0.6460833549499512
|
| 363 |
+
1750294516.704125,49194,0.6478798985481262
|
| 364 |
+
1750294637.669656,49294,0.6450986266136169
|
| 365 |
+
1750294779.062816,49394,0.6479822397232056
|
| 366 |
+
1750294917.642625,49494,0.6437861323356628
|
| 367 |
+
1750295041.37166,49594,0.6425294280052185
|
| 368 |
+
1750295159.417851,49694,0.6448572278022766
|
| 369 |
+
1750295277.586646,49794,0.646308183670044
|
| 370 |
+
1750296054.6071198,49907,0.6466804146766663
|
| 371 |
+
1750296177.2352269,50007,0.6479687690734863
|
| 372 |
+
1750296300.8120399,50107,0.6481225490570068
|
| 373 |
+
1750296424.595592,50207,0.648354172706604
|
| 374 |
+
1750296547.974368,50307,0.6471893191337585
|
| 375 |
+
1750296676.166361,50407,0.6477347016334534
|
| 376 |
+
1750296814.415308,50507,0.6458235383033752
|
| 377 |
+
1750296945.931654,50607,0.6469044089317322
|
| 378 |
+
1750297075.295464,50707,0.6463817358016968
|
| 379 |
+
1750297198.356979,50807,0.6474558711051941
|
| 380 |
+
1750297341.4102569,50907,0.647463858127594
|
| 381 |
+
1750297467.1187751,51007,0.6465808749198914
|
| 382 |
+
1750297587.6885471,51107,0.6463106870651245
|
| 383 |
+
1750297710.908989,51207,0.648296594619751
|
| 384 |
+
1750297835.818051,51307,0.647297203540802
|
| 385 |
+
1750297961.3280158,51407,0.6460373997688293
|
| 386 |
+
1750298087.4548569,51507,0.6471868753433228
|
| 387 |
+
1750298213.454858,51607,0.6428700685501099
|
| 388 |
+
1750298339.8951528,51707,0.6474828720092773
|
| 389 |
+
1750298466.7554429,51807,0.6478608846664429
|
| 390 |
+
1750298597.17128,51907,0.6451127529144287
|
| 391 |
+
1750298732.562523,52007,0.6464699506759644
|
| 392 |
+
1750298867.001442,52107,0.6459650993347168
|
| 393 |
+
1750298995.653795,52207,0.6456948518753052
|
| 394 |
+
1750299113.526079,52307,0.6475864052772522
|
| 395 |
+
1750299232.773921,52407,0.6468786597251892
|
| 396 |
+
1750299354.860131,52507,0.646700382232666
|
| 397 |
+
1750299478.810007,52607,0.6463584303855896
|
| 398 |
+
1750299603.790758,52707,0.6460520625114441
|
| 399 |
+
1750299728.0330188,52807,0.6452640891075134
|
| 400 |
+
1750299852.972244,52907,0.6472334265708923
|
| 401 |
+
1750300627.761337,53020,0.6483761668205261
|
| 402 |
+
1750300753.319436,53120,0.6485379934310913
|
| 403 |
+
1750300894.5988622,53220,0.6470998525619507
|
| 404 |
+
1750301031.386137,53320,0.6505416631698608
|
| 405 |
+
1750301153.8477619,53420,0.6486967206001282
|
| 406 |
+
1750301277.1732311,53520,0.6491133570671082
|
| 407 |
+
1750301402.075006,53620,0.6442438960075378
|
| 408 |
+
1750301520.450825,53720,0.6487236618995667
|
| 409 |
+
1750301639.662542,53820,0.6463835835456848
|
| 410 |
+
1750301760.182339,53920,0.6470925211906433
|
| 411 |
+
1750301880.992494,54020,0.6464883685112
|
| 412 |
+
1750302001.83247,54120,0.6488180160522461
|
| 413 |
+
1750302123.260853,54220,0.6466305255889893
|
| 414 |
+
1750302244.690379,54320,0.6463456153869629
|
| 415 |
+
1750302365.994741,54420,0.6472904682159424
|
| 416 |
+
1750302488.034341,54520,0.6458161473274231
|
| 417 |
+
1750302609.855891,54620,0.6480042934417725
|
| 418 |
+
1750302731.501794,54720,0.6475238800048828
|
| 419 |
+
1750302853.191099,54820,0.6470471620559692
|
| 420 |
+
1750302975.518531,54920,0.6460570096969604
|
| 421 |
+
1750303098.421973,55020,0.6483272314071655
|
| 422 |
+
1750303220.513244,55120,0.6465263366699219
|
| 423 |
+
1750303342.572195,55220,0.647531270980835
|
| 424 |
+
1750303464.636217,55320,0.6481985449790955
|
| 425 |
+
1750303586.779646,55420,0.6472640633583069
|
| 426 |
+
1750303709.085526,55520,0.6467304229736328
|
| 427 |
+
1750303830.806071,55620,0.6458835601806641
|
| 428 |
+
1750303952.466469,55720,0.6468210816383362
|
| 429 |
+
1750304073.741874,55820,0.6471930146217346
|
| 430 |
+
1750304195.127176,55920,0.6458204388618469
|
| 431 |
+
1750304316.408163,56020,0.6477947235107422
|
| 432 |
+
1750305082.660233,56133,0.648921549320221
|
| 433 |
+
1750305203.568924,56233,0.6467108130455017
|
| 434 |
+
1750305325.488572,56333,0.6503989100456238
|
| 435 |
+
1750305447.1351511,56433,0.6502788066864014
|
| 436 |
+
1750305568.840065,56533,0.6482714414596558
|
| 437 |
+
1750305690.542845,56633,0.649355411529541
|
| 438 |
+
1750305812.181129,56733,0.6499350666999817
|
| 439 |
+
1750305933.855696,56833,0.6482751369476318
|
| 440 |
+
1750306055.506382,56933,0.6461532115936279
|
| 441 |
+
1750306176.7709591,57033,0.6472175121307373
|
| 442 |
+
1750306297.884476,57133,0.6478897333145142
|
| 443 |
+
1750306419.401044,57233,0.646894633769989
|
| 444 |
+
1750306541.605247,57333,0.6485876441001892
|
| 445 |
+
1750306663.8581388,57433,0.6478548049926758
|
| 446 |
+
1750306787.340354,57533,0.6477579474449158
|
| 447 |
+
1750306909.9931881,57633,0.6465287804603577
|
| 448 |
+
1750307032.35187,57733,0.6477022171020508
|
| 449 |
+
1750307154.564481,57833,0.6472230553627014
|
| 450 |
+
1750307276.835264,57933,0.6498976945877075
|
| 451 |
+
1750307398.493126,58033,0.6464962959289551
|
| 452 |
+
1750307520.847733,58133,0.6489521861076355
|
| 453 |
+
1750307642.977959,58233,0.6469656825065613
|
| 454 |
+
1750307764.848602,58333,0.6475974321365356
|
| 455 |
+
1750307886.5466619,58433,0.6472101807594299
|
| 456 |
+
1750308008.096926,58533,0.646530032157898
|
| 457 |
+
1750308129.91144,58633,0.6472922563552856
|
| 458 |
+
1750308251.5830579,58733,0.6473553776741028
|
| 459 |
+
1750308372.8975759,58833,0.6462971568107605
|
| 460 |
+
1750308494.214208,58933,0.6466912031173706
|
| 461 |
+
1750308615.9806452,59033,0.6474724411964417
|
| 462 |
+
1750308742.1111948,59133,0.6464491486549377
|
| 463 |
+
1750309504.211555,59246,0.6506167650222778
|
| 464 |
+
1750309625.168406,59346,0.6489228010177612
|
| 465 |
+
1750309746.635493,59446,0.6505894660949707
|
| 466 |
+
1750309868.457798,59546,0.6502009630203247
|
| 467 |
+
1750309989.681639,59646,0.6506121158599854
|
| 468 |
+
1750310110.474976,59746,0.6477597951889038
|
| 469 |
+
1750310232.681849,59846,0.6491525769233704
|
| 470 |
+
1750310354.60706,59946,0.6493057608604431
|
| 471 |
+
1750310476.456109,60046,0.6474987864494324
|
| 472 |
+
1750310598.108342,60146,0.6481243968009949
|
| 473 |
+
1750310719.631812,60246,0.6481391191482544
|
| 474 |
+
1750310841.0724092,60346,0.6478572487831116
|
| 475 |
+
1750310962.477639,60446,0.6478658318519592
|
| 476 |
+
1750311084.32421,60546,0.6487451195716858
|
| 477 |
+
1750311206.1186502,60646,0.6478290557861328
|
| 478 |
+
1750311327.464293,60746,0.6479527950286865
|
| 479 |
+
1750311448.99296,60846,0.6507965922355652
|
| 480 |
+
1750311570.703797,60946,0.6489418148994446
|
| 481 |
+
1750311692.3788621,61046,0.6471636295318604
|
| 482 |
+
1750311813.798603,61146,0.6461752653121948
|
| 483 |
+
1750311935.3186,61246,0.6480496525764465
|
| 484 |
+
1750312056.882661,61346,0.6456611752510071
|
| 485 |
+
1750312178.2939782,61446,0.6465116739273071
|
| 486 |
+
1750312299.8791208,61546,0.6471225619316101
|
| 487 |
+
1750312421.214797,61646,0.6492395997047424
|
| 488 |
+
1750312543.195411,61746,0.6480588316917419
|
| 489 |
+
1750312670.233453,61846,0.6464307308197021
|
| 490 |
+
1750312793.940044,61946,0.6481586694717407
|
| 491 |
+
1750312915.588005,62046,0.6480116248130798
|
| 492 |
+
1750313037.027286,62146,0.6466923952102661
|
| 493 |
+
1750313158.1939578,62246,0.6475116610527039
|
| 494 |
+
1750313905.375668,62359,0.6485812664031982
|
| 495 |
+
1750314025.037591,62459,0.6484589576721191
|
| 496 |
+
1750314145.371774,62559,0.6488749980926514
|
| 497 |
+
1750314266.599691,62659,0.6495226621627808
|
| 498 |
+
1750314387.2132962,62759,0.6508517265319824
|
| 499 |
+
1750314507.7024288,62859,0.6497665643692017
|
| 500 |
+
1750314628.189686,62959,0.6516213417053223
|
| 501 |
+
1750314748.146301,63059,0.6487261056900024
|
| 502 |
+
1750314868.358367,63159,0.6510030627250671
|
| 503 |
+
1750314988.5014348,63259,0.6490833163261414
|
| 504 |
+
1750315108.526455,63359,0.6481225490570068
|
| 505 |
+
1750315228.3911822,63459,0.6478216648101807
|
| 506 |
+
1750315348.215651,63559,0.6495686173439026
|
| 507 |
+
1750315468.131627,63659,0.6491384506225586
|
| 508 |
+
1750315587.960984,63759,0.6480790376663208
|
| 509 |
+
1750315707.893823,63859,0.6477401852607727
|
| 510 |
+
1750315827.722427,63959,0.6481207013130188
|
| 511 |
+
1750315947.566299,64059,0.6498235464096069
|
| 512 |
+
1750316067.855606,64159,0.6486047506332397
|
| 513 |
+
1750316187.8744562,64259,0.6477928757667542
|
| 514 |
+
1750316307.531418,64359,0.6452211737632751
|
| 515 |
+
1750316427.517174,64459,0.648481011390686
|
| 516 |
+
1750316550.5545862,64559,0.649202823638916
|
| 517 |
+
1750316672.624807,64659,0.6461697220802307
|
| 518 |
+
1750316792.095057,64759,0.6472800374031067
|
| 519 |
+
1750316911.428381,64859,0.6484797596931458
|
| 520 |
+
1750317030.673044,64959,0.6474190950393677
|
| 521 |
+
1750317150.065619,65059,0.6485551595687866
|
| 522 |
+
1750317269.4576561,65159,0.6473284363746643
|
| 523 |
+
1750317388.62231,65259,0.6489478945732117
|
| 524 |
+
1750317510.425743,65359,0.6498370170593262
|
| 525 |
+
1750318246.259418,65472,0.6497201919555664
|
| 526 |
+
1750318364.616058,65572,0.651941180229187
|
| 527 |
+
1750318483.184418,65672,0.6507928967475891
|
| 528 |
+
1750318602.0962481,65772,0.6518989205360413
|
| 529 |
+
1750318720.829149,65872,0.6487677693367004
|
| 530 |
+
1750318839.9066281,65972,0.6484093070030212
|
| 531 |
+
1750318958.5665972,66072,0.6493566036224365
|
| 532 |
+
1750319077.469098,66172,0.6498039364814758
|
| 533 |
+
1750319196.1992152,66272,0.6500980257987976
|
| 534 |
+
1750319315.074125,66372,0.6488897204399109
|
| 535 |
+
1750319433.748579,66472,0.6503884792327881
|
| 536 |
+
1750319552.196927,66572,0.6489307880401611
|
| 537 |
+
1750319671.013653,66672,0.6499950885772705
|
| 538 |
+
1750319789.528258,66772,0.6480569839477539
|
| 539 |
+
1750319908.035121,66872,0.6486513614654541
|
| 540 |
+
1750320026.431205,66972,0.648715078830719
|
| 541 |
+
1750320144.7621481,67072,0.6486801505088806
|
| 542 |
+
1750320263.0357592,67172,0.6503651738166809
|
| 543 |
+
1750320381.3532522,67272,0.6496090888977051
|
| 544 |
+
1750320500.5613499,67372,0.6467726826667786
|
| 545 |
+
1750320623.54901,67472,0.6477181315422058
|
| 546 |
+
1750320741.795747,67572,0.6472892165184021
|
| 547 |
+
1750320859.855903,67672,0.6498854160308838
|
| 548 |
+
1750320978.003171,67772,0.6488057374954224
|
| 549 |
+
1750321098.499806,67872,0.6470992565155029
|
| 550 |
+
1750321216.814155,67972,0.6476672887802124
|
| 551 |
+
1750321335.143292,68072,0.6499993801116943
|
| 552 |
+
1750321453.319468,68172,0.6484957337379456
|
| 553 |
+
1750321571.501225,68272,0.6466611623764038
|
| 554 |
+
1750321690.132703,68372,0.6476348042488098
|
| 555 |
+
1750321808.66078,68472,0.6482536792755127
|
| 556 |
+
1750322547.664128,68585,0.6493887305259705
|
| 557 |
+
1750322664.425544,68685,0.6517763733863831
|
| 558 |
+
1750322782.043474,68785,0.6519050002098083
|
| 559 |
+
1750322899.7543712,68885,0.6515141129493713
|
| 560 |
+
1750323017.3889458,68985,0.649075984954834
|
| 561 |
+
1750323134.995938,69085,0.6505410671234131
|
| 562 |
+
1750323253.0190132,69185,0.6498510837554932
|
| 563 |
+
1750323370.775999,69285,0.6473155617713928
|
| 564 |
+
1750323488.555377,69385,0.6509785652160645
|
| 565 |
+
1750323606.278355,69485,0.6489037871360779
|
| 566 |
+
1750323724.203748,69585,0.6510263681411743
|
| 567 |
+
1750323841.933225,69685,0.648824155330658
|
| 568 |
+
1750323960.079792,69785,0.6503823399543762
|
| 569 |
+
1750324078.064715,69885,0.6487776041030884
|
| 570 |
+
1750324195.7457888,69985,0.6502567529678345
|
| 571 |
+
1750324313.406649,70085,0.6468223333358765
|
| 572 |
+
1750324431.42479,70185,0.6466960906982422
|
| 573 |
+
1750324550.94887,70285,0.6497089266777039
|
| 574 |
+
1750324675.2589598,70385,0.6486838459968567
|
| 575 |
+
1750324793.819502,70485,0.6489037871360779
|
| 576 |
+
1750324911.8261251,70585,0.6481801271438599
|
| 577 |
+
1750325030.1411479,70685,0.6487058997154236
|
| 578 |
+
1750325148.42432,70785,0.64988112449646
|
| 579 |
+
1750325266.557858,70885,0.6515821218490601
|
| 580 |
+
1750325384.489578,70985,0.6467652916908264
|
| 581 |
+
1750325502.346626,71085,0.6488394737243652
|
| 582 |
+
1750325620.054972,71185,0.650479793548584
|
| 583 |
+
1750325737.303225,71285,0.647771418094635
|
| 584 |
+
1750325854.335532,71385,0.6492316126823425
|
| 585 |
+
1750325971.239757,71485,0.6474711894989014
|
| 586 |
+
1750326088.271001,71585,0.6487935185432434
|
| 587 |
+
1750326816.132333,71698,0.6502522230148315
|
| 588 |
+
1750326933.194338,71798,0.6512677669525146
|
| 589 |
+
1750327050.175751,71898,0.6514301300048828
|
| 590 |
+
1750327167.495158,71998,0.6498296856880188
|
| 591 |
+
1750327284.9254432,72098,0.6511623859405518
|
| 592 |
+
1750327402.3306282,72198,0.6518124938011169
|
| 593 |
+
1750327519.8128698,72298,0.6498884558677673
|
| 594 |
+
1750327637.1756642,72398,0.6509025692939758
|
| 595 |
+
1750327754.545989,72498,0.6492757201194763
|
| 596 |
+
1750327872.0523992,72598,0.650468111038208
|
| 597 |
+
1750327989.467781,72698,0.6497953534126282
|
| 598 |
+
1750328106.91109,72798,0.6502175331115723
|
| 599 |
+
1750328224.329986,72898,0.6474730372428894
|
| 600 |
+
1750328343.594594,72998,0.6517916917800903
|
| 601 |
+
1750328461.7706828,73098,0.6490668058395386
|
| 602 |
+
1750328584.434108,73198,0.6493229269981384
|
| 603 |
+
1750328702.7211268,73298,0.6505508422851562
|
| 604 |
+
1750328820.1412349,73398,0.650176465511322
|
| 605 |
+
1750328937.665895,73498,0.6481127738952637
|
| 606 |
+
1750329055.284807,73598,0.6477389931678772
|
| 607 |
+
1750329172.769314,73698,0.649357259273529
|
| 608 |
+
1750329289.9146729,73798,0.6503124833106995
|
| 609 |
+
1750329407.0319202,73898,0.6481776833534241
|
| 610 |
+
1750329524.153242,73998,0.6477175354957581
|
| 611 |
+
1750329641.224229,74098,0.6486396789550781
|
| 612 |
+
1750329757.8959308,74198,0.6504963040351868
|
| 613 |
+
1750329874.259397,74298,0.6490269303321838
|
| 614 |
+
1750329990.60547,74398,0.6488014459609985
|
| 615 |
+
1750330108.3216069,74498,0.6469368934631348
|
| 616 |
+
1750330225.5629098,74598,0.649093747138977
|
| 617 |
+
1750330342.624664,74698,0.6504252552986145
|
| 618 |
+
1750331068.310646,74811,0.6512323617935181
|
| 619 |
+
1750331184.909408,74911,0.6507211923599243
|
| 620 |
+
1750331301.768451,75011,0.6504889726638794
|
| 621 |
+
1750331418.8345752,75111,0.6531997323036194
|
| 622 |
+
1750331535.65965,75211,0.6511715650558472
|
| 623 |
+
1750331652.5237691,75311,0.6501777172088623
|
| 624 |
+
1750331769.310387,75411,0.6503259539604187
|
| 625 |
+
1750331886.0936131,75511,0.650214433670044
|
| 626 |
+
1750332004.929323,75611,0.6505091786384583
|
| 627 |
+
1750332121.9415019,75711,0.6511041522026062
|
| 628 |
+
1750332238.997463,75811,0.6510563492774963
|
| 629 |
+
1750332356.059675,75911,0.6478394865989685
|
| 630 |
+
1750332473.099851,76011,0.6509203314781189
|
| 631 |
+
1750332592.230027,76111,0.6497402191162109
|
| 632 |
+
1750332716.068458,76211,0.6508136987686157
|
| 633 |
+
1750332834.6589582,76311,0.6482028365135193
|
| 634 |
+
1750332951.37817,76411,0.6488333344459534
|
| 635 |
+
1750333068.0262249,76511,0.6487273573875427
|
| 636 |
+
1750333184.994744,76611,0.6512953639030457
|
| 637 |
+
1750333301.572347,76711,0.6484797596931458
|
| 638 |
+
1750333418.234983,76811,0.650410532951355
|
| 639 |
+
1750333534.869468,76911,0.6503063440322876
|
| 640 |
+
1750333651.2686841,77011,0.6505821347236633
|
| 641 |
+
1750333767.357428,77111,0.6473523378372192
|
| 642 |
+
1750333883.067544,77211,0.6503553986549377
|
| 643 |
+
1750333999.2657301,77311,0.6490692496299744
|
| 644 |
+
1750334116.1577961,77411,0.6505526900291443
|
| 645 |
+
1750334232.3563468,77511,0.6493333578109741
|
| 646 |
+
1750334348.514185,77611,0.6487787961959839
|
| 647 |
+
1750334464.8386402,77711,0.6489228010177612
|
| 648 |
+
1750334581.1770968,77811,0.648578405380249
|
| 649 |
+
1750335305.515574,77924,0.6500704288482666
|
| 650 |
+
1750335421.451164,78024,0.6515514850616455
|
| 651 |
+
1750335539.5966039,78124,0.6511317491531372
|
| 652 |
+
1750335657.7199922,78224,0.6509822010993958
|
| 653 |
+
1750335775.054247,78324,0.6518909335136414
|
| 654 |
+
1750335891.6804101,78424,0.6532628536224365
|
| 655 |
+
1750336008.170381,78524,0.6509877443313599
|
| 656 |
+
1750336124.7923908,78624,0.6499687433242798
|
| 657 |
+
1750336241.045173,78724,0.6491813659667969
|
| 658 |
+
1750336357.363477,78824,0.6493700742721558
|
| 659 |
+
1750336473.563044,78924,0.6500067114830017
|
| 660 |
+
1750336592.577394,79024,0.6502812504768372
|
| 661 |
+
1750336710.247206,79124,0.6505931615829468
|
| 662 |
+
1750336826.520014,79224,0.6503744125366211
|
| 663 |
+
1750336942.538775,79324,0.6512524485588074
|
| 664 |
+
1750337058.567289,79424,0.6504086852073669
|
| 665 |
+
1750337174.580393,79524,0.6492910385131836
|
| 666 |
+
1750337290.514969,79624,0.647658109664917
|
| 667 |
+
1750337406.387496,79724,0.649397075176239
|
| 668 |
+
1750337522.300965,79824,0.6499987840652466
|
| 669 |
+
1750337637.9587681,79924,0.6483517289161682
|
| 670 |
+
1750337753.6101792,80024,0.6483290195465088
|
| 671 |
+
1750337868.836907,80124,0.6477181315422058
|
| 672 |
+
1750337984.170497,80224,0.6507726907730103
|
| 673 |
+
1750338099.979442,80324,0.6489865183830261
|
| 674 |
+
1750338215.673398,80424,0.6506593227386475
|
| 675 |
+
1750338331.3641798,80524,0.6513970494270325
|
| 676 |
+
1750338447.094371,80624,0.6503664255142212
|
| 677 |
+
1750338563.121328,80724,0.6507107615470886
|
| 678 |
+
1750338678.824536,80824,0.6490545272827148
|
| 679 |
+
1750338794.5569391,80924,0.649645209312439
|
| 680 |
+
1750339515.51679,81037,0.6530735492706299
|
| 681 |
+
1750339631.1298602,81137,0.651217520236969
|
| 682 |
+
1750339746.8785229,81237,0.6502726674079895
|
| 683 |
+
1750339862.773015,81337,0.6523235440254211
|
| 684 |
+
1750339979.4091032,81437,0.6500030755996704
|
| 685 |
+
1750340095.093813,81537,0.650927722454071
|
| 686 |
+
1750340210.6843278,81637,0.6517083048820496
|
| 687 |
+
1750340326.337384,81737,0.652924656867981
|
| 688 |
+
1750340442.3833878,81837,0.6513204574584961
|
| 689 |
+
1750340561.534982,81937,0.6520097851753235
|
| 690 |
+
1750340680.117114,82037,0.6500189900398254
|
| 691 |
+
1750340796.1884549,82137,0.6520116329193115
|
| 692 |
+
1750340911.915088,82237,0.6495036482810974
|
| 693 |
+
1750341027.543648,82337,0.6498492360115051
|
| 694 |
+
1750341143.395276,82437,0.650689959526062
|
| 695 |
+
1750341259.559998,82537,0.6496433615684509
|
| 696 |
+
1750341375.434209,82637,0.6507408022880554
|
| 697 |
+
1750341490.948511,82737,0.6499375104904175
|
| 698 |
+
1750341606.710141,82837,0.6490784287452698
|
| 699 |
+
1750341721.814566,82937,0.6491348147392273
|
| 700 |
+
1750341837.331264,83037,0.648245096206665
|
| 701 |
+
1750341953.211265,83137,0.6513798832893372
|
| 702 |
+
1750342069.0807161,83237,0.6488406658172607
|
| 703 |
+
1750342185.1426518,83337,0.6499031782150269
|
| 704 |
+
1750342300.783836,83437,0.6506298780441284
|
| 705 |
+
1750342416.592161,83537,0.6487769484519958
|
| 706 |
+
1750342532.407535,83637,0.650630533695221
|
| 707 |
+
1750342648.207677,83737,0.6485833525657654
|
| 708 |
+
1750342765.009377,83837,0.6488823294639587
|
| 709 |
+
1750342881.701005,83937,0.6510103940963745
|
| 710 |
+
1750342997.716446,84037,0.6501452326774597
|
| 711 |
+
1750343718.065668,84150,0.6516106724739075
|
| 712 |
+
1750343833.633333,84250,0.650243878364563
|
| 713 |
+
1750343949.533427,84350,0.650896430015564
|
| 714 |
+
1750344065.720902,84450,0.6520116329193115
|
| 715 |
+
1750344182.2826722,84550,0.651564359664917
|
| 716 |
+
1750344298.61443,84650,0.6520177721977234
|
| 717 |
+
1750344414.591867,84750,0.6516109108924866
|
| 718 |
+
1750344532.102746,84850,0.6519338488578796
|
| 719 |
+
1750344653.871885,84950,0.6527591943740845
|
| 720 |
+
1750344767.572968,85050,0.6507757306098938
|
| 721 |
+
1750344892.887903,85150,0.6527095437049866
|
| 722 |
+
1750379136.628101,85250,0.653421938419342
|
| 723 |
+
1750379250.96206,85350,0.650648295879364
|
| 724 |
+
1750379367.015548,85450,0.6494423747062683
|
| 725 |
+
1750379481.0428228,85550,0.6492549180984497
|
| 726 |
+
1750379593.498946,85650,0.6499558687210083
|
| 727 |
+
1750379705.965035,85750,0.6501905918121338
|
| 728 |
+
1750379820.272479,85850,0.6511335968971252
|
| 729 |
+
1750379936.0781498,85950,0.6478216648101807
|
| 730 |
+
1750380053.3225842,86050,0.6508284211158752
|
| 731 |
+
1750380171.982768,86150,0.648435652256012
|
| 732 |
+
1750380291.6105971,86250,0.6498792767524719
|
| 733 |
+
1750380411.511211,86350,0.6504166722297668
|
| 734 |
+
1750380533.9704819,86450,0.6495949625968933
|
| 735 |
+
1750380659.838042,86550,0.6476697325706482
|
| 736 |
+
1750380777.568034,86650,0.6459124088287354
|
| 737 |
+
1750380893.660362,86750,0.648826003074646
|
| 738 |
+
1750381010.8516119,86850,0.6514846682548523
|
| 739 |
+
1750381130.6362581,86950,0.6510809063911438
|
| 740 |
+
1750381252.80625,87050,0.6502947211265564
|
| 741 |
+
1750381373.937963,87150,0.6487561464309692
|
| 742 |
+
1750382120.180901,87263,0.6513592600822449
|
| 743 |
+
1750382235.866944,87363,0.6503798961639404
|
| 744 |
+
1750382365.523697,87463,0.6513124704360962
|
| 745 |
+
1750382491.619613,87563,0.6521127223968506
|
| 746 |
+
1750382614.830884,87663,0.6514797806739807
|
| 747 |
+
1750382738.181688,87763,0.6523486375808716
|
| 748 |
+
1750382858.773399,87863,0.6495551466941833
|
| 749 |
+
1750382979.250171,87963,0.6515551209449768
|
| 750 |
+
1750383101.317154,88063,0.6511887311935425
|
| 751 |
+
1750383223.180498,88163,0.6504589319229126
|
| 752 |
+
1750383346.266277,88263,0.6508162021636963
|
| 753 |
+
1750383475.137713,88363,0.6480140686035156
|
| 754 |
+
1750383602.4944682,88463,0.6508939862251282
|
| 755 |
+
1750383731.6209762,88563,0.6486960649490356
|
| 756 |
+
1750383862.741338,88663,0.6508578658103943
|
| 757 |
+
1750383990.9452841,88763,0.6488032937049866
|
| 758 |
+
1750384123.171714,88863,0.6506158113479614
|
| 759 |
+
1750384254.607017,88963,0.6502849459648132
|
| 760 |
+
1750384386.85204,89063,0.6513688564300537
|
| 761 |
+
1750384520.881977,89163,0.6468964219093323
|
| 762 |
+
1750384654.052042,89263,0.6504650712013245
|
| 763 |
+
1750384786.019552,89363,0.6480398178100586
|
| 764 |
+
1750384917.7445939,89463,0.6507499814033508
|
| 765 |
+
1750385049.460108,89563,0.6491605639457703
|
| 766 |
+
1750385185.126712,89663,0.6488234996795654
|
| 767 |
+
1750385320.041018,89763,0.6487879753112793
|
| 768 |
+
1750385454.420981,89863,0.6473633646965027
|
| 769 |
+
1750385587.665528,89963,0.648159921169281
|
| 770 |
+
1750385721.327701,90063,0.6492407917976379
|
| 771 |
+
1750385853.920752,90163,0.6485766172409058
|
| 772 |
+
1750385984.836493,90263,0.6495043039321899
|
| 773 |
+
1750386771.1338828,90376,0.6527379751205444
|
| 774 |
+
1750386891.5614269,90476,0.6509515643119812
|
| 775 |
+
1750387012.4985468,90576,0.6514166593551636
|
| 776 |
+
1750387133.300371,90676,0.6527132391929626
|
| 777 |
+
1750387253.684694,90776,0.6516789197921753
|
| 778 |
+
1750387374.3121002,90876,0.6511207222938538
|
| 779 |
+
1750387495.17602,90976,0.6511115431785583
|
| 780 |
+
1750387616.024838,91076,0.6512408256530762
|
| 781 |
+
1750387737.2461271,91176,0.6528302431106567
|
| 782 |
+
1750387858.791801,91276,0.6500024795532227
|
| 783 |
+
1750387980.692109,91376,0.6507279276847839
|
| 784 |
+
1750388103.426921,91476,0.6507695913314819
|
| 785 |
+
1750388226.078679,91576,0.6498388648033142
|
| 786 |
+
1750388348.178328,91676,0.6508400440216064
|
| 787 |
+
1750388469.837296,91776,0.6519038081169128
|
| 788 |
+
1750388591.572599,91876,0.6501691341400146
|
| 789 |
+
1750388712.8626308,91976,0.6503039002418518
|
| 790 |
+
1750388841.8957708,92076,0.6493204832077026
|
| 791 |
+
1750388972.115207,92176,0.6491703391075134
|
| 792 |
+
1750389098.6719759,92276,0.6496703624725342
|
| 793 |
+
1750389223.995049,92376,0.6493363976478577
|
| 794 |
+
1750389351.848442,92476,0.6512628793716431
|
| 795 |
+
1750389476.405678,92576,0.6492487788200378
|
| 796 |
+
1750389601.319693,92676,0.650007963180542
|
| 797 |
+
1750389726.894148,92776,0.6495955586433411
|
| 798 |
+
1750389850.07146,92876,0.6499528288841248
|
| 799 |
+
1750389972.1826189,92976,0.6504693627357483
|
| 800 |
+
1750390090.092722,93076,0.6502230167388916
|
| 801 |
+
1750390205.974981,93176,0.651092529296875
|
| 802 |
+
1750390323.627625,93276,0.6483596563339233
|
| 803 |
+
1750390442.0130482,93376,0.6499479413032532
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749977364.335526,99,0.28588664531707764
|
| 3 |
+
1749977479.028688,199,0.3745232820510864
|
| 4 |
+
1749977593.705034,299,0.4085974395275116
|
| 5 |
+
1749977708.9612758,399,0.43084007501602173
|
| 6 |
+
1749977826.4679549,499,0.449031263589859
|
| 7 |
+
1749977943.257545,599,0.4616672694683075
|
| 8 |
+
1749981000.7470999,722,0.4902438521385193
|
| 9 |
+
1749981116.730113,822,0.5027604103088379
|
| 10 |
+
1749981231.429653,922,0.5114178657531738
|
| 11 |
+
1749981346.264277,1022,0.520479142665863
|
| 12 |
+
1749981460.89554,1122,0.5261605381965637
|
| 13 |
+
1749981575.62288,1222,0.5330343246459961
|
| 14 |
+
1749984660.365978,2591,0.5397835373878479
|
| 15 |
+
1749984773.941619,2691,0.5454724431037903
|
| 16 |
+
1749984888.513812,2791,0.5498265624046326
|
| 17 |
+
1749985002.865367,2891,0.5563768148422241
|
| 18 |
+
1749985117.15324,2991,0.5628455877304077
|
| 19 |
+
1749985231.433402,3091,0.5667720437049866
|
| 20 |
+
1749985345.998219,3191,0.569880485534668
|
| 21 |
+
1749985460.906912,3291,0.5726421475410461
|
| 22 |
+
1749985575.3593469,3391,0.5760772228240967
|
| 23 |
+
1749985691.7491999,3491,0.5811237692832947
|
| 24 |
+
1749985815.587678,3591,0.580393373966217
|
| 25 |
+
1749985929.7506618,3691,0.5871672630310059
|
| 26 |
+
1749989010.094342,3837,0.5913770794868469
|
| 27 |
+
1749989123.943269,3937,0.5954620242118835
|
| 28 |
+
1749989238.09817,4037,0.5997714400291443
|
| 29 |
+
1749989352.3528981,4137,0.6012462973594666
|
| 30 |
+
1749989466.4953432,4237,0.6006200909614563
|
| 31 |
+
1749989580.637305,4337,0.6004209518432617
|
| 32 |
+
1749989694.765909,4437,0.6052303910255432
|
| 33 |
+
1749989818.2607958,4537,0.6040643453598022
|
| 34 |
+
1749989942.487204,4637,0.6051568388938904
|
| 35 |
+
1749990056.146515,4737,0.6058547496795654
|
| 36 |
+
1749990169.766712,4837,0.6068351864814758
|
| 37 |
+
1749990283.4751868,4937,0.6084963083267212
|
| 38 |
+
1749993352.9395578,7571,0.6084688901901245
|
| 39 |
+
1749993466.648562,7671,0.6101813912391663
|
| 40 |
+
1749993580.709169,7771,0.6113382577896118
|
| 41 |
+
1749993694.6656058,7871,0.6122714281082153
|
| 42 |
+
1749993815.172954,7971,0.6128994822502136
|
| 43 |
+
1749993930.671676,8071,0.6156709790229797
|
| 44 |
+
1749994044.35568,8171,0.6157383322715759
|
| 45 |
+
1749994158.1081998,8271,0.6165416836738586
|
| 46 |
+
1749994272.019066,8371,0.6152009963989258
|
| 47 |
+
1749994385.881603,8471,0.6155833601951599
|
| 48 |
+
1749994499.466678,8571,0.6163216829299927
|
| 49 |
+
1749994613.1914399,8671,0.6180055141448975
|
| 50 |
+
1749994726.919243,8771,0.6173602938652039
|
| 51 |
+
1749994840.688326,8871,0.61722731590271
|
| 52 |
+
1749994954.296221,8971,0.6184638738632202
|
| 53 |
+
1749995068.586358,9071,0.6206642389297485
|
| 54 |
+
1749995181.909915,9171,0.619077205657959
|
| 55 |
+
1749995294.907391,9271,0.6202395558357239
|
| 56 |
+
1749998394.780915,9439,0.6250227689743042
|
| 57 |
+
1749998509.561884,9539,0.6268419027328491
|
| 58 |
+
1749998624.169394,9639,0.6251703500747681
|
| 59 |
+
1749998738.764092,9739,0.6259638667106628
|
| 60 |
+
1749998853.596479,9839,0.6260729432106018
|
| 61 |
+
1749998970.952068,9939,0.6260238885879517
|
| 62 |
+
1749999090.790731,10039,0.6284595727920532
|
| 63 |
+
1749999204.6298828,10139,0.6254178881645203
|
| 64 |
+
1749999318.84687,10239,0.6281286478042603
|
| 65 |
+
1749999433.180917,10339,0.6281035542488098
|
| 66 |
+
1749999547.5268722,10439,0.627795934677124
|
| 67 |
+
1749999662.015342,10539,0.6291084289550781
|
| 68 |
+
1749999776.5737789,10639,0.6277438998222351
|
| 69 |
+
1749999890.931427,10739,0.6303106546401978
|
| 70 |
+
1750000005.481672,10839,0.6280863881111145
|
| 71 |
+
1750000119.87854,10939,0.6273688673973083
|
| 72 |
+
1750000234.334095,11039,0.6276838183403015
|
| 73 |
+
1750000348.707572,11139,0.6283755898475647
|
| 74 |
+
1750003528.256055,15045,0.62782883644104
|
| 75 |
+
1750003642.596102,15145,0.6282438635826111
|
| 76 |
+
1750003757.3387341,15245,0.6293431520462036
|
| 77 |
+
1750003873.315587,15345,0.6315287947654724
|
| 78 |
+
1750003988.3306408,15445,0.6304099559783936
|
| 79 |
+
1750004103.302248,15545,0.6282867789268494
|
| 80 |
+
1750004218.435863,15645,0.6298266053199768
|
| 81 |
+
1750004333.339542,15745,0.6305355429649353
|
| 82 |
+
1750004448.350045,15845,0.6284080743789673
|
| 83 |
+
1750004563.301732,15945,0.6274583339691162
|
| 84 |
+
1750004678.213912,16045,0.6294417977333069
|
| 85 |
+
1750004793.1398342,16145,0.6306874752044678
|
| 86 |
+
1750004908.186692,16245,0.631015956401825
|
| 87 |
+
1750005023.021367,16345,0.6281139850616455
|
| 88 |
+
1750005138.246336,16445,0.6303842067718506
|
| 89 |
+
1750005254.049628,16545,0.6315600275993347
|
| 90 |
+
1750005371.7509718,16645,0.6306795477867126
|
| 91 |
+
1750005486.9075148,16745,0.6326752305030823
|
| 92 |
+
1750005602.060213,16845,0.6310612559318542
|
| 93 |
+
1750005717.2153442,16945,0.6297892332077026
|
| 94 |
+
1750005832.324348,17045,0.6305006146430969
|
| 95 |
+
1750005947.536924,17145,0.6308155655860901
|
| 96 |
+
1750006063.083155,17245,0.6321685314178467
|
| 97 |
+
1750006178.734329,17345,0.6319099068641663
|
| 98 |
+
1750009399.704169,17536,0.6343737244606018
|
| 99 |
+
1750009514.3865962,17636,0.6345661878585815
|
| 100 |
+
1750009629.909656,17736,0.6372830867767334
|
| 101 |
+
1750009746.8921971,17836,0.635703444480896
|
| 102 |
+
1750009864.805543,17936,0.6337475180625916
|
| 103 |
+
1750009981.668085,18036,0.6362861394882202
|
| 104 |
+
1750010097.8974762,18136,0.6354865431785583
|
| 105 |
+
1750010214.100458,18236,0.6353400945663452
|
| 106 |
+
1750010331.977967,18336,0.6348345875740051
|
| 107 |
+
1750010448.701419,18436,0.6347469091415405
|
| 108 |
+
1750010565.3602402,18536,0.636064350605011
|
| 109 |
+
1750010681.492363,18636,0.6342193484306335
|
| 110 |
+
1750010795.889803,18736,0.6365196108818054
|
| 111 |
+
1750010913.419132,18836,0.6347261071205139
|
| 112 |
+
1750011028.388973,18936,0.6362769603729248
|
| 113 |
+
1750011148.152082,19036,0.6334969401359558
|
| 114 |
+
1750011271.832135,19136,0.6362677812576294
|
| 115 |
+
1750011393.6038358,19236,0.634259819984436
|
| 116 |
+
1750011512.205483,19336,0.6369742751121521
|
| 117 |
+
1750011627.390275,19436,0.6369025707244873
|
| 118 |
+
1750011741.720696,19536,0.6365998983383179
|
| 119 |
+
1750011855.223976,19636,0.6367641091346741
|
| 120 |
+
1750011969.2611399,19736,0.6374938488006592
|
| 121 |
+
1750012087.322599,19836,0.6358149647712708
|
| 122 |
+
1750015355.02448,25003,0.6355385780334473
|
| 123 |
+
1750015471.710523,25103,0.6348210573196411
|
| 124 |
+
1750015585.106417,25203,0.6347346901893616
|
| 125 |
+
1750015699.149504,25303,0.6330912709236145
|
| 126 |
+
1750015814.148569,25403,0.6347230672836304
|
| 127 |
+
1750015930.20784,25503,0.6339718103408813
|
| 128 |
+
1750016046.3103628,25603,0.6341593265533447
|
| 129 |
+
1750016161.9822211,25703,0.6346629858016968
|
| 130 |
+
1750016277.6236448,25803,0.6351673007011414
|
| 131 |
+
1750016396.300969,25903,0.6353664398193359
|
| 132 |
+
1750016521.229613,26003,0.634468138217926
|
| 133 |
+
1750016644.04537,26103,0.6366844177246094
|
| 134 |
+
1750016764.8677518,26203,0.6336568593978882
|
| 135 |
+
1750016884.577158,26303,0.6336439847946167
|
| 136 |
+
1750017013.212147,26403,0.636149525642395
|
| 137 |
+
1750017130.1586502,26503,0.6332034468650818
|
| 138 |
+
1750017248.9129531,26603,0.6349037885665894
|
| 139 |
+
1750017368.812139,26703,0.634620726108551
|
| 140 |
+
1750017488.794925,26803,0.6378597021102905
|
| 141 |
+
1750017612.375882,26903,0.6332463026046753
|
| 142 |
+
1750017732.743781,27003,0.6360012292861938
|
| 143 |
+
1750017848.489934,27103,0.6350870132446289
|
| 144 |
+
1750017963.6619039,27203,0.6337885856628418
|
| 145 |
+
1750018078.550231,27303,0.6343676447868347
|
| 146 |
+
1750018192.748084,27403,0.6369479298591614
|
| 147 |
+
1750018306.010696,27503,0.6350710988044739
|
| 148 |
+
1750018419.404276,27603,0.6357254981994629
|
| 149 |
+
1750018533.5574532,27703,0.6356292963027954
|
| 150 |
+
1750018648.440438,27803,0.6368480324745178
|
| 151 |
+
1750018764.887423,27903,0.634440541267395
|
| 152 |
+
1750018879.13066,28003,0.6343063712120056
|
| 153 |
+
1750021993.337842,28116,0.6385252475738525
|
| 154 |
+
1750022108.625397,28216,0.6396666765213013
|
| 155 |
+
1750022225.3094559,28316,0.6405214667320251
|
| 156 |
+
1750022343.5684,28416,0.6380766034126282
|
| 157 |
+
1750022465.6295328,28516,0.638329029083252
|
| 158 |
+
1750022585.200331,28616,0.6370465755462646
|
| 159 |
+
1750022698.726985,28716,0.6370342969894409
|
| 160 |
+
1750022813.0666711,28816,0.6385851502418518
|
| 161 |
+
1750022926.821846,28916,0.636216938495636
|
| 162 |
+
1750023040.3530712,29016,0.6401445865631104
|
| 163 |
+
1750023154.610132,29116,0.6370845437049866
|
| 164 |
+
1750023269.373156,29216,0.63744056224823
|
| 165 |
+
1750023384.615044,29316,0.6365986466407776
|
| 166 |
+
1750023500.060214,29416,0.6377432346343994
|
| 167 |
+
1750023615.888483,29516,0.6389510035514832
|
| 168 |
+
1750023738.4718232,29616,0.6373738050460815
|
| 169 |
+
1750023865.0513432,29716,0.635953426361084
|
| 170 |
+
1750023980.4567528,29816,0.6385453343391418
|
| 171 |
+
1750024102.657131,29916,0.6374601721763611
|
| 172 |
+
1750024225.475231,30016,0.6374080777168274
|
| 173 |
+
1750024341.848771,30116,0.6396329402923584
|
| 174 |
+
1750024459.552284,30216,0.638360321521759
|
| 175 |
+
1750024577.683449,30316,0.6390722990036011
|
| 176 |
+
1750024695.620969,30416,0.6384742856025696
|
| 177 |
+
1750024813.15393,30516,0.6379038095474243
|
| 178 |
+
1750024931.286835,30616,0.6387065052986145
|
| 179 |
+
1750025049.001534,30716,0.6384369134902954
|
| 180 |
+
1750025163.835922,30816,0.6373277902603149
|
| 181 |
+
1750025281.5920188,30916,0.6373835802078247
|
| 182 |
+
1750025397.560318,31016,0.6376991271972656
|
| 183 |
+
1750025513.6170099,31116,0.6400925517082214
|
| 184 |
+
1750026258.913583,31229,0.6403970718383789
|
| 185 |
+
1750026376.468122,31329,0.638996958732605
|
| 186 |
+
1750026494.82906,31429,0.6399515867233276
|
| 187 |
+
1750026612.074744,31529,0.6390226483345032
|
| 188 |
+
1750026729.107693,31629,0.6389669179916382
|
| 189 |
+
1750026849.120147,31729,0.6387475728988647
|
| 190 |
+
1750026966.936425,31829,0.6384184956550598
|
| 191 |
+
1750027083.108604,31929,0.637732207775116
|
| 192 |
+
1750027199.450992,32029,0.6385949850082397
|
| 193 |
+
1750027323.917527,32129,0.6390410661697388
|
| 194 |
+
1750027450.620377,32229,0.6398792862892151
|
| 195 |
+
1750027566.849419,32329,0.6390214562416077
|
| 196 |
+
1750027683.289695,32429,0.6398676633834839
|
| 197 |
+
1750027802.114905,32529,0.6373082399368286
|
| 198 |
+
1750027924.2528,32629,0.6389638781547546
|
| 199 |
+
1750028041.636798,32729,0.6387016177177429
|
| 200 |
+
1750028159.982292,32829,0.6404320001602173
|
| 201 |
+
1750028276.4083111,32929,0.6395410299301147
|
| 202 |
+
1750028392.819813,33029,0.6389172673225403
|
| 203 |
+
1750028510.07862,33129,0.6388676166534424
|
| 204 |
+
1750028626.766732,33229,0.6375190019607544
|
| 205 |
+
1750028743.2398121,33329,0.6382947564125061
|
| 206 |
+
1750028859.6580448,33429,0.6393235325813293
|
| 207 |
+
1750028977.066697,33529,0.639760434627533
|
| 208 |
+
1750029094.035702,33629,0.6389570832252502
|
| 209 |
+
1750029212.013004,33729,0.6381213068962097
|
| 210 |
+
1750029329.2934341,33829,0.6403063535690308
|
| 211 |
+
1750029445.9281878,33929,0.6385067105293274
|
| 212 |
+
1750029562.146507,34029,0.6391458511352539
|
| 213 |
+
1750029678.391045,34129,0.6408572196960449
|
| 214 |
+
1750029794.805592,34229,0.6399093270301819
|
| 215 |
+
1750030543.7282188,34342,0.6434916257858276
|
| 216 |
+
1750030659.910597,34442,0.6414521932601929
|
| 217 |
+
1750030778.590385,34542,0.6412996053695679
|
| 218 |
+
1750030905.824296,34642,0.6434313654899597
|
| 219 |
+
1750031037.9024498,34742,0.6433621048927307
|
| 220 |
+
1750031153.503683,34842,0.6425729393959045
|
| 221 |
+
1750031269.77982,34942,0.6430465579032898
|
| 222 |
+
1750031389.400544,35042,0.6422536969184875
|
| 223 |
+
1750031508.265698,35142,0.6426635980606079
|
| 224 |
+
1750031626.0132139,35242,0.6429221630096436
|
| 225 |
+
1750031743.025014,35342,0.6404129862785339
|
| 226 |
+
1750031885.475497,35442,0.6392824649810791
|
| 227 |
+
1750032010.656299,35542,0.6409381031990051
|
| 228 |
+
1750032127.904196,35642,0.640123188495636
|
| 229 |
+
1750032244.403975,35742,0.6418627500534058
|
| 230 |
+
1750032360.933419,35842,0.6440447568893433
|
| 231 |
+
1750032477.541352,35942,0.6406194567680359
|
| 232 |
+
1750032594.091157,36042,0.6400943398475647
|
| 233 |
+
1750032710.6262121,36142,0.642808198928833
|
| 234 |
+
1750032827.175454,36242,0.6403511166572571
|
| 235 |
+
1750032943.481264,36342,0.6408284306526184
|
| 236 |
+
1750033059.724577,36442,0.6408045291900635
|
| 237 |
+
1750033176.860049,36542,0.6395043134689331
|
| 238 |
+
1750033293.465724,36642,0.6416427493095398
|
| 239 |
+
1750033410.007821,36742,0.6428804993629456
|
| 240 |
+
1750033526.524544,36842,0.6391329765319824
|
| 241 |
+
1750033642.8618398,36942,0.6429295539855957
|
| 242 |
+
1750033759.300628,37042,0.640650749206543
|
| 243 |
+
1750033875.82361,37142,0.6393449902534485
|
| 244 |
+
1750033992.538295,37242,0.6417598128318787
|
| 245 |
+
1750034109.142217,37342,0.6427794098854065
|
| 246 |
+
1750034859.436337,37455,0.6443113088607788
|
| 247 |
+
1750034991.047751,37555,0.6442849040031433
|
| 248 |
+
1750035122.431795,37655,0.6436274647712708
|
| 249 |
+
1750035241.4052138,37755,0.6420379877090454
|
| 250 |
+
1750035356.513953,37855,0.6436770558357239
|
| 251 |
+
1750035471.387222,37955,0.6419662833213806
|
| 252 |
+
1750035587.5855339,38055,0.6452144384384155
|
| 253 |
+
1750035703.43985,38155,0.6444374918937683
|
| 254 |
+
1750035819.077642,38255,0.643208920955658
|
| 255 |
+
1750035936.4567711,38355,0.6431666612625122
|
| 256 |
+
1750036059.2491798,38455,0.642463207244873
|
| 257 |
+
1750036180.256274,38555,0.6442769765853882
|
| 258 |
+
1750036294.659355,38655,0.6419767141342163
|
| 259 |
+
1750036409.479987,38755,0.643569827079773
|
| 260 |
+
1750036527.079,38855,0.6429632306098938
|
| 261 |
+
1750036642.9094348,38955,0.6435980200767517
|
| 262 |
+
1750036760.497163,39055,0.6430018544197083
|
| 263 |
+
1750036875.5200331,39155,0.6436997652053833
|
| 264 |
+
1750036990.279715,39255,0.6439846754074097
|
| 265 |
+
1750037105.5676339,39355,0.6411581039428711
|
| 266 |
+
1750037220.069133,39455,0.640044093132019
|
| 267 |
+
1750037336.2036638,39555,0.6431978940963745
|
| 268 |
+
1750037451.888465,39655,0.6445000171661377
|
| 269 |
+
1750037569.520401,39755,0.6424607634544373
|
| 270 |
+
1750037687.51542,39855,0.641943633556366
|
| 271 |
+
1750037806.725086,39955,0.6436893343925476
|
| 272 |
+
1750037926.555726,40055,0.6400434970855713
|
| 273 |
+
1750038046.8097868,40155,0.6440201997756958
|
| 274 |
+
1750038169.072709,40255,0.6413872241973877
|
| 275 |
+
1750038294.868284,40355,0.6422027945518494
|
| 276 |
+
1750038427.1307719,40455,0.6437346935272217
|
| 277 |
+
1750039231.8825622,40568,0.6437143683433533
|
| 278 |
+
1750039402.4296,40668,0.646799623966217
|
| 279 |
+
1750039525.9227898,40768,0.6444270610809326
|
| 280 |
+
1750039661.6580212,40868,0.6451084613800049
|
| 281 |
+
1750039781.981627,40968,0.6428462266921997
|
| 282 |
+
1750039905.767744,41068,0.6430606842041016
|
| 283 |
+
1750040024.021368,41168,0.645976722240448
|
| 284 |
+
1750040143.7686882,41268,0.6439148187637329
|
| 285 |
+
1750040266.25108,41368,0.6431078314781189
|
| 286 |
+
1750040427.311769,41468,0.6447769403457642
|
| 287 |
+
1750040578.4353669,41568,0.6441084742546082
|
| 288 |
+
1750040707.937697,41668,0.6429558992385864
|
| 289 |
+
1750040836.492491,41768,0.6436746120452881
|
| 290 |
+
1750040960.603317,41868,0.6442616581916809
|
| 291 |
+
1750041086.99233,41968,0.6462548971176147
|
| 292 |
+
1750041210.984496,42068,0.6450679898262024
|
| 293 |
+
1750041333.8359601,42168,0.6441292762756348
|
| 294 |
+
1750041456.096718,42268,0.6440931558609009
|
| 295 |
+
1750041581.238783,42368,0.6456066370010376
|
| 296 |
+
1750041711.6029398,42468,0.6443829536437988
|
| 297 |
+
1750041852.4890041,42568,0.6441673040390015
|
| 298 |
+
1750042000.659992,42668,0.6439050436019897
|
| 299 |
+
1750042130.042489,42768,0.6438921689987183
|
| 300 |
+
1750042256.760677,42868,0.6464895606040955
|
| 301 |
+
1750042385.204942,42968,0.6443406939506531
|
| 302 |
+
1750042515.443064,43068,0.6432965993881226
|
| 303 |
+
1750042646.2321632,43168,0.639984667301178
|
| 304 |
+
1750042783.133068,43268,0.6438658237457275
|
| 305 |
+
1750042921.271679,43368,0.6428633332252502
|
| 306 |
+
1750043051.095117,43468,0.6431298851966858
|
| 307 |
+
1750043179.576931,43568,0.6439454555511475
|
| 308 |
+
1750043977.9631848,43681,0.6456713080406189
|
| 309 |
+
1750044103.102685,43781,0.6449705958366394
|
| 310 |
+
1750044229.522302,43881,0.6451991200447083
|
| 311 |
+
1750044358.612916,43981,0.6455398201942444
|
| 312 |
+
1750044486.987199,44081,0.6456978917121887
|
| 313 |
+
1750044617.962983,44181,0.645112156867981
|
| 314 |
+
1750044750.4446,44281,0.6441035270690918
|
| 315 |
+
1750044889.4328802,44381,0.6455913186073303
|
| 316 |
+
1750045027.381598,44481,0.6464969515800476
|
| 317 |
+
1750045161.603585,44581,0.6468664407730103
|
| 318 |
+
1750045290.518494,44681,0.6445355415344238
|
| 319 |
+
1750045422.188066,44781,0.6446065902709961
|
| 320 |
+
1750045575.209614,44881,0.6442456841468811
|
| 321 |
+
1750045707.2751548,44981,0.6456243991851807
|
| 322 |
+
1750045839.254016,45081,0.6449227929115295
|
| 323 |
+
1750045971.6699488,45181,0.6455116271972656
|
| 324 |
+
1750046104.338908,45281,0.6448302865028381
|
| 325 |
+
1750046239.79817,45381,0.6442965865135193
|
| 326 |
+
1750046373.61981,45481,0.6440269351005554
|
| 327 |
+
1750046504.145928,45581,0.6443995237350464
|
| 328 |
+
1750046636.505515,45681,0.6450520753860474
|
| 329 |
+
1750046767.470301,45781,0.6460845470428467
|
| 330 |
+
1750046899.701083,45881,0.6460974216461182
|
| 331 |
+
1750047032.155242,45981,0.644556999206543
|
| 332 |
+
1750047163.9177809,46081,0.6430435180664062
|
| 333 |
+
1750047293.2538261,46181,0.6419926285743713
|
| 334 |
+
1750047427.440437,46281,0.6454858779907227
|
| 335 |
+
1750047564.2944481,46381,0.6454938650131226
|
| 336 |
+
1750047701.456853,46481,0.6454050540924072
|
| 337 |
+
1750047838.4844108,46581,0.6446617841720581
|
| 338 |
+
1750047977.628417,46681,0.6460968255996704
|
| 339 |
+
1750048840.957935,46794,0.6473008990287781
|
| 340 |
+
1750048973.2961721,46894,0.6472830772399902
|
| 341 |
+
1750049107.7492101,46994,0.6469509601593018
|
| 342 |
+
1750049242.322348,47094,0.6464478969573975
|
| 343 |
+
1750049371.5563462,47194,0.6477059125900269
|
| 344 |
+
1750049513.91999,47294,0.6474650502204895
|
| 345 |
+
1750049657.056936,47394,0.6462150812149048
|
| 346 |
+
1750049797.445607,47494,0.6473970413208008
|
| 347 |
+
1750049932.04602,47594,0.6489577293395996
|
| 348 |
+
1750050066.7716742,47694,0.6467947363853455
|
| 349 |
+
1750050234.0854049,47794,0.6435348987579346
|
| 350 |
+
1750050351.418953,47894,0.645657479763031
|
| 351 |
+
1750050494.174859,47994,0.6457021832466125
|
| 352 |
+
1750050614.79619,48094,0.6438719630241394
|
| 353 |
+
1750050732.840872,48194,0.6474326252937317
|
| 354 |
+
1750050850.510725,48294,0.6457800269126892
|
| 355 |
+
1750050967.865982,48394,0.6442181468009949
|
| 356 |
+
1750051084.869573,48494,0.6448596715927124
|
| 357 |
+
1750051201.898559,48594,0.64415442943573
|
| 358 |
+
1750051318.836611,48694,0.6448884606361389
|
| 359 |
+
1750051435.858627,48794,0.6452077031135559
|
| 360 |
+
1750051552.890208,48894,0.6476826071739197
|
| 361 |
+
1750051670.031324,48994,0.6472010016441345
|
| 362 |
+
1750051787.218723,49094,0.644515335559845
|
| 363 |
+
1750051904.636208,49194,0.6462040543556213
|
| 364 |
+
1750052021.8771899,49294,0.6463437676429749
|
| 365 |
+
1750052139.210658,49394,0.642183244228363
|
| 366 |
+
1750052256.490703,49494,0.6454485058784485
|
| 367 |
+
1750052374.7578518,49594,0.6468566060066223
|
| 368 |
+
1750052492.334172,49694,0.6466029286384583
|
| 369 |
+
1750052609.824669,49794,0.642487108707428
|
| 370 |
+
1750053365.9224792,49907,0.6476205587387085
|
| 371 |
+
1750053481.476015,50007,0.6485821008682251
|
| 372 |
+
1750053599.037875,50107,0.6469797492027283
|
| 373 |
+
1750053716.7924638,50207,0.6489007472991943
|
| 374 |
+
1750053834.669989,50307,0.6461966633796692
|
| 375 |
+
1750053952.750252,50407,0.6475735306739807
|
| 376 |
+
1750054070.644894,50507,0.6470704674720764
|
| 377 |
+
1750054188.490614,50607,0.6479043960571289
|
| 378 |
+
1750054306.742827,50707,0.6474724411964417
|
| 379 |
+
1750054425.004918,50807,0.6474246382713318
|
| 380 |
+
1750054542.6853938,50907,0.6465471982955933
|
| 381 |
+
1750054660.682938,51007,0.6475410461425781
|
| 382 |
+
1750054778.191925,51107,0.6458927989006042
|
| 383 |
+
1750054895.7676291,51207,0.6464687585830688
|
| 384 |
+
1750055013.378505,51307,0.6474987864494324
|
| 385 |
+
1750055130.9935381,51407,0.6478688716888428
|
| 386 |
+
1750055248.827314,51507,0.6451807618141174
|
| 387 |
+
1750055366.549689,51607,0.6476121544837952
|
| 388 |
+
1750055484.1689339,51707,0.6458885073661804
|
| 389 |
+
1750055601.730171,51807,0.6455968022346497
|
| 390 |
+
1750055719.2045949,51907,0.6460833549499512
|
| 391 |
+
1750055836.656107,52007,0.6462745070457458
|
| 392 |
+
1750055954.067919,52107,0.6452885866165161
|
| 393 |
+
1750056071.4017038,52207,0.6477634906768799
|
| 394 |
+
1750056188.7744892,52307,0.6444822549819946
|
| 395 |
+
1750056306.286787,52407,0.6445361375808716
|
| 396 |
+
1750056424.055385,52507,0.6447904706001282
|
| 397 |
+
1750056541.635556,52607,0.6447542905807495
|
| 398 |
+
1750056665.746124,52707,0.6455514430999756
|
| 399 |
+
1750056796.684893,52807,0.6456115245819092
|
| 400 |
+
1750056914.330396,52907,0.6455478072166443
|
| 401 |
+
1750057654.000876,53020,0.6493867635726929
|
| 402 |
+
1750057770.534932,53120,0.64915931224823
|
| 403 |
+
1750057887.580597,53220,0.6495104432106018
|
| 404 |
+
1750058004.837473,53320,0.6484528183937073
|
| 405 |
+
1750058126.610375,53420,0.648715078830719
|
| 406 |
+
1750058243.350247,53520,0.6468033194541931
|
| 407 |
+
1750058360.9682329,53620,0.6470349431037903
|
| 408 |
+
1750058478.53059,53720,0.6484313607215881
|
| 409 |
+
1750058595.484344,53820,0.6487898230552673
|
| 410 |
+
1750058712.32459,53920,0.6486519575119019
|
| 411 |
+
1750058829.734424,54020,0.6468756198883057
|
| 412 |
+
1750058949.057058,54120,0.6460962295532227
|
| 413 |
+
1750059067.043211,54220,0.645268976688385
|
| 414 |
+
1750059183.71031,54320,0.6469576954841614
|
| 415 |
+
1750059300.550472,54420,0.6475827097892761
|
| 416 |
+
1750059419.021388,54520,0.6452622413635254
|
| 417 |
+
1750059535.992765,54620,0.6485557556152344
|
| 418 |
+
1750059652.9171278,54720,0.6454381346702576
|
| 419 |
+
1750059770.172056,54820,0.6457910537719727
|
| 420 |
+
1750059887.1253629,54920,0.6453903317451477
|
| 421 |
+
1750060004.00172,55020,0.6462647318840027
|
| 422 |
+
1750060120.898835,55120,0.6482334733009338
|
| 423 |
+
1750060238.7914271,55220,0.6449724435806274
|
| 424 |
+
1750060356.6630569,55320,0.6488921642303467
|
| 425 |
+
1750060483.16862,55420,0.6467518210411072
|
| 426 |
+
1750060608.273094,55520,0.6468780636787415
|
| 427 |
+
1750060724.648673,55620,0.6457769870758057
|
| 428 |
+
1750060841.193019,55720,0.6451464295387268
|
| 429 |
+
1750060957.847646,55820,0.6480183601379395
|
| 430 |
+
1750061074.541267,55920,0.6465483903884888
|
| 431 |
+
1750061191.1322691,56020,0.6459154486656189
|
| 432 |
+
1750061929.540071,56133,0.6493289470672607
|
| 433 |
+
1750062046.4490082,56233,0.6499944925308228
|
| 434 |
+
1750062162.9919949,56333,0.6509313583374023
|
| 435 |
+
1750062280.384002,56433,0.6472922563552856
|
| 436 |
+
1750062397.188073,56533,0.6470606327056885
|
| 437 |
+
1750062513.561744,56633,0.6480293869972229
|
| 438 |
+
1750062629.979378,56733,0.6478112936019897
|
| 439 |
+
1750062746.4312499,56833,0.6493836045265198
|
| 440 |
+
1750062862.8876128,56933,0.6481072306632996
|
| 441 |
+
1750062979.306707,57033,0.64878249168396
|
| 442 |
+
1750063095.5481331,57133,0.6467052698135376
|
| 443 |
+
1750063212.493322,57233,0.6481035351753235
|
| 444 |
+
1750063328.775289,57333,0.6488075852394104
|
| 445 |
+
1750063445.034789,57433,0.6460257172584534
|
| 446 |
+
1750063561.407937,57533,0.6473590731620789
|
| 447 |
+
1750063677.673996,57633,0.6485949754714966
|
| 448 |
+
1750063793.694189,57733,0.6475416421890259
|
| 449 |
+
1750063909.95166,57833,0.6466213464736938
|
| 450 |
+
1750064027.247638,57933,0.6497169137001038
|
| 451 |
+
1750064143.514656,58033,0.6465557813644409
|
| 452 |
+
1750064268.079637,58133,0.6479840874671936
|
| 453 |
+
1750064424.337912,58233,0.6463425159454346
|
| 454 |
+
1750064548.345029,58333,0.6467218399047852
|
| 455 |
+
1750064664.193969,58433,0.6449418067932129
|
| 456 |
+
1750064780.292584,58533,0.6467867493629456
|
| 457 |
+
1750064896.502843,58633,0.6476826071739197
|
| 458 |
+
1750065012.795582,58733,0.6480741500854492
|
| 459 |
+
1750065128.8592482,58833,0.6465104222297668
|
| 460 |
+
1750065244.7908301,58933,0.6474007368087769
|
| 461 |
+
1750065360.6975,59033,0.6470018625259399
|
| 462 |
+
1750065476.57175,59133,0.647394597530365
|
| 463 |
+
1750066215.112386,59246,0.6484884023666382
|
| 464 |
+
1750066329.233048,59346,0.6517101526260376
|
| 465 |
+
1750066446.0869088,59446,0.6494681239128113
|
| 466 |
+
1750066563.986378,59546,0.6496850252151489
|
| 467 |
+
1750066680.343473,59646,0.6474074721336365
|
| 468 |
+
1750066796.7458298,59746,0.649565577507019
|
| 469 |
+
1750066912.6179461,59846,0.6474013328552246
|
| 470 |
+
1750067029.49361,59946,0.6498302817344666
|
| 471 |
+
1750067145.9068658,60046,0.6463137269020081
|
| 472 |
+
1750067261.559467,60146,0.648062527179718
|
| 473 |
+
1750067378.464273,60246,0.6487009525299072
|
| 474 |
+
1750067494.541793,60346,0.6484540700912476
|
| 475 |
+
1750067611.426126,60446,0.6447340846061707
|
| 476 |
+
1750067728.451967,60546,0.6489546298980713
|
| 477 |
+
1750067845.480575,60646,0.6487022042274475
|
| 478 |
+
1750067963.274455,60746,0.6478694677352905
|
| 479 |
+
1750068079.824903,60846,0.6502383351325989
|
| 480 |
+
1750068195.613735,60946,0.6492922902107239
|
| 481 |
+
1750068327.311218,61046,0.6487003564834595
|
| 482 |
+
1750068454.94793,61146,0.6478210687637329
|
| 483 |
+
1750068570.154111,61246,0.6484748721122742
|
| 484 |
+
1750068685.7284281,61346,0.6482732892036438
|
| 485 |
+
1750068801.346897,61446,0.646964430809021
|
| 486 |
+
1750068916.927058,61546,0.6464099287986755
|
| 487 |
+
1750069032.448956,61646,0.644658088684082
|
| 488 |
+
1750069147.9146209,61746,0.6485943794250488
|
| 489 |
+
1750069263.3070111,61846,0.6458688974380493
|
| 490 |
+
1750069378.775149,61946,0.6487181186676025
|
| 491 |
+
1750069494.211877,62046,0.6477941274642944
|
| 492 |
+
1750069609.57893,62146,0.6474037766456604
|
| 493 |
+
1750069724.84948,62246,0.6479589343070984
|
| 494 |
+
1750070447.851841,62359,0.651902973651886
|
| 495 |
+
1750070561.9951239,62459,0.6503787040710449
|
| 496 |
+
1750070676.8718872,62559,0.6500968337059021
|
| 497 |
+
1750070791.8396,62659,0.6510557532310486
|
| 498 |
+
1750070906.979104,62759,0.6499785780906677
|
| 499 |
+
1750071022.06126,62859,0.649732232093811
|
| 500 |
+
1750071137.246733,62959,0.6478468179702759
|
| 501 |
+
1750071256.552842,63059,0.6495073437690735
|
| 502 |
+
1750071372.671751,63159,0.6482971906661987
|
| 503 |
+
1750071487.657099,63259,0.6481047868728638
|
| 504 |
+
1750071602.834729,63359,0.648909330368042
|
| 505 |
+
1750071718.017049,63459,0.646631121635437
|
| 506 |
+
1750071833.306231,63559,0.6482463479042053
|
| 507 |
+
1750071948.5643551,63659,0.6479852795600891
|
| 508 |
+
1750072063.602099,63759,0.6481752395629883
|
| 509 |
+
1750072179.286376,63859,0.6478750109672546
|
| 510 |
+
1750072296.222941,63959,0.6486703157424927
|
| 511 |
+
1750072418.746772,64059,0.6489123702049255
|
| 512 |
+
1750072533.577402,64159,0.6501323580741882
|
| 513 |
+
1750072648.612843,64259,0.6470416784286499
|
| 514 |
+
1750072763.5893078,64359,0.6490704417228699
|
| 515 |
+
1750072878.450794,64459,0.6459466814994812
|
| 516 |
+
1750072993.365532,64559,0.6475882530212402
|
| 517 |
+
1750073108.697324,64659,0.6503645777702332
|
| 518 |
+
1750073223.617787,64759,0.6470171809196472
|
| 519 |
+
1750073338.752395,64859,0.6477291584014893
|
| 520 |
+
1750073453.570859,64959,0.6484718322753906
|
| 521 |
+
1750073568.435877,65059,0.6459577083587646
|
| 522 |
+
1750073683.24477,65159,0.6472359299659729
|
| 523 |
+
1750073798.1748412,65259,0.6485876441001892
|
| 524 |
+
1750073913.0885818,65359,0.6493455767631531
|
| 525 |
+
1750074644.202455,65472,0.650331974029541
|
| 526 |
+
1750074758.912836,65572,0.6519914269447327
|
| 527 |
+
1750074875.099701,65672,0.650109052658081
|
| 528 |
+
1750074990.943136,65772,0.6503970623016357
|
| 529 |
+
1750075106.007064,65872,0.6483119130134583
|
| 530 |
+
1750075221.057364,65972,0.6512475609779358
|
| 531 |
+
1750075336.13061,66072,0.6481102705001831
|
| 532 |
+
1750075451.236589,66172,0.6488688588142395
|
| 533 |
+
1750075566.052592,66272,0.6494044065475464
|
| 534 |
+
1750075682.7699761,66372,0.6493076086044312
|
| 535 |
+
1750075797.9876559,66472,0.6478333473205566
|
| 536 |
+
1750075914.64273,66572,0.6474276781082153
|
| 537 |
+
1750076030.050213,66672,0.6496495008468628
|
| 538 |
+
1750076152.2348318,66772,0.6493915319442749
|
| 539 |
+
1750076269.6426702,66872,0.6476317644119263
|
| 540 |
+
1750076384.59956,66972,0.648086428642273
|
| 541 |
+
1750076499.781887,67072,0.6497089266777039
|
| 542 |
+
1750076614.9142919,67172,0.6489056348800659
|
| 543 |
+
1750076729.905608,67272,0.6496078372001648
|
| 544 |
+
1750076845.027034,67372,0.6489534378051758
|
| 545 |
+
1750076960.0806148,67472,0.6497972011566162
|
| 546 |
+
1750077074.876384,67572,0.649520218372345
|
| 547 |
+
1750077189.7312992,67672,0.6471053957939148
|
| 548 |
+
1750077304.536205,67772,0.6471991539001465
|
| 549 |
+
1750077419.491337,67872,0.6477248668670654
|
| 550 |
+
1750077534.3444228,67972,0.6475986242294312
|
| 551 |
+
1750077649.246627,68072,0.6473278403282166
|
| 552 |
+
1750077764.127095,68172,0.6482389569282532
|
| 553 |
+
1750077878.92101,68272,0.6501243710517883
|
| 554 |
+
1750077993.704979,68372,0.6485391855239868
|
| 555 |
+
1750078108.401723,68472,0.6455429196357727
|
| 556 |
+
1750078836.657377,68585,0.6516741514205933
|
| 557 |
+
1750078952.580533,68685,0.6496519446372986
|
| 558 |
+
1750079067.407628,68785,0.6507120132446289
|
| 559 |
+
1750079182.252828,68885,0.6490698456764221
|
| 560 |
+
1750079297.0283499,68985,0.6507880091667175
|
| 561 |
+
1750079411.772536,69085,0.6514895558357239
|
| 562 |
+
1750079526.6152508,69185,0.6502475738525391
|
| 563 |
+
1750079641.374357,69285,0.648604154586792
|
| 564 |
+
1750079756.093072,69385,0.649006724357605
|
| 565 |
+
1750079870.5768309,69485,0.6492598056793213
|
| 566 |
+
1750079992.450662,69585,0.6508265733718872
|
| 567 |
+
1750080116.739509,69685,0.6490214467048645
|
| 568 |
+
1750080231.016501,69785,0.648647665977478
|
| 569 |
+
1750080345.28207,69885,0.6501409411430359
|
| 570 |
+
1750080459.8231602,69985,0.648421585559845
|
| 571 |
+
1750080574.2639868,70085,0.6481715440750122
|
| 572 |
+
1750080688.757041,70185,0.6504570841789246
|
| 573 |
+
1750080803.202705,70285,0.6495674252510071
|
| 574 |
+
1750080917.704134,70385,0.6511856913566589
|
| 575 |
+
1750081032.162951,70485,0.6491391062736511
|
| 576 |
+
1750081146.549643,70585,0.6491948366165161
|
| 577 |
+
1750081261.078706,70685,0.6476237773895264
|
| 578 |
+
1750081375.446201,70785,0.6472598314285278
|
| 579 |
+
1750081489.6819131,70885,0.649506151676178
|
| 580 |
+
1750081604.042305,70985,0.6490165591239929
|
| 581 |
+
1750081718.3400252,71085,0.6489497423171997
|
| 582 |
+
1750081832.732856,71185,0.6479442119598389
|
| 583 |
+
1750081947.114954,71285,0.6481096744537354
|
| 584 |
+
1750082061.835982,71385,0.6467849016189575
|
| 585 |
+
1750082176.418093,71485,0.6477126479148865
|
| 586 |
+
1750082290.7188091,71585,0.6497445106506348
|
| 587 |
+
1750083016.513731,71698,0.6524151563644409
|
| 588 |
+
1750083130.618523,71798,0.6513198614120483
|
| 589 |
+
1750083244.589661,71898,0.6509871482849121
|
| 590 |
+
1750083359.089848,71998,0.6508817672729492
|
| 591 |
+
1750083473.078088,72098,0.6499442458152771
|
| 592 |
+
1750083587.289967,72198,0.6507530808448792
|
| 593 |
+
1750083701.352993,72298,0.6499148011207581
|
| 594 |
+
1750083815.663315,72398,0.6481636166572571
|
| 595 |
+
1750083936.105367,72498,0.650898277759552
|
| 596 |
+
1750084065.999089,72598,0.6477634906768799
|
| 597 |
+
1750084179.5669591,72698,0.6490398049354553
|
| 598 |
+
1750084293.404572,72798,0.6496537923812866
|
| 599 |
+
1750084409.61257,72898,0.6489595770835876
|
| 600 |
+
1750084530.014266,72998,0.6506764888763428
|
| 601 |
+
1750084642.7470279,73098,0.6500404477119446
|
| 602 |
+
1750084756.535712,73198,0.6486256122589111
|
| 603 |
+
1750084871.964978,73298,0.6500183939933777
|
| 604 |
+
1750084986.151422,73398,0.650077223777771
|
| 605 |
+
1750085100.4449952,73498,0.6502646803855896
|
| 606 |
+
1750085214.790596,73598,0.6472273468971252
|
| 607 |
+
1750085329.468748,73698,0.6477739214897156
|
| 608 |
+
1750085443.722016,73798,0.6485349535942078
|
| 609 |
+
1750085557.787304,73898,0.6490380167961121
|
| 610 |
+
1750085673.177803,73998,0.6487604379653931
|
| 611 |
+
1750085787.612976,74098,0.6494344472885132
|
| 612 |
+
1750085913.490603,74198,0.649268388748169
|
| 613 |
+
1750086052.901354,74298,0.6489779353141785
|
| 614 |
+
1750086182.2634351,74398,0.6496483087539673
|
| 615 |
+
1750086295.016717,74498,0.6478670239448547
|
| 616 |
+
1750086435.7205381,74598,0.6491844654083252
|
| 617 |
+
1750086582.7606962,74698,0.6491010785102844
|
| 618 |
+
1750087650.0419421,74811,0.6522228717803955
|
| 619 |
+
1750087762.584516,74911,0.6522107720375061
|
| 620 |
+
1750087875.505641,75011,0.6498799324035645
|
| 621 |
+
1750087988.452775,75111,0.6499907970428467
|
| 622 |
+
1750088101.767231,75211,0.65114825963974
|
| 623 |
+
1750088215.3928099,75311,0.6526415348052979
|
| 624 |
+
1750088331.228487,75411,0.651384174823761
|
| 625 |
+
1750088446.234497,75511,0.6497800350189209
|
| 626 |
+
1750088560.0593631,75611,0.6517898440361023
|
| 627 |
+
1750088674.036354,75711,0.6499993801116943
|
| 628 |
+
1750088787.784639,75811,0.6519804000854492
|
| 629 |
+
1750088901.776468,75911,0.6504987478256226
|
| 630 |
+
1750089014.103643,76011,0.6487267017364502
|
| 631 |
+
1750089127.109042,76111,0.6484436392784119
|
| 632 |
+
1750089240.3520262,76211,0.6512634754180908
|
| 633 |
+
1750089354.353836,76311,0.6479203701019287
|
| 634 |
+
1750089468.5235279,76411,0.6494595408439636
|
| 635 |
+
1750089583.547188,76511,0.6499166488647461
|
| 636 |
+
1750089704.054522,76611,0.6496145725250244
|
| 637 |
+
1750089822.3331609,76711,0.6495196223258972
|
| 638 |
+
1750089941.994915,76811,0.6495153307914734
|
| 639 |
+
1750090061.784815,76911,0.6490931510925293
|
| 640 |
+
1750090182.254617,77011,0.6484087109565735
|
| 641 |
+
1750090303.953204,77111,0.6500018239021301
|
| 642 |
+
1750090426.952368,77211,0.648506760597229
|
| 643 |
+
1750090548.443975,77311,0.6480318903923035
|
| 644 |
+
1750090669.956901,77411,0.6486170291900635
|
| 645 |
+
1750090791.7959669,77511,0.6492151021957397
|
| 646 |
+
1750090912.972746,77611,0.6473553776741028
|
| 647 |
+
1750091033.934365,77711,0.6462597846984863
|
| 648 |
+
1750091164.33149,77811,0.6510331034660339
|
| 649 |
+
1750091964.664192,77924,0.6533048152923584
|
| 650 |
+
1750092091.235796,78024,0.6523431539535522
|
| 651 |
+
1750092208.4739149,78124,0.6510931253433228
|
| 652 |
+
1750092328.204187,78224,0.6501372456550598
|
| 653 |
+
1750092447.8787808,78324,0.6499123573303223
|
| 654 |
+
1750092567.8858712,78424,0.6484730243682861
|
| 655 |
+
1750092687.8550222,78524,0.6504344344139099
|
| 656 |
+
1750092811.3799438,78624,0.6524816155433655
|
| 657 |
+
1750092929.918206,78724,0.6506648063659668
|
| 658 |
+
1750093053.76854,78824,0.6516305208206177
|
| 659 |
+
1750093177.572756,78924,0.6513609290122986
|
| 660 |
+
1750093302.106108,79024,0.653305172920227
|
| 661 |
+
1750093429.367929,79124,0.6484840512275696
|
| 662 |
+
1750093557.5396092,79224,0.649454653263092
|
| 663 |
+
1750093680.4307358,79324,0.6518210768699646
|
| 664 |
+
1750093806.49715,79424,0.6496446132659912
|
| 665 |
+
1750093931.0879638,79524,0.6506440043449402
|
| 666 |
+
1750094046.997188,79624,0.6500864028930664
|
| 667 |
+
1750094161.746679,79724,0.6513866186141968
|
| 668 |
+
1750094278.252318,79824,0.6487426161766052
|
| 669 |
+
1750094397.103918,79924,0.648730993270874
|
| 670 |
+
1750094517.025068,80024,0.6489289402961731
|
| 671 |
+
1750094642.6882298,80124,0.6502524614334106
|
| 672 |
+
1750094775.814713,80224,0.649855375289917
|
| 673 |
+
1750094903.003746,80324,0.6477358937263489
|
| 674 |
+
1750095022.039825,80424,0.6478602886199951
|
| 675 |
+
1750095140.6187148,80524,0.6489865183830261
|
| 676 |
+
1750095260.4530401,80624,0.6482934951782227
|
| 677 |
+
1750095378.731529,80724,0.6490281820297241
|
| 678 |
+
1750095496.937608,80824,0.6485232710838318
|
| 679 |
+
1750095616.3465102,80924,0.6485385894775391
|
| 680 |
+
1750096401.501398,81037,0.6525385975837708
|
| 681 |
+
1750096526.1197782,81137,0.6502799987792969
|
| 682 |
+
1750096644.770298,81237,0.6528933644294739
|
| 683 |
+
1750096759.472527,81337,0.6510306596755981
|
| 684 |
+
1750096873.226041,81437,0.651094377040863
|
| 685 |
+
1750096987.332403,81537,0.6506212949752808
|
| 686 |
+
1750097102.048306,81637,0.6519405841827393
|
| 687 |
+
1750097216.469391,81737,0.6492205858230591
|
| 688 |
+
1750097331.152544,81837,0.649980366230011
|
| 689 |
+
1750097445.0099788,81937,0.6509393453598022
|
| 690 |
+
1750097558.881762,82037,0.6526182889938354
|
| 691 |
+
1750097674.117058,82137,0.6499534249305725
|
| 692 |
+
1750097788.172687,82237,0.6503940224647522
|
| 693 |
+
1750097903.974227,82337,0.6475943922996521
|
| 694 |
+
1750098019.9126558,82437,0.6499638557434082
|
| 695 |
+
1750098132.7968042,82537,0.6512077450752258
|
| 696 |
+
1750098246.327349,82637,0.649770200252533
|
| 697 |
+
1750098359.4387681,82737,0.6514387130737305
|
| 698 |
+
1750098474.935789,82837,0.6485177874565125
|
| 699 |
+
1750098589.37578,82937,0.6507077217102051
|
| 700 |
+
1750098703.584759,83037,0.6483749747276306
|
| 701 |
+
1750098818.363471,83137,0.649534285068512
|
| 702 |
+
1750098932.86682,83237,0.650955855846405
|
| 703 |
+
1750099047.605303,83337,0.6501758694648743
|
| 704 |
+
1750099162.009273,83437,0.6494081020355225
|
| 705 |
+
1750099276.285405,83537,0.6501213312149048
|
| 706 |
+
1750099390.24512,83637,0.6502935290336609
|
| 707 |
+
1750099504.52185,83737,0.6514497399330139
|
| 708 |
+
1750099620.872819,83837,0.6484656929969788
|
| 709 |
+
1750099738.8319669,83937,0.6472879648208618
|
| 710 |
+
1750099854.784019,84037,0.6502016186714172
|
| 711 |
+
1750100599.842554,84150,0.6516321301460266
|
| 712 |
+
1750100719.342059,84250,0.6523364186286926
|
| 713 |
+
1750100835.149472,84350,0.6532328724861145
|
| 714 |
+
1750100951.360066,84450,0.6507886052131653
|
| 715 |
+
1750101067.94403,84550,0.6532910466194153
|
| 716 |
+
1750101184.3113961,84650,0.650813102722168
|
| 717 |
+
1750101300.6346161,84750,0.6497732996940613
|
| 718 |
+
1750101417.3176908,84850,0.6496176719665527
|
| 719 |
+
1750101533.636374,84950,0.6501342058181763
|
| 720 |
+
1750101649.565685,85050,0.6528940200805664
|
| 721 |
+
1750101765.909247,85150,0.6525833606719971
|
| 722 |
+
1750101881.584228,85250,0.650132954120636
|
| 723 |
+
1750102000.73386,85350,0.6496201157569885
|
| 724 |
+
1750102123.7391832,85450,0.6501519680023193
|
| 725 |
+
1750102238.8457978,85550,0.6488964557647705
|
| 726 |
+
1750102359.9214761,85650,0.6510980129241943
|
| 727 |
+
1750102475.4553149,85750,0.6516562700271606
|
| 728 |
+
1750102591.233095,85850,0.6508400440216064
|
| 729 |
+
1750102706.973902,85950,0.6507493853569031
|
| 730 |
+
1750102822.656201,86050,0.6494552493095398
|
| 731 |
+
1750102938.4214451,86150,0.6484871506690979
|
| 732 |
+
1750103054.184406,86250,0.6494491696357727
|
| 733 |
+
1750103174.913878,86350,0.6497573256492615
|
| 734 |
+
1750103291.4358752,86450,0.6503756046295166
|
| 735 |
+
1750103408.954601,86550,0.6502782106399536
|
| 736 |
+
1750103525.450433,86650,0.6492156982421875
|
| 737 |
+
1750103643.345119,86750,0.6502463221549988
|
| 738 |
+
1750103758.7680361,86850,0.6517242789268494
|
| 739 |
+
1750103874.44716,86950,0.64891117811203
|
| 740 |
+
1750103995.1904159,87050,0.6503370404243469
|
| 741 |
+
1750104113.313233,87150,0.648857831954956
|
| 742 |
+
1750104867.213655,87263,0.6533442735671997
|
| 743 |
+
1750104990.644952,87363,0.6527530550956726
|
| 744 |
+
1750105108.749578,87463,0.6530067324638367
|
| 745 |
+
1750105228.482618,87563,0.650898277759552
|
| 746 |
+
1750105345.200011,87663,0.6505967974662781
|
| 747 |
+
1750105462.401325,87763,0.6541727781295776
|
| 748 |
+
1750105583.0651278,87863,0.6510753631591797
|
| 749 |
+
1750105717.216706,87963,0.6523413062095642
|
| 750 |
+
1750105838.682082,88063,0.6511335968971252
|
| 751 |
+
1750105975.438334,88163,0.6527175307273865
|
| 752 |
+
1750106106.627712,88263,0.6487070918083191
|
| 753 |
+
1750106242.7790568,88363,0.6508768200874329
|
| 754 |
+
1750106367.5398169,88463,0.6527836918830872
|
| 755 |
+
1750106491.0528882,88563,0.6500986814498901
|
| 756 |
+
1750106614.614609,88663,0.6493241190910339
|
| 757 |
+
1750106735.016927,88763,0.6510257124900818
|
| 758 |
+
1750106857.7620852,88863,0.6492800116539001
|
| 759 |
+
1750106986.702441,88963,0.6512181162834167
|
| 760 |
+
1750107111.717518,89063,0.6506360173225403
|
| 761 |
+
1750107228.7076979,89163,0.6517395973205566
|
| 762 |
+
1750107346.9738932,89263,0.6490159630775452
|
| 763 |
+
1750107464.920545,89363,0.6501629948616028
|
| 764 |
+
1750107583.2309482,89463,0.6505747437477112
|
| 765 |
+
1750107699.763712,89563,0.6477003693580627
|
| 766 |
+
1750107818.607779,89663,0.6475716829299927
|
| 767 |
+
1750107939.036907,89763,0.6489228010177612
|
| 768 |
+
1750108058.216803,89863,0.6506924033164978
|
| 769 |
+
1750108175.199019,89963,0.6502628922462463
|
| 770 |
+
1750108292.703619,90063,0.6497432589530945
|
| 771 |
+
1750108411.528075,90163,0.6469877362251282
|
| 772 |
+
1750108529.149643,90263,0.6497542858123779
|
| 773 |
+
1750109327.6874151,90376,0.651892364025116
|
| 774 |
+
1750109448.1817029,90476,0.6531562209129333
|
| 775 |
+
1750109564.803989,90576,0.6521072387695312
|
| 776 |
+
1750109682.5154922,90676,0.6510747671127319
|
| 777 |
+
1750109807.18699,90776,0.6527775526046753
|
| 778 |
+
1750109937.623831,90876,0.6504724025726318
|
| 779 |
+
1750110058.013789,90976,0.6522940993309021
|
| 780 |
+
1750110177.417755,91076,0.6534093022346497
|
| 781 |
+
1750110294.637894,91176,0.6521856784820557
|
| 782 |
+
1750110416.915083,91276,0.6513940095901489
|
| 783 |
+
1750110538.5843232,91376,0.6504846811294556
|
| 784 |
+
1750110661.573292,91476,0.6513933539390564
|
| 785 |
+
1750110784.1757429,91576,0.6507052779197693
|
| 786 |
+
1750110907.472345,91676,0.650814950466156
|
| 787 |
+
1750111031.5547361,91776,0.6509736776351929
|
| 788 |
+
1750111153.6419892,91876,0.6515055298805237
|
| 789 |
+
1750111272.164583,91976,0.6513664126396179
|
| 790 |
+
1750111387.447068,92076,0.6497408151626587
|
| 791 |
+
1750111502.772604,92176,0.6496139764785767
|
| 792 |
+
1750111621.903015,92276,0.6504093408584595
|
| 793 |
+
1750111738.397706,92376,0.6518149375915527
|
| 794 |
+
1750111854.356266,92476,0.6491886973381042
|
| 795 |
+
1750111970.475776,92576,0.6509883403778076
|
| 796 |
+
1750112092.236366,92676,0.6475692391395569
|
| 797 |
+
1750112215.338778,92776,0.650664210319519
|
| 798 |
+
1750112333.134115,92876,0.6502971649169922
|
| 799 |
+
1750112453.4675221,92976,0.6502291560173035
|
| 800 |
+
1750112576.257438,93076,0.6490208506584167
|
| 801 |
+
1750112697.693574,93176,0.6500085592269897
|
| 802 |
+
1750112819.9915361,93276,0.6529031991958618
|
| 803 |
+
1750112948.117852,93376,0.6488376259803772
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard (1).csv
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749862029.663543,99,0.37294554710388184
|
| 3 |
+
1749862156.9849339,199,0.44629713892936707
|
| 4 |
+
1749862283.002182,299,0.47867053747177124
|
| 5 |
+
1749862404.551185,399,0.49936822056770325
|
| 6 |
+
1749862523.688343,499,0.5137384533882141
|
| 7 |
+
1749862647.099201,599,0.5275991559028625
|
| 8 |
+
1749863398.260582,722,0.5418893694877625
|
| 9 |
+
1749863512.803018,822,0.5529560446739197
|
| 10 |
+
1749863628.045519,922,0.5599608421325684
|
| 11 |
+
1749863743.546858,1022,0.5664816498756409
|
| 12 |
+
1749863858.946523,1122,0.5740582346916199
|
| 13 |
+
1749863974.606943,1222,0.5792315602302551
|
| 14 |
+
1749864707.312323,2591,0.5740908980369568
|
| 15 |
+
1749864822.3112042,2691,0.5770991444587708
|
| 16 |
+
1749864937.8522801,2791,0.5825880765914917
|
| 17 |
+
1749865053.508802,2891,0.5861800312995911
|
| 18 |
+
1749865168.944853,2991,0.5914608240127563
|
| 19 |
+
1749865284.4459488,3091,0.594249963760376
|
| 20 |
+
1749865399.783348,3191,0.5992579460144043
|
| 21 |
+
1749865514.924843,3291,0.602853536605835
|
| 22 |
+
1749865630.1243489,3391,0.6052868366241455
|
| 23 |
+
1749865745.3801239,3491,0.6072555184364319
|
| 24 |
+
1749865860.57861,3591,0.6097333431243896
|
| 25 |
+
1749865976.224461,3691,0.611083984375
|
| 26 |
+
1749866732.477282,3837,0.6175699830055237
|
| 27 |
+
1749866847.373405,3937,0.6213619709014893
|
| 28 |
+
1749866961.69667,4037,0.6234620809555054
|
| 29 |
+
1749867076.207936,4137,0.6233278512954712
|
| 30 |
+
1749867192.9578712,4237,0.6250610947608948
|
| 31 |
+
1749867307.9066849,4337,0.6263699531555176
|
| 32 |
+
1749867423.839284,4437,0.6277560591697693
|
| 33 |
+
1749867540.36063,4537,0.6290692090988159
|
| 34 |
+
1749867656.4509041,4637,0.6298345327377319
|
| 35 |
+
1749867772.5633621,4737,0.6320093274116516
|
| 36 |
+
1749867888.3156312,4837,0.6330803036689758
|
| 37 |
+
1749868006.0029519,4937,0.6348605155944824
|
| 38 |
+
1749868808.441995,7571,0.6303296089172363
|
| 39 |
+
1749868930.6764002,7671,0.6296606659889221
|
| 40 |
+
1749869053.484579,7771,0.6313474178314209
|
| 41 |
+
1749869174.837471,7871,0.632144570350647
|
| 42 |
+
1749869292.225559,7971,0.6332916021347046
|
| 43 |
+
1749869411.03731,8071,0.6333509683609009
|
| 44 |
+
1749869535.8628109,8171,0.6353659629821777
|
| 45 |
+
1749869649.086516,8271,0.6361371278762817
|
| 46 |
+
1749869763.021135,8371,0.6366482973098755
|
| 47 |
+
1749869877.361543,8471,0.6357555389404297
|
| 48 |
+
1749869991.890194,8571,0.6368321180343628
|
| 49 |
+
1749870112.390059,8671,0.6386470794677734
|
| 50 |
+
1749870228.080665,8771,0.6386367082595825
|
| 51 |
+
1749870342.9029372,8871,0.6378971338272095
|
| 52 |
+
1749870457.246625,8971,0.6396408081054688
|
| 53 |
+
1749870571.635715,9071,0.6408162117004395
|
| 54 |
+
1749870686.231644,9171,0.6400809288024902
|
| 55 |
+
1749870800.516695,9271,0.6418086290359497
|
| 56 |
+
1749871605.094102,9439,0.64491206407547
|
| 57 |
+
1749871737.065096,9539,0.6465832591056824
|
| 58 |
+
1749871851.9842901,9639,0.6466361284255981
|
| 59 |
+
1749871967.5529048,9739,0.6476323008537292
|
| 60 |
+
1749872083.907424,9839,0.6464681029319763
|
| 61 |
+
1749872201.8901188,9939,0.6458964347839355
|
| 62 |
+
1749872363.6062431,10039,0.6475790143013
|
| 63 |
+
1749872546.685531,10139,0.6460607051849365
|
| 64 |
+
1749872663.447436,10239,0.6478688716888428
|
| 65 |
+
1749873472.751728,10339,0.6373345255851746
|
| 66 |
+
1749873586.958823,10439,0.6448805332183838
|
| 67 |
+
1749873702.9145112,10539,0.6457089185714722
|
| 68 |
+
1749873816.705306,10639,0.6463573575019836
|
| 69 |
+
1749873930.537557,10739,0.6457958817481995
|
| 70 |
+
1749874044.9519339,10839,0.6481281518936157
|
| 71 |
+
1749874159.8627908,10939,0.6469950675964355
|
| 72 |
+
1749874275.146302,11039,0.6485129594802856
|
| 73 |
+
1749874390.6844969,11139,0.6499018669128418
|
| 74 |
+
1749875174.8603241,15045,0.6436793208122253
|
| 75 |
+
1749875290.682424,15145,0.6420380473136902
|
| 76 |
+
1749875415.815707,15245,0.6425821781158447
|
| 77 |
+
1749875530.4505758,15345,0.6420215368270874
|
| 78 |
+
1749875644.9556422,15445,0.6440042853355408
|
| 79 |
+
1749875759.9207602,15545,0.6424780488014221
|
| 80 |
+
1749875879.3361962,15645,0.643294095993042
|
| 81 |
+
1749875993.4695501,15745,0.6448836326599121
|
| 82 |
+
1749876107.5972888,15845,0.645183801651001
|
| 83 |
+
1749876221.7461262,15945,0.6461532711982727
|
| 84 |
+
1749876336.225884,16045,0.6454576849937439
|
| 85 |
+
1749876450.965764,16145,0.6456298828125
|
| 86 |
+
1749876566.629506,16245,0.6443301439285278
|
| 87 |
+
1749876683.568193,16345,0.645946204662323
|
| 88 |
+
1749876798.040498,16445,0.6467732191085815
|
| 89 |
+
1749876911.571116,16545,0.6462554931640625
|
| 90 |
+
1749877025.1111512,16645,0.6451244950294495
|
| 91 |
+
1749877139.379872,16745,0.6458805203437805
|
| 92 |
+
1749877254.228658,16845,0.6443285346031189
|
| 93 |
+
1749877374.360051,16945,0.6479160189628601
|
| 94 |
+
1749877492.01997,17045,0.6478552222251892
|
| 95 |
+
1749877608.5502021,17145,0.6497830748558044
|
| 96 |
+
1749877724.957767,17245,0.6486115455627441
|
| 97 |
+
1749877841.109853,17345,0.6480239033699036
|
| 98 |
+
1749878646.2594078,17536,0.6495139598846436
|
| 99 |
+
1749878760.912617,17636,0.6510331630706787
|
| 100 |
+
1749878877.23084,17736,0.6514031291007996
|
| 101 |
+
1749878993.918535,17836,0.6505551338195801
|
| 102 |
+
1749879109.034369,17936,0.6511745452880859
|
| 103 |
+
1749879223.9821818,18036,0.6540765166282654
|
| 104 |
+
1749879339.26787,18136,0.6506806015968323
|
| 105 |
+
1749879454.726379,18236,0.6503374576568604
|
| 106 |
+
1749879570.0690088,18336,0.6529502868652344
|
| 107 |
+
1749879685.1710541,18436,0.6511238217353821
|
| 108 |
+
1749879800.3635142,18536,0.6518282890319824
|
| 109 |
+
1749879915.033729,18636,0.6516628861427307
|
| 110 |
+
1749880029.6866162,18736,0.6537311673164368
|
| 111 |
+
1749880144.222166,18836,0.653042197227478
|
| 112 |
+
1749880258.8637629,18936,0.6531310081481934
|
| 113 |
+
1749880373.32494,19036,0.651985228061676
|
| 114 |
+
1749880488.133607,19136,0.6532859802246094
|
| 115 |
+
1749880602.5153239,19236,0.6535618305206299
|
| 116 |
+
1749880717.016221,19336,0.6526396870613098
|
| 117 |
+
1749880831.561332,19436,0.6540868282318115
|
| 118 |
+
1749880946.107208,19536,0.6526824831962585
|
| 119 |
+
1749881060.5496352,19636,0.652843177318573
|
| 120 |
+
1749881174.887777,19736,0.6523430347442627
|
| 121 |
+
1749881289.034664,19836,0.6538118124008179
|
| 122 |
+
1749882085.5701241,25003,0.6411749720573425
|
| 123 |
+
1749882199.2805178,25103,0.6289074420928955
|
| 124 |
+
1749882313.069938,25203,0.63006192445755
|
| 125 |
+
1749882426.932492,25303,0.6306214332580566
|
| 126 |
+
1749882540.776115,25403,0.6305362582206726
|
| 127 |
+
1749882655.2986329,25503,0.6331127882003784
|
| 128 |
+
1749882771.058639,25603,0.6321122646331787
|
| 129 |
+
1749882884.853459,25703,0.6309130191802979
|
| 130 |
+
1749882999.084863,25803,0.6323768496513367
|
| 131 |
+
1749883113.03004,25903,0.6333639621734619
|
| 132 |
+
1749883227.049516,26003,0.6328328251838684
|
| 133 |
+
1749883340.962351,26103,0.6330544948577881
|
| 134 |
+
1749883454.8910239,26203,0.6316843628883362
|
| 135 |
+
1749883568.748449,26303,0.6329068541526794
|
| 136 |
+
1749883682.626584,26403,0.6340925693511963
|
| 137 |
+
1749883796.266156,26503,0.634718120098114
|
| 138 |
+
1749883909.823321,26603,0.6345930695533752
|
| 139 |
+
1749884023.612312,26703,0.6305355429649353
|
| 140 |
+
1749884137.3909109,26803,0.6329914331436157
|
| 141 |
+
1749884250.964849,26903,0.6338057518005371
|
| 142 |
+
1749884364.620685,27003,0.6310195922851562
|
| 143 |
+
1749884478.288471,27103,0.6320237517356873
|
| 144 |
+
1749884591.921928,27203,0.6323473453521729
|
| 145 |
+
1749884705.4580832,27303,0.6338775753974915
|
| 146 |
+
1749884819.04635,27403,0.6349583268165588
|
| 147 |
+
1749884932.5079958,27503,0.6327939629554749
|
| 148 |
+
1749885045.950988,27603,0.6340410709381104
|
| 149 |
+
1749885159.388893,27703,0.6335666179656982
|
| 150 |
+
1749885272.804197,27803,0.6340423226356506
|
| 151 |
+
1749885386.316233,27903,0.6334601640701294
|
| 152 |
+
1749885499.822531,28003,0.63486647605896
|
| 153 |
+
1749886221.504969,28116,0.6372697353363037
|
| 154 |
+
1749886334.385321,28216,0.6380821466445923
|
| 155 |
+
1749886448.043535,28316,0.6341930627822876
|
| 156 |
+
1749886566.996864,28416,0.6381385326385498
|
| 157 |
+
1749886683.145211,28516,0.6390631198883057
|
| 158 |
+
1749886801.945816,28616,0.6362524628639221
|
| 159 |
+
1749886917.753129,28716,0.6383652091026306
|
| 160 |
+
1749887031.577143,28816,0.6372789144515991
|
| 161 |
+
1749887145.237945,28916,0.6368517875671387
|
| 162 |
+
1749887259.142705,29016,0.6355586051940918
|
| 163 |
+
1749887372.8702788,29116,0.6365960836410522
|
| 164 |
+
1749887486.513908,29216,0.6369326710700989
|
| 165 |
+
1749887600.1771169,29316,0.6356692314147949
|
| 166 |
+
1749887714.078214,29416,0.6381360292434692
|
| 167 |
+
1749887827.802154,29516,0.6395895481109619
|
| 168 |
+
1749887941.250756,29616,0.6351495981216431
|
| 169 |
+
1749888054.684053,29716,0.6337200403213501
|
| 170 |
+
1749888168.055861,29816,0.6380612850189209
|
| 171 |
+
1749888281.437411,29916,0.6367456912994385
|
| 172 |
+
1749888395.104567,30016,0.6368082165718079
|
| 173 |
+
1749888508.717651,30116,0.637988269329071
|
| 174 |
+
1749888622.26287,30216,0.6383621692657471
|
| 175 |
+
1749888735.716015,30316,0.6390760540962219
|
| 176 |
+
1749888849.314946,30416,0.6382609009742737
|
| 177 |
+
1749888962.671913,30516,0.6376262903213501
|
| 178 |
+
1749889075.962262,30616,0.6375894546508789
|
| 179 |
+
1749889189.209306,30716,0.6379967927932739
|
| 180 |
+
1749889302.4496808,30816,0.6370212435722351
|
| 181 |
+
1749889415.703003,30916,0.6353185772895813
|
| 182 |
+
1749889528.950649,31016,0.6387556791305542
|
| 183 |
+
1749889642.150322,31116,0.6378328204154968
|
| 184 |
+
1749890378.956782,31229,0.6372666358947754
|
| 185 |
+
1749890505.625979,31329,0.6375833749771118
|
| 186 |
+
1749890618.191977,31429,0.6399643421173096
|
| 187 |
+
1749890731.541603,31529,0.6371794939041138
|
| 188 |
+
1749890844.967264,31629,0.6412948369979858
|
| 189 |
+
1749890958.658777,31729,0.6381122469902039
|
| 190 |
+
1749891071.891078,31829,0.6379870772361755
|
| 191 |
+
1749891184.9368472,31929,0.636507511138916
|
| 192 |
+
1749891298.383119,32029,0.6393314599990845
|
| 193 |
+
1749891411.6084638,32129,0.6372302770614624
|
| 194 |
+
1749891524.8479302,32229,0.6382989883422852
|
| 195 |
+
1749891638.000088,32329,0.6384246349334717
|
| 196 |
+
1749891750.925784,32429,0.642183244228363
|
| 197 |
+
1749891863.9485052,32529,0.6387585401535034
|
| 198 |
+
1749891976.981396,32629,0.6382800936698914
|
| 199 |
+
1749892090.007157,32729,0.6391066908836365
|
| 200 |
+
1749892204.878405,32829,0.6372695565223694
|
| 201 |
+
1749892318.016267,32929,0.6377881765365601
|
| 202 |
+
1749892431.1136398,33029,0.6381900310516357
|
| 203 |
+
1749892544.650914,33129,0.6379098296165466
|
| 204 |
+
1749892658.351629,33229,0.6390986442565918
|
| 205 |
+
1749892771.5412412,33329,0.6394632458686829
|
| 206 |
+
1749892884.5655081,33429,0.638789176940918
|
| 207 |
+
1749892997.703425,33529,0.6404674649238586
|
| 208 |
+
1749893113.111906,33629,0.6377144455909729
|
| 209 |
+
1749893226.4987352,33729,0.6403603553771973
|
| 210 |
+
1749893339.7236722,33829,0.6399890184402466
|
| 211 |
+
1749893452.709901,33929,0.6386091113090515
|
| 212 |
+
1749893565.764252,34029,0.638846218585968
|
| 213 |
+
1749893678.8613818,34129,0.6392867565155029
|
| 214 |
+
1749893793.811114,34229,0.6415501236915588
|
| 215 |
+
1749894523.4330702,34342,0.6422275900840759
|
| 216 |
+
1749894635.668705,34442,0.6422102451324463
|
| 217 |
+
1749894748.496078,34542,0.6423694491386414
|
| 218 |
+
1749894862.105796,34642,0.6409814953804016
|
| 219 |
+
1749894975.7232609,34742,0.6424775123596191
|
| 220 |
+
1749895088.827214,34842,0.6412819027900696
|
| 221 |
+
1749895201.8739011,34942,0.6421397924423218
|
| 222 |
+
1749895315.383924,35042,0.6428180932998657
|
| 223 |
+
1749895428.870367,35142,0.6411274671554565
|
| 224 |
+
1749895542.779712,35242,0.6404625773429871
|
| 225 |
+
1749895655.7400692,35342,0.6413607597351074
|
| 226 |
+
1749895768.754721,35442,0.6396623253822327
|
| 227 |
+
1749895881.675998,35542,0.6398554444313049
|
| 228 |
+
1749895994.7287948,35642,0.6418105363845825
|
| 229 |
+
1749896107.590622,35742,0.6404362320899963
|
| 230 |
+
1749896220.531776,35842,0.6405752301216125
|
| 231 |
+
1749896334.0023758,35942,0.6391710638999939
|
| 232 |
+
1749896447.1242828,36042,0.639792263507843
|
| 233 |
+
1749896560.152203,36142,0.6422731280326843
|
| 234 |
+
1749896673.189848,36242,0.6405914425849915
|
| 235 |
+
1749896786.198764,36342,0.641568124294281
|
| 236 |
+
1749896899.2863219,36442,0.6413493156433105
|
| 237 |
+
1749897012.305343,36542,0.6412314772605896
|
| 238 |
+
1749897125.3817248,36642,0.6421323418617249
|
| 239 |
+
1749897238.33933,36742,0.6419852375984192
|
| 240 |
+
1749897351.259258,36842,0.6420484185218811
|
| 241 |
+
1749897465.7836952,36942,0.641920268535614
|
| 242 |
+
1749897578.788863,37042,0.640053927898407
|
| 243 |
+
1749897692.2813172,37142,0.6422847509384155
|
| 244 |
+
1749897805.190093,37242,0.642421543598175
|
| 245 |
+
1749897919.005985,37342,0.6421249508857727
|
| 246 |
+
1749898639.5689752,37455,0.6431224942207336
|
| 247 |
+
1749898751.988163,37555,0.6455073356628418
|
| 248 |
+
1749898864.6416771,37655,0.6462562084197998
|
| 249 |
+
1749898977.266717,37755,0.6433696746826172
|
| 250 |
+
1749899094.0932312,37855,0.6439356803894043
|
| 251 |
+
1749899207.426185,37955,0.6446089148521423
|
| 252 |
+
1749899321.114516,38055,0.6461760997772217
|
| 253 |
+
1749899435.41614,38155,0.6425666809082031
|
| 254 |
+
1749899548.3957949,38255,0.6415822505950928
|
| 255 |
+
1749899661.12898,38355,0.6433603763580322
|
| 256 |
+
1749899773.907331,38455,0.6439739465713501
|
| 257 |
+
1749899887.206593,38555,0.6431937217712402
|
| 258 |
+
1749900000.022349,38655,0.6433701515197754
|
| 259 |
+
1749900112.838345,38755,0.6402089595794678
|
| 260 |
+
1749900225.587293,38855,0.6431488990783691
|
| 261 |
+
1749900338.40116,38955,0.6427022814750671
|
| 262 |
+
1749900451.419723,39055,0.642128050327301
|
| 263 |
+
1749900564.1494222,39155,0.6431225538253784
|
| 264 |
+
1749900676.855479,39255,0.6424416303634644
|
| 265 |
+
1749900789.689708,39355,0.6426573395729065
|
| 266 |
+
1749900902.666292,39455,0.6421151161193848
|
| 267 |
+
1749901015.986363,39555,0.6435821652412415
|
| 268 |
+
1749901129.7744472,39655,0.6438963413238525
|
| 269 |
+
1749901242.9367309,39755,0.641447901725769
|
| 270 |
+
1749901355.883141,39855,0.6451898813247681
|
| 271 |
+
1749901468.810418,39955,0.6418604850769043
|
| 272 |
+
1749901581.483256,40055,0.6420018672943115
|
| 273 |
+
1749901694.195169,40155,0.6414068341255188
|
| 274 |
+
1749901807.6175969,40255,0.6405832171440125
|
| 275 |
+
1749901928.495046,40355,0.6415613293647766
|
| 276 |
+
1749902049.321397,40455,0.6413848400115967
|
| 277 |
+
1749902758.697643,40568,0.6457716822624207
|
| 278 |
+
1749902871.2450671,40668,0.646309494972229
|
| 279 |
+
1749902983.9136572,40768,0.6463437676429749
|
| 280 |
+
1749903096.5065012,40868,0.6450349688529968
|
| 281 |
+
1749903209.1087449,40968,0.6446881294250488
|
| 282 |
+
1749903321.672395,41068,0.644142746925354
|
| 283 |
+
1749903434.1538491,41168,0.6432740092277527
|
| 284 |
+
1749903546.677047,41268,0.6437448859214783
|
| 285 |
+
1749903663.135898,41368,0.6455336213111877
|
| 286 |
+
1749903775.808806,41468,0.6436740756034851
|
| 287 |
+
1749903888.313575,41568,0.6446691751480103
|
| 288 |
+
1749904000.9246452,41668,0.6452683806419373
|
| 289 |
+
1749904113.586472,41768,0.6439473032951355
|
| 290 |
+
1749904226.149442,41868,0.6451581716537476
|
| 291 |
+
1749904338.615375,41968,0.6452315449714661
|
| 292 |
+
1749904451.142393,42068,0.6444404721260071
|
| 293 |
+
1749904563.662851,42168,0.6423818469047546
|
| 294 |
+
1749904678.6194618,42268,0.6442590355873108
|
| 295 |
+
1749904791.92433,42368,0.6440293788909912
|
| 296 |
+
1749904905.461743,42468,0.6425467729568481
|
| 297 |
+
1749905018.109602,42568,0.6436562538146973
|
| 298 |
+
1749905130.8406038,42668,0.6423113346099854
|
| 299 |
+
1749905243.4874198,42768,0.6433395147323608
|
| 300 |
+
1749905356.34921,42868,0.6426182389259338
|
| 301 |
+
1749905469.8472939,42968,0.6429547071456909
|
| 302 |
+
1749905582.4856489,43068,0.6445577144622803
|
| 303 |
+
1749905700.7709599,43168,0.6432076096534729
|
| 304 |
+
1749905813.246316,43268,0.6452598571777344
|
| 305 |
+
1749905925.7469351,43368,0.6450968980789185
|
| 306 |
+
1749906038.568206,43468,0.6417151093482971
|
| 307 |
+
1749906152.325112,43568,0.6453571319580078
|
| 308 |
+
1749906869.613624,43681,0.6455939412117004
|
| 309 |
+
1749906981.776615,43781,0.6470116376876831
|
| 310 |
+
1749907094.291646,43881,0.6461580395698547
|
| 311 |
+
1749907206.7288609,43981,0.6449430584907532
|
| 312 |
+
1749907319.248631,44081,0.6480114459991455
|
| 313 |
+
1749907431.68859,44181,0.6469368934631348
|
| 314 |
+
1749907544.137596,44281,0.6462560296058655
|
| 315 |
+
1749907656.623639,44381,0.6483475565910339
|
| 316 |
+
1749907769.0745878,44481,0.6476715207099915
|
| 317 |
+
1749907881.511359,44581,0.6467322707176208
|
| 318 |
+
1749907993.944629,44681,0.6418707370758057
|
| 319 |
+
1749908106.3325398,44781,0.6422119736671448
|
| 320 |
+
1749908218.755035,44881,0.6456181406974792
|
| 321 |
+
1749908332.6752698,44981,0.6438400149345398
|
| 322 |
+
1749908445.41487,45081,0.6453412771224976
|
| 323 |
+
1749908561.7160451,45181,0.6472585201263428
|
| 324 |
+
1749908674.349626,45281,0.6463475227355957
|
| 325 |
+
1749908787.409377,45381,0.6437843441963196
|
| 326 |
+
1749908900.228266,45481,0.6451470255851746
|
| 327 |
+
1749909012.759907,45581,0.6464168429374695
|
| 328 |
+
1749909125.33191,45681,0.6457372307777405
|
| 329 |
+
1749909238.6822069,45781,0.6420699954032898
|
| 330 |
+
1749909353.055778,45881,0.6455863118171692
|
| 331 |
+
1749909465.583371,45981,0.6430723071098328
|
| 332 |
+
1749909578.791123,46081,0.6466464400291443
|
| 333 |
+
1749909695.810501,46181,0.6415648460388184
|
| 334 |
+
1749909816.994015,46281,0.6458131670951843
|
| 335 |
+
1749909931.178684,46381,0.6433743834495544
|
| 336 |
+
1749910044.3576698,46481,0.6447113156318665
|
| 337 |
+
1749910156.936951,46581,0.6447990536689758
|
| 338 |
+
1749910269.43704,46681,0.643962562084198
|
| 339 |
+
1749910985.444436,46794,0.6470857858657837
|
| 340 |
+
1749911097.5499258,46894,0.6493186950683594
|
| 341 |
+
1749911209.9833422,46994,0.6470249891281128
|
| 342 |
+
1749911323.2898078,47094,0.6450704336166382
|
| 343 |
+
1749911435.872323,47194,0.6471397876739502
|
| 344 |
+
1749911548.425044,47294,0.6435807943344116
|
| 345 |
+
1749911660.932256,47394,0.6473038196563721
|
| 346 |
+
1749911776.825709,47494,0.6473247408866882
|
| 347 |
+
1749911900.3222861,47594,0.6455411314964294
|
| 348 |
+
1749912026.3668249,47694,0.6466323137283325
|
| 349 |
+
1749912157.162115,47794,0.6445478200912476
|
| 350 |
+
1749912274.590166,47894,0.646281898021698
|
| 351 |
+
1749912391.31292,47994,0.6441708207130432
|
| 352 |
+
1749912505.9149718,48094,0.6465533375740051
|
| 353 |
+
1749912619.9158368,48194,0.6457281708717346
|
| 354 |
+
1749912733.845898,48294,0.6469725966453552
|
| 355 |
+
1749912848.251153,48394,0.6476579308509827
|
| 356 |
+
1749912962.486568,48494,0.6469281911849976
|
| 357 |
+
1749913083.380466,48594,0.6445661187171936
|
| 358 |
+
1749913213.3275251,48694,0.6448878645896912
|
| 359 |
+
1749913327.168751,48794,0.6478087902069092
|
| 360 |
+
1749913445.553847,48894,0.6467218995094299
|
| 361 |
+
1749913562.00287,48994,0.6449093818664551
|
| 362 |
+
1749913676.5021539,49094,0.6456226110458374
|
| 363 |
+
1749913865.943388,49194,0.6480283141136169
|
| 364 |
+
1749913978.782634,49294,0.644949734210968
|
| 365 |
+
1749914091.5461361,49394,0.6477023363113403
|
| 366 |
+
1749914205.0430741,49494,0.6454227566719055
|
| 367 |
+
1749914317.857575,49594,0.6468780636787415
|
| 368 |
+
1749914436.420907,49694,0.6437995433807373
|
| 369 |
+
1749914548.4780428,49794,0.6442689895629883
|
| 370 |
+
1749915254.64979,49907,0.6468920707702637
|
| 371 |
+
1749915366.522047,50007,0.6443278193473816
|
| 372 |
+
1749915479.121721,50107,0.64507657289505
|
| 373 |
+
1749915591.986209,50207,0.6475539207458496
|
| 374 |
+
1749915705.132767,50307,0.6458376049995422
|
| 375 |
+
1749915818.162699,50407,0.6470894813537598
|
| 376 |
+
1749915931.2429461,50507,0.6459890007972717
|
| 377 |
+
1749916044.412135,50607,0.6449429988861084
|
| 378 |
+
1749916157.798768,50707,0.6434136033058167
|
| 379 |
+
1749916270.845293,50807,0.6452168822288513
|
| 380 |
+
1749916385.228409,50907,0.6414031982421875
|
| 381 |
+
1749916498.514762,51007,0.6429454684257507
|
| 382 |
+
1749916610.5736582,51107,0.645865797996521
|
| 383 |
+
1749916722.826114,51207,0.6456354260444641
|
| 384 |
+
1749916835.510131,51307,0.6453517079353333
|
| 385 |
+
1749916948.500181,51407,0.6425925493240356
|
| 386 |
+
1749917061.547553,51507,0.6441556215286255
|
| 387 |
+
1749917175.452366,51607,0.6438621282577515
|
| 388 |
+
1749917289.691377,51707,0.6460551619529724
|
| 389 |
+
1749917402.7212632,51807,0.6445820927619934
|
| 390 |
+
1749917515.7485409,51907,0.6466335654258728
|
| 391 |
+
1749917628.76803,52007,0.6439675092697144
|
| 392 |
+
1749917741.809431,52107,0.6454479098320007
|
| 393 |
+
1749917854.9519842,52207,0.6439056396484375
|
| 394 |
+
1749917968.173187,52307,0.643708348274231
|
| 395 |
+
1749918081.338985,52407,0.6449944972991943
|
| 396 |
+
1749918194.488252,52507,0.6446917653083801
|
| 397 |
+
1749918307.705295,52607,0.6453357934951782
|
| 398 |
+
1749918420.946692,52707,0.6437267065048218
|
| 399 |
+
1749918534.1307259,52807,0.6483670473098755
|
| 400 |
+
1749918647.290252,52907,0.6452763676643372
|
| 401 |
+
1749919349.8012888,53020,0.6482431888580322
|
| 402 |
+
1749919462.727903,53120,0.6488265991210938
|
| 403 |
+
1749919575.7412622,53220,0.6478548049926758
|
| 404 |
+
1749919688.842592,53320,0.6473872661590576
|
| 405 |
+
1749919801.817652,53420,0.647352933883667
|
| 406 |
+
1749919914.7892962,53520,0.6466537714004517
|
| 407 |
+
1749920027.768988,53620,0.6455276012420654
|
| 408 |
+
1749920141.053708,53720,0.6472206115722656
|
| 409 |
+
1749920254.0534441,53820,0.6466146111488342
|
| 410 |
+
1749920367.0409348,53920,0.6457653045654297
|
| 411 |
+
1749920479.953151,54020,0.6479485034942627
|
| 412 |
+
1749920592.791873,54120,0.6481966972351074
|
| 413 |
+
1749920705.670143,54220,0.647784948348999
|
| 414 |
+
1749920818.720621,54320,0.6458646059036255
|
| 415 |
+
1749920931.794077,54420,0.6477463245391846
|
| 416 |
+
1749921045.317086,54520,0.6461666822433472
|
| 417 |
+
1749921159.993518,54620,0.6487022042274475
|
| 418 |
+
1749921273.078507,54720,0.6468180418014526
|
| 419 |
+
1749921386.2036648,54820,0.6433663964271545
|
| 420 |
+
1749921499.324935,54920,0.6466029286384583
|
| 421 |
+
1749921612.4671302,55020,0.6448517441749573
|
| 422 |
+
1749921725.574382,55120,0.6463658213615417
|
| 423 |
+
1749921838.710418,55220,0.645363986492157
|
| 424 |
+
1749921951.791944,55320,0.6455214619636536
|
| 425 |
+
1749922065.369843,55420,0.6459031701087952
|
| 426 |
+
1749922178.5358238,55520,0.6457732915878296
|
| 427 |
+
1749922291.767361,55620,0.6463260054588318
|
| 428 |
+
1749922404.942503,55720,0.6452855467796326
|
| 429 |
+
1749922518.24964,55820,0.6466127634048462
|
| 430 |
+
1749922631.665324,55920,0.6458290219306946
|
| 431 |
+
1749922745.3478122,56020,0.6472273468971252
|
| 432 |
+
1749923447.429276,56133,0.6463302373886108
|
| 433 |
+
1749923561.056462,56233,0.6482003927230835
|
| 434 |
+
1749923673.753302,56333,0.6483168005943298
|
| 435 |
+
1749923786.9378948,56433,0.6495551466941833
|
| 436 |
+
1749923900.16594,56533,0.6490747332572937
|
| 437 |
+
1749924013.5890381,56633,0.6481636166572571
|
| 438 |
+
1749924127.013432,56733,0.6470484137535095
|
| 439 |
+
1749924240.250842,56833,0.6492579579353333
|
| 440 |
+
1749924353.401773,56933,0.6456274390220642
|
| 441 |
+
1749924466.8444622,57033,0.6473854184150696
|
| 442 |
+
1749924580.147986,57133,0.6476078629493713
|
| 443 |
+
1749924693.80851,57233,0.6493817567825317
|
| 444 |
+
1749924809.654303,57333,0.6484301686286926
|
| 445 |
+
1749924924.456167,57433,0.6469117403030396
|
| 446 |
+
1749925038.634381,57533,0.6482880115509033
|
| 447 |
+
1749925152.2785192,57633,0.6481899619102478
|
| 448 |
+
1749925265.869144,57733,0.64661705493927
|
| 449 |
+
1749925379.272862,57833,0.6464669108390808
|
| 450 |
+
1749925491.8101661,57933,0.6488008499145508
|
| 451 |
+
1749925604.6834948,58033,0.6441654562950134
|
| 452 |
+
1749925717.869805,58133,0.6466636061668396
|
| 453 |
+
1749925831.2332642,58233,0.6475287675857544
|
| 454 |
+
1749925944.81216,58333,0.6480196118354797
|
| 455 |
+
1749926058.425607,58433,0.6473321318626404
|
| 456 |
+
1749926172.1372879,58533,0.6467843055725098
|
| 457 |
+
1749926285.852811,58633,0.6482695937156677
|
| 458 |
+
1749926400.246176,58733,0.64788419008255
|
| 459 |
+
1749926514.2801862,58833,0.6464325785636902
|
| 460 |
+
1749926628.411781,58933,0.6449589729309082
|
| 461 |
+
1749926742.487724,59033,0.6453645825386047
|
| 462 |
+
1749926856.6434848,59133,0.6469466686248779
|
| 463 |
+
1749927559.5214038,59246,0.6503404378890991
|
| 464 |
+
1749927672.020673,59346,0.6473168134689331
|
| 465 |
+
1749927785.250925,59446,0.6481862664222717
|
| 466 |
+
1749927898.710655,59546,0.649619460105896
|
| 467 |
+
1749928012.247091,59646,0.6465483903884888
|
| 468 |
+
1749928126.010963,59746,0.649186909198761
|
| 469 |
+
1749928239.80563,59846,0.6484472751617432
|
| 470 |
+
1749928353.680489,59946,0.6479632258415222
|
| 471 |
+
1749928467.482208,60046,0.64899080991745
|
| 472 |
+
1749928581.240331,60146,0.6511568427085876
|
| 473 |
+
1749928695.511598,60246,0.6487745046615601
|
| 474 |
+
1749928811.3063512,60346,0.6479558944702148
|
| 475 |
+
1749928925.040396,60446,0.6488529443740845
|
| 476 |
+
1749929038.6185012,60546,0.6495551466941833
|
| 477 |
+
1749929152.2053819,60646,0.6491721868515015
|
| 478 |
+
1749929265.851369,60746,0.6482187509536743
|
| 479 |
+
1749929379.502666,60846,0.6466194987297058
|
| 480 |
+
1749929493.5331898,60946,0.6470288038253784
|
| 481 |
+
1749929607.4053981,61046,0.6479650735855103
|
| 482 |
+
1749929721.085204,61146,0.6456948518753052
|
| 483 |
+
1749929834.74776,61246,0.6463167667388916
|
| 484 |
+
1749929948.555677,61346,0.6469117403030396
|
| 485 |
+
1749930063.395309,61446,0.6496709585189819
|
| 486 |
+
1749930177.1792881,61546,0.6468149423599243
|
| 487 |
+
1749930291.206655,61646,0.6464289426803589
|
| 488 |
+
1749930405.0233212,61746,0.6476335525512695
|
| 489 |
+
1749930519.116152,61846,0.6473082304000854
|
| 490 |
+
1749930633.092383,61946,0.6477475762367249
|
| 491 |
+
1749930746.959587,62046,0.645340085029602
|
| 492 |
+
1749930860.721993,62146,0.6493327021598816
|
| 493 |
+
1749930974.7665339,62246,0.6471899747848511
|
| 494 |
+
1749931678.899295,62359,0.648399293422699
|
| 495 |
+
1749931792.6391659,62459,0.6488786935806274
|
| 496 |
+
1749931906.5278468,62559,0.6509184837341309
|
| 497 |
+
1749932020.4616919,62659,0.6484221816062927
|
| 498 |
+
1749932134.672566,62759,0.6522690057754517
|
| 499 |
+
1749932248.6759748,62859,0.6484540700912476
|
| 500 |
+
1749932362.711755,62959,0.6484025716781616
|
| 501 |
+
1749932476.661569,63059,0.6470717191696167
|
| 502 |
+
1749932590.790066,63159,0.6500177383422852
|
| 503 |
+
1749932704.8376472,63259,0.6474350690841675
|
| 504 |
+
1749932819.889387,63359,0.6481654644012451
|
| 505 |
+
1749932936.8226,63459,0.6483504772186279
|
| 506 |
+
1749933051.687947,63559,0.6519791483879089
|
| 507 |
+
1749933165.7263381,63659,0.6482064723968506
|
| 508 |
+
1749933279.444386,63759,0.6480998992919922
|
| 509 |
+
1749933393.378773,63859,0.6485667824745178
|
| 510 |
+
1749933507.367212,63959,0.6471758484840393
|
| 511 |
+
1749933623.911954,64059,0.6477560997009277
|
| 512 |
+
1749933738.056524,64159,0.6477352976799011
|
| 513 |
+
1749933852.191925,64259,0.6470110416412354
|
| 514 |
+
1749933966.306999,64359,0.6480882167816162
|
| 515 |
+
1749934080.359732,64459,0.6488682627677917
|
| 516 |
+
1749934194.425351,64559,0.6470747590065002
|
| 517 |
+
1749934308.54075,64659,0.6494074463844299
|
| 518 |
+
1749934422.480662,64759,0.6468547582626343
|
| 519 |
+
1749934536.808921,64859,0.6491286754608154
|
| 520 |
+
1749934650.823456,64959,0.648867666721344
|
| 521 |
+
1749934764.7258089,65059,0.6472849249839783
|
| 522 |
+
1749934878.7077188,65159,0.6478468179702759
|
| 523 |
+
1749934992.883969,65259,0.6479583382606506
|
| 524 |
+
1749935106.933438,65359,0.6497653126716614
|
| 525 |
+
1749935813.731499,65472,0.6506868600845337
|
| 526 |
+
1749935927.942222,65572,0.6503027081489563
|
| 527 |
+
1749936041.607051,65672,0.6512806415557861
|
| 528 |
+
1749936155.296227,65772,0.6489803791046143
|
| 529 |
+
1749936270.8974578,65872,0.6508885025978088
|
| 530 |
+
1749936385.587658,65972,0.6498168110847473
|
| 531 |
+
1749936499.060506,66072,0.6503278017044067
|
| 532 |
+
1749936612.441948,66172,0.6502352952957153
|
| 533 |
+
1749936726.748907,66272,0.6489626169204712
|
| 534 |
+
1749936841.875183,66372,0.6480649709701538
|
| 535 |
+
1749936956.4900322,66472,0.6499338150024414
|
| 536 |
+
1749937069.998561,66572,0.6472322344779968
|
| 537 |
+
1749937183.7017019,66672,0.6475245356559753
|
| 538 |
+
1749937298.516124,66772,0.6491470336914062
|
| 539 |
+
1749937411.92944,66872,0.647495687007904
|
| 540 |
+
1749937526.436012,66972,0.6476641893386841
|
| 541 |
+
1749937641.158314,67072,0.6466764807701111
|
| 542 |
+
1749937756.0765388,67172,0.6474534273147583
|
| 543 |
+
1749937871.194944,67272,0.6498247385025024
|
| 544 |
+
1749937986.103464,67372,0.647672176361084
|
| 545 |
+
1749938111.256961,67472,0.6493241190910339
|
| 546 |
+
1749938234.540023,67572,0.649218738079071
|
| 547 |
+
1749938358.600482,67672,0.6481427550315857
|
| 548 |
+
1749938482.183712,67772,0.6489521861076355
|
| 549 |
+
1749938604.988889,67872,0.6490012407302856
|
| 550 |
+
1749938730.016806,67972,0.6490533351898193
|
| 551 |
+
1749938855.713066,68072,0.6489706039428711
|
| 552 |
+
1749938984.016691,68172,0.6467726826667786
|
| 553 |
+
1749939111.805879,68272,0.6495416760444641
|
| 554 |
+
1749939237.4458308,68372,0.6492695808410645
|
| 555 |
+
1749939362.440269,68472,0.6486568450927734
|
| 556 |
+
1749940143.424747,68585,0.6501026153564453
|
| 557 |
+
1749940267.8671591,68685,0.6520465612411499
|
| 558 |
+
1749940406.536912,68785,0.65306556224823
|
| 559 |
+
1749940539.007791,68885,0.6500367522239685
|
| 560 |
+
1749940665.279575,68985,0.6502971649169922
|
| 561 |
+
1749940787.382014,69085,0.6517028212547302
|
| 562 |
+
1749940907.63042,69185,0.6525441408157349
|
| 563 |
+
1749941027.280397,69285,0.6494277119636536
|
| 564 |
+
1749941156.2273762,69385,0.6480973958969116
|
| 565 |
+
1749941287.1213481,69485,0.6496758460998535
|
| 566 |
+
1749941414.0066822,69585,0.6504148244857788
|
| 567 |
+
1749941543.8237681,69685,0.6497818827629089
|
| 568 |
+
1749941674.046098,69785,0.6492977738380432
|
| 569 |
+
1749941812.787934,69885,0.64699387550354
|
| 570 |
+
1749941962.015331,69985,0.6491678953170776
|
| 571 |
+
1749942102.411791,70085,0.6491384506225586
|
| 572 |
+
1749942233.3373299,70185,0.6481152176856995
|
| 573 |
+
1749942359.018425,70285,0.6494773030281067
|
| 574 |
+
1749942480.5869482,70385,0.6486470699310303
|
| 575 |
+
1749942601.3076742,70485,0.6487181186676025
|
| 576 |
+
1749942721.842447,70585,0.6483076214790344
|
| 577 |
+
1749942847.5958269,70685,0.6500759720802307
|
| 578 |
+
1749942983.723243,70785,0.6499319672584534
|
| 579 |
+
1749943115.342161,70885,0.6474846601486206
|
| 580 |
+
1749943244.810354,70985,0.6511532068252563
|
| 581 |
+
1749943381.720277,71085,0.6476777195930481
|
| 582 |
+
1749943509.7974699,71185,0.6476617455482483
|
| 583 |
+
1749943636.8749802,71285,0.6472451090812683
|
| 584 |
+
1749943769.3658261,71385,0.6470766067504883
|
| 585 |
+
1749943902.5753548,71485,0.6476035714149475
|
| 586 |
+
1749944037.523066,71585,0.6469583511352539
|
| 587 |
+
1749944826.5780659,71698,0.6512960195541382
|
| 588 |
+
1749944953.716961,71798,0.65183025598526
|
| 589 |
+
1749945082.082031,71898,0.6517236232757568
|
| 590 |
+
1749945210.61923,71998,0.6500888466835022
|
| 591 |
+
1749945339.124059,72098,0.6505330801010132
|
| 592 |
+
1749945468.641827,72198,0.6502751111984253
|
| 593 |
+
1749945598.1349041,72298,0.6493455767631531
|
| 594 |
+
1749945728.573417,72398,0.6493124961853027
|
| 595 |
+
1749945858.0623538,72498,0.6513002514839172
|
| 596 |
+
1749945982.3945642,72598,0.6496317386627197
|
| 597 |
+
1749946111.765118,72698,0.6507402062416077
|
| 598 |
+
1749946240.630443,72798,0.6511532068252563
|
| 599 |
+
1749946368.180806,72898,0.6491826176643372
|
| 600 |
+
1749946496.832103,72998,0.6513100266456604
|
| 601 |
+
1749946625.482293,73098,0.6511054039001465
|
| 602 |
+
1749946755.102677,73198,0.6496274471282959
|
| 603 |
+
1749946885.840373,73298,0.6478989124298096
|
| 604 |
+
1749947015.730135,73398,0.6493860483169556
|
| 605 |
+
1749947148.454241,73498,0.649440586566925
|
| 606 |
+
1749947279.728344,73598,0.6481335759162903
|
| 607 |
+
1749947410.7961621,73698,0.648591935634613
|
| 608 |
+
1749947532.051818,73798,0.6477193832397461
|
| 609 |
+
1749947658.5383391,73898,0.6488223075866699
|
| 610 |
+
1749947785.23248,73998,0.6482095718383789
|
| 611 |
+
1749947905.7281501,74098,0.6481219530105591
|
| 612 |
+
1749948037.10495,74198,0.649522066116333
|
| 613 |
+
1749948162.964204,74298,0.6486262083053589
|
| 614 |
+
1749948299.8906748,74398,0.649992048740387
|
| 615 |
+
1749948431.189439,74498,0.6464987993240356
|
| 616 |
+
1749948562.154502,74598,0.6457542777061462
|
| 617 |
+
1749948692.440773,74698,0.6492316126823425
|
| 618 |
+
1749949544.119091,74811,0.6499026417732239
|
| 619 |
+
1749949662.2882051,74911,0.651816189289093
|
| 620 |
+
1749949782.29321,75011,0.65162193775177
|
| 621 |
+
1749949903.6215732,75111,0.6497769355773926
|
| 622 |
+
1749950025.437519,75211,0.652581512928009
|
| 623 |
+
1749950147.172297,75311,0.6517457365989685
|
| 624 |
+
1749950269.3430872,75411,0.6511011123657227
|
| 625 |
+
1749950391.183779,75511,0.6535349488258362
|
| 626 |
+
1749950512.9859998,75611,0.6526746153831482
|
| 627 |
+
1749950634.5976489,75711,0.6515275835990906
|
| 628 |
+
1749950755.841614,75811,0.6468645930290222
|
| 629 |
+
1749950876.902978,75911,0.6469895839691162
|
| 630 |
+
1749950997.8276448,76011,0.6505018472671509
|
| 631 |
+
1749951118.90695,76111,0.6481991410255432
|
| 632 |
+
1749951242.0590239,76211,0.6500465869903564
|
| 633 |
+
1749951365.13945,76311,0.6522291898727417
|
| 634 |
+
1749951485.726495,76411,0.6512941122055054
|
| 635 |
+
1749951606.384018,76511,0.6485955715179443
|
| 636 |
+
1749951727.3418372,76611,0.6492347121238708
|
| 637 |
+
1749951848.91982,76711,0.6510809063911438
|
| 638 |
+
1749951970.4396238,76811,0.6507787704467773
|
| 639 |
+
1749952092.018361,76911,0.6465024352073669
|
| 640 |
+
1749952212.437438,77011,0.6503309011459351
|
| 641 |
+
1749952333.3358028,77111,0.6472941040992737
|
| 642 |
+
1749952454.374986,77211,0.6507310271263123
|
| 643 |
+
1749952577.957252,77311,0.646462619304657
|
| 644 |
+
1749952698.933182,77411,0.6501801609992981
|
| 645 |
+
1749952821.444906,77511,0.6480913162231445
|
| 646 |
+
1749952943.532354,77611,0.6492763757705688
|
| 647 |
+
1749953065.092947,77711,0.6491703391075134
|
| 648 |
+
1749953183.2942998,77811,0.6481268405914307
|
| 649 |
+
1749953919.8155909,77924,0.6514578461647034
|
| 650 |
+
1749954039.144916,78024,0.6536059975624084
|
| 651 |
+
1749954164.892725,78124,0.6516213417053223
|
| 652 |
+
1749954287.405833,78224,0.6496967077255249
|
| 653 |
+
1749954411.896708,78324,0.6509160399436951
|
| 654 |
+
1749954532.976285,78424,0.6475796699523926
|
| 655 |
+
1749954651.8212879,78524,0.6513621211051941
|
| 656 |
+
1749954768.340374,78624,0.6514209508895874
|
| 657 |
+
1749954889.928726,78724,0.6494865417480469
|
| 658 |
+
1749955013.7541099,78824,0.6507163047790527
|
| 659 |
+
1749955135.127798,78924,0.648713231086731
|
| 660 |
+
1749955250.329943,79024,0.6503958106040955
|
| 661 |
+
1749955368.0772521,79124,0.6481421589851379
|
| 662 |
+
1749955488.714318,79224,0.6509117484092712
|
| 663 |
+
1749955608.994308,79324,0.6496789455413818
|
| 664 |
+
1749955730.32185,79424,0.6509148478507996
|
| 665 |
+
1749955852.183527,79524,0.65162193775177
|
| 666 |
+
1749955982.130076,79624,0.6507677435874939
|
| 667 |
+
1749956105.558641,79724,0.6485790610313416
|
| 668 |
+
1749956225.0044448,79824,0.6489969491958618
|
| 669 |
+
1749956346.8952608,79924,0.6521139740943909
|
| 670 |
+
1749956467.616684,80024,0.6509135961532593
|
| 671 |
+
1749956589.9021041,80124,0.6493566036224365
|
| 672 |
+
1749956710.854453,80224,0.6497260928153992
|
| 673 |
+
1749956831.13642,80324,0.6498333215713501
|
| 674 |
+
1749956952.777001,80424,0.6474724411964417
|
| 675 |
+
1749957072.210849,80524,0.648578405380249
|
| 676 |
+
1749957191.451542,80624,0.6495128870010376
|
| 677 |
+
1749957314.737707,80724,0.6525374054908752
|
| 678 |
+
1749957435.151362,80824,0.6503774523735046
|
| 679 |
+
1749957557.393923,80924,0.6495735049247742
|
| 680 |
+
1749958292.639041,81037,0.6502798199653625
|
| 681 |
+
1749958412.06761,81137,0.6510465741157532
|
| 682 |
+
1749958532.686303,81237,0.651066780090332
|
| 683 |
+
1749958645.7889438,81337,0.6499913930892944
|
| 684 |
+
1749958759.571202,81437,0.6506991386413574
|
| 685 |
+
1749958874.801936,81537,0.6515716910362244
|
| 686 |
+
1749958991.03166,81637,0.649645209312439
|
| 687 |
+
1749959107.828541,81737,0.6534062623977661
|
| 688 |
+
1749959224.8999429,81837,0.6517438888549805
|
| 689 |
+
1749959342.0457559,81937,0.6472377181053162
|
| 690 |
+
1749959458.717472,82037,0.6487628817558289
|
| 691 |
+
1749959575.365946,82137,0.6506679058074951
|
| 692 |
+
1749959691.9944792,82237,0.6509387493133545
|
| 693 |
+
1749959809.0819032,82337,0.6492481827735901
|
| 694 |
+
1749959926.093535,82437,0.6509411931037903
|
| 695 |
+
1749960043.8178828,82537,0.6513333320617676
|
| 696 |
+
1749960161.549503,82637,0.650521457195282
|
| 697 |
+
1749960279.3088372,82737,0.6513921618461609
|
| 698 |
+
1749960397.345453,82837,0.6535490155220032
|
| 699 |
+
1749960515.7569332,82937,0.651286780834198
|
| 700 |
+
1749960634.0833302,83037,0.6500453352928162
|
| 701 |
+
1749960752.65109,83137,0.6486304998397827
|
| 702 |
+
1749960871.347281,83237,0.648453414440155
|
| 703 |
+
1749960990.1292732,83337,0.648758590221405
|
| 704 |
+
1749961108.411705,83437,0.6494736671447754
|
| 705 |
+
1749961226.271459,83537,0.6482904553413391
|
| 706 |
+
1749961343.8049831,83637,0.6508358120918274
|
| 707 |
+
1749961461.005431,83737,0.650747537612915
|
| 708 |
+
1749961577.844734,83837,0.6494197249412537
|
| 709 |
+
1749961694.582011,83937,0.6514062285423279
|
| 710 |
+
1749961811.7100859,84037,0.6520459651947021
|
| 711 |
+
1749962538.324168,84150,0.6520133018493652
|
| 712 |
+
1749962654.3846312,84250,0.6523498892784119
|
| 713 |
+
1749962770.139613,84350,0.6531066298484802
|
| 714 |
+
1749962886.967903,84450,0.6516783237457275
|
| 715 |
+
1749963002.7153668,84550,0.6499571204185486
|
| 716 |
+
1749963118.68018,84650,0.6509124040603638
|
| 717 |
+
1749963234.560016,84750,0.6497187614440918
|
| 718 |
+
1749963350.778174,84850,0.6517395973205566
|
| 719 |
+
1749963466.768766,84950,0.6517120003700256
|
| 720 |
+
1749963582.5068312,85050,0.6510888338088989
|
| 721 |
+
1749963697.9346209,85150,0.650672197341919
|
| 722 |
+
1749963813.328256,85250,0.648465096950531
|
| 723 |
+
1749963928.9572961,85350,0.6514068841934204
|
| 724 |
+
1749964044.29373,85450,0.6517898440361023
|
| 725 |
+
1749964159.5894961,85550,0.6503884792327881
|
| 726 |
+
1749964275.069377,85650,0.6498480439186096
|
| 727 |
+
1749964390.285314,85750,0.6514013409614563
|
| 728 |
+
1749964505.738982,85850,0.6507444977760315
|
| 729 |
+
1749964620.9884312,85950,0.6518247723579407
|
| 730 |
+
1749964736.0891361,86050,0.6507181525230408
|
| 731 |
+
1749964851.121147,86150,0.6493345499038696
|
| 732 |
+
1749964966.152081,86250,0.651191771030426
|
| 733 |
+
1749965081.159734,86350,0.6515423059463501
|
| 734 |
+
1749965196.013607,86450,0.6494301557540894
|
| 735 |
+
1749965310.919723,86550,0.6503309011459351
|
| 736 |
+
1749965425.7250419,86650,0.6484840512275696
|
| 737 |
+
1749965540.5652092,86750,0.6490238904953003
|
| 738 |
+
1749965655.315326,86850,0.6500490307807922
|
| 739 |
+
1749965770.049461,86950,0.6497481465339661
|
| 740 |
+
1749965884.834232,87050,0.6497408151626587
|
| 741 |
+
1749965999.8417149,87150,0.6487524509429932
|
| 742 |
+
1749966711.9277241,87263,0.6526259779930115
|
| 743 |
+
1749966826.452731,87363,0.6557187438011169
|
| 744 |
+
1749966941.486036,87463,0.6512144804000854
|
| 745 |
+
1749967056.0634692,87563,0.652887225151062
|
| 746 |
+
1749967171.025635,87663,0.6515324711799622
|
| 747 |
+
1749967285.560586,87763,0.6503400802612305
|
| 748 |
+
1749967400.0741189,87863,0.6521697044372559
|
| 749 |
+
1749967514.531109,87963,0.6512916684150696
|
| 750 |
+
1749967628.8952188,88063,0.6526513695716858
|
| 751 |
+
1749967743.326265,88163,0.6508504748344421
|
| 752 |
+
1749967857.77909,88263,0.6511495113372803
|
| 753 |
+
1749967972.186653,88363,0.6517524719238281
|
| 754 |
+
1749968086.6080792,88463,0.6508412957191467
|
| 755 |
+
1749968201.192553,88563,0.6497830748558044
|
| 756 |
+
1749968315.548358,88663,0.6478333473205566
|
| 757 |
+
1749968429.965076,88763,0.6519607901573181
|
| 758 |
+
1749968544.336524,88863,0.6503326892852783
|
| 759 |
+
1749968658.703823,88963,0.6507499814033508
|
| 760 |
+
1749968773.079833,89063,0.6491066217422485
|
| 761 |
+
1749968887.395246,89163,0.6507052779197693
|
| 762 |
+
1749969001.696439,89263,0.6486783027648926
|
| 763 |
+
1749969115.9041128,89363,0.6508688926696777
|
| 764 |
+
1749969230.1204438,89463,0.649645209312439
|
| 765 |
+
1749969344.436378,89563,0.6521825790405273
|
| 766 |
+
1749969458.747808,89663,0.6497873663902283
|
| 767 |
+
1749969572.979925,89763,0.6509847044944763
|
| 768 |
+
1749969687.2085052,89863,0.6497610211372375
|
| 769 |
+
1749969801.430084,89963,0.6509503722190857
|
| 770 |
+
1749969916.392996,90063,0.6496807336807251
|
| 771 |
+
1749970033.5500932,90163,0.6488866209983826
|
| 772 |
+
1749970147.7666628,90263,0.6514172554016113
|
| 773 |
+
1749970854.433412,90376,0.6510876417160034
|
| 774 |
+
1749970968.305205,90476,0.6535937786102295
|
| 775 |
+
1749971082.257281,90576,0.6536029577255249
|
| 776 |
+
1749971196.191396,90676,0.6521390676498413
|
| 777 |
+
1749971310.241806,90776,0.6517383456230164
|
| 778 |
+
1749971424.201389,90876,0.6515649557113647
|
| 779 |
+
1749971538.050328,90976,0.6490882635116577
|
| 780 |
+
1749971652.028349,91076,0.6509369015693665
|
| 781 |
+
1749971765.8967092,91176,0.6530330777168274
|
| 782 |
+
1749971879.7971148,91276,0.6518627405166626
|
| 783 |
+
1749971993.616958,91376,0.6540888547897339
|
| 784 |
+
1749972107.447367,91476,0.6501213312149048
|
| 785 |
+
1749972221.310951,91576,0.6517695784568787
|
| 786 |
+
1749972335.184167,91676,0.6498695015907288
|
| 787 |
+
1749972449.0105689,91776,0.6509405374526978
|
| 788 |
+
1749972562.8870301,91876,0.649412989616394
|
| 789 |
+
1749972676.74808,91976,0.6519399285316467
|
| 790 |
+
1749972790.527348,92076,0.6517003774642944
|
| 791 |
+
1749972904.268828,92176,0.6503774523735046
|
| 792 |
+
1749973018.014807,92276,0.6529123783111572
|
| 793 |
+
1749973131.713702,92376,0.6517420411109924
|
| 794 |
+
1749973245.464949,92476,0.648855984210968
|
| 795 |
+
1749973359.115923,92576,0.650745689868927
|
| 796 |
+
1749973472.805828,92676,0.6503241658210754
|
| 797 |
+
1749973587.9607658,92776,0.6507254838943481
|
| 798 |
+
1749973701.850806,92876,0.6507604122161865
|
| 799 |
+
1749973817.199614,92976,0.6514717936515808
|
| 800 |
+
1749973932.78748,93076,0.6498952507972717
|
| 801 |
+
1749974046.669132,93176,0.6498903036117554
|
| 802 |
+
1749974160.730736,93276,0.6485962271690369
|
| 803 |
+
1749974274.685258,93376,0.6504834294319153
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv
ADDED
|
@@ -0,0 +1,803 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749862029.663543,99,0.37294554710388184
|
| 3 |
+
1749862156.9849339,199,0.44629713892936707
|
| 4 |
+
1749862283.002182,299,0.47867053747177124
|
| 5 |
+
1749862404.551185,399,0.49936822056770325
|
| 6 |
+
1749862523.688343,499,0.5137384533882141
|
| 7 |
+
1749862647.099201,599,0.5275991559028625
|
| 8 |
+
1749863398.260582,722,0.5418893694877625
|
| 9 |
+
1749863512.803018,822,0.5529560446739197
|
| 10 |
+
1749863628.045519,922,0.5599608421325684
|
| 11 |
+
1749863743.546858,1022,0.5664816498756409
|
| 12 |
+
1749863858.946523,1122,0.5740582346916199
|
| 13 |
+
1749863974.606943,1222,0.5792315602302551
|
| 14 |
+
1749864707.312323,2591,0.5740908980369568
|
| 15 |
+
1749864822.3112042,2691,0.5770991444587708
|
| 16 |
+
1749864937.8522801,2791,0.5825880765914917
|
| 17 |
+
1749865053.508802,2891,0.5861800312995911
|
| 18 |
+
1749865168.944853,2991,0.5914608240127563
|
| 19 |
+
1749865284.4459488,3091,0.594249963760376
|
| 20 |
+
1749865399.783348,3191,0.5992579460144043
|
| 21 |
+
1749865514.924843,3291,0.602853536605835
|
| 22 |
+
1749865630.1243489,3391,0.6052868366241455
|
| 23 |
+
1749865745.3801239,3491,0.6072555184364319
|
| 24 |
+
1749865860.57861,3591,0.6097333431243896
|
| 25 |
+
1749865976.224461,3691,0.611083984375
|
| 26 |
+
1749866732.477282,3837,0.6175699830055237
|
| 27 |
+
1749866847.373405,3937,0.6213619709014893
|
| 28 |
+
1749866961.69667,4037,0.6234620809555054
|
| 29 |
+
1749867076.207936,4137,0.6233278512954712
|
| 30 |
+
1749867192.9578712,4237,0.6250610947608948
|
| 31 |
+
1749867307.9066849,4337,0.6263699531555176
|
| 32 |
+
1749867423.839284,4437,0.6277560591697693
|
| 33 |
+
1749867540.36063,4537,0.6290692090988159
|
| 34 |
+
1749867656.4509041,4637,0.6298345327377319
|
| 35 |
+
1749867772.5633621,4737,0.6320093274116516
|
| 36 |
+
1749867888.3156312,4837,0.6330803036689758
|
| 37 |
+
1749868006.0029519,4937,0.6348605155944824
|
| 38 |
+
1749868808.441995,7571,0.6303296089172363
|
| 39 |
+
1749868930.6764002,7671,0.6296606659889221
|
| 40 |
+
1749869053.484579,7771,0.6313474178314209
|
| 41 |
+
1749869174.837471,7871,0.632144570350647
|
| 42 |
+
1749869292.225559,7971,0.6332916021347046
|
| 43 |
+
1749869411.03731,8071,0.6333509683609009
|
| 44 |
+
1749869535.8628109,8171,0.6353659629821777
|
| 45 |
+
1749869649.086516,8271,0.6361371278762817
|
| 46 |
+
1749869763.021135,8371,0.6366482973098755
|
| 47 |
+
1749869877.361543,8471,0.6357555389404297
|
| 48 |
+
1749869991.890194,8571,0.6368321180343628
|
| 49 |
+
1749870112.390059,8671,0.6386470794677734
|
| 50 |
+
1749870228.080665,8771,0.6386367082595825
|
| 51 |
+
1749870342.9029372,8871,0.6378971338272095
|
| 52 |
+
1749870457.246625,8971,0.6396408081054688
|
| 53 |
+
1749870571.635715,9071,0.6408162117004395
|
| 54 |
+
1749870686.231644,9171,0.6400809288024902
|
| 55 |
+
1749870800.516695,9271,0.6418086290359497
|
| 56 |
+
1749871605.094102,9439,0.64491206407547
|
| 57 |
+
1749871737.065096,9539,0.6465832591056824
|
| 58 |
+
1749871851.9842901,9639,0.6466361284255981
|
| 59 |
+
1749871967.5529048,9739,0.6476323008537292
|
| 60 |
+
1749872083.907424,9839,0.6464681029319763
|
| 61 |
+
1749872201.8901188,9939,0.6458964347839355
|
| 62 |
+
1749872363.6062431,10039,0.6475790143013
|
| 63 |
+
1749872546.685531,10139,0.6460607051849365
|
| 64 |
+
1749872663.447436,10239,0.6478688716888428
|
| 65 |
+
1749873472.751728,10339,0.6373345255851746
|
| 66 |
+
1749873586.958823,10439,0.6448805332183838
|
| 67 |
+
1749873702.9145112,10539,0.6457089185714722
|
| 68 |
+
1749873816.705306,10639,0.6463573575019836
|
| 69 |
+
1749873930.537557,10739,0.6457958817481995
|
| 70 |
+
1749874044.9519339,10839,0.6481281518936157
|
| 71 |
+
1749874159.8627908,10939,0.6469950675964355
|
| 72 |
+
1749874275.146302,11039,0.6485129594802856
|
| 73 |
+
1749874390.6844969,11139,0.6499018669128418
|
| 74 |
+
1749875174.8603241,15045,0.6436793208122253
|
| 75 |
+
1749875290.682424,15145,0.6420380473136902
|
| 76 |
+
1749875415.815707,15245,0.6425821781158447
|
| 77 |
+
1749875530.4505758,15345,0.6420215368270874
|
| 78 |
+
1749875644.9556422,15445,0.6440042853355408
|
| 79 |
+
1749875759.9207602,15545,0.6424780488014221
|
| 80 |
+
1749875879.3361962,15645,0.643294095993042
|
| 81 |
+
1749875993.4695501,15745,0.6448836326599121
|
| 82 |
+
1749876107.5972888,15845,0.645183801651001
|
| 83 |
+
1749876221.7461262,15945,0.6461532711982727
|
| 84 |
+
1749876336.225884,16045,0.6454576849937439
|
| 85 |
+
1749876450.965764,16145,0.6456298828125
|
| 86 |
+
1749876566.629506,16245,0.6443301439285278
|
| 87 |
+
1749876683.568193,16345,0.645946204662323
|
| 88 |
+
1749876798.040498,16445,0.6467732191085815
|
| 89 |
+
1749876911.571116,16545,0.6462554931640625
|
| 90 |
+
1749877025.1111512,16645,0.6451244950294495
|
| 91 |
+
1749877139.379872,16745,0.6458805203437805
|
| 92 |
+
1749877254.228658,16845,0.6443285346031189
|
| 93 |
+
1749877374.360051,16945,0.6479160189628601
|
| 94 |
+
1749877492.01997,17045,0.6478552222251892
|
| 95 |
+
1749877608.5502021,17145,0.6497830748558044
|
| 96 |
+
1749877724.957767,17245,0.6486115455627441
|
| 97 |
+
1749877841.109853,17345,0.6480239033699036
|
| 98 |
+
1749878646.2594078,17536,0.6495139598846436
|
| 99 |
+
1749878760.912617,17636,0.6510331630706787
|
| 100 |
+
1749878877.23084,17736,0.6514031291007996
|
| 101 |
+
1749878993.918535,17836,0.6505551338195801
|
| 102 |
+
1749879109.034369,17936,0.6511745452880859
|
| 103 |
+
1749879223.9821818,18036,0.6540765166282654
|
| 104 |
+
1749879339.26787,18136,0.6506806015968323
|
| 105 |
+
1749879454.726379,18236,0.6503374576568604
|
| 106 |
+
1749879570.0690088,18336,0.6529502868652344
|
| 107 |
+
1749879685.1710541,18436,0.6511238217353821
|
| 108 |
+
1749879800.3635142,18536,0.6518282890319824
|
| 109 |
+
1749879915.033729,18636,0.6516628861427307
|
| 110 |
+
1749880029.6866162,18736,0.6537311673164368
|
| 111 |
+
1749880144.222166,18836,0.653042197227478
|
| 112 |
+
1749880258.8637629,18936,0.6531310081481934
|
| 113 |
+
1749880373.32494,19036,0.651985228061676
|
| 114 |
+
1749880488.133607,19136,0.6532859802246094
|
| 115 |
+
1749880602.5153239,19236,0.6535618305206299
|
| 116 |
+
1749880717.016221,19336,0.6526396870613098
|
| 117 |
+
1749880831.561332,19436,0.6540868282318115
|
| 118 |
+
1749880946.107208,19536,0.6526824831962585
|
| 119 |
+
1749881060.5496352,19636,0.652843177318573
|
| 120 |
+
1749881174.887777,19736,0.6523430347442627
|
| 121 |
+
1749881289.034664,19836,0.6538118124008179
|
| 122 |
+
1749882085.5701241,25003,0.6411749720573425
|
| 123 |
+
1749882199.2805178,25103,0.6289074420928955
|
| 124 |
+
1749882313.069938,25203,0.63006192445755
|
| 125 |
+
1749882426.932492,25303,0.6306214332580566
|
| 126 |
+
1749882540.776115,25403,0.6305362582206726
|
| 127 |
+
1749882655.2986329,25503,0.6331127882003784
|
| 128 |
+
1749882771.058639,25603,0.6321122646331787
|
| 129 |
+
1749882884.853459,25703,0.6309130191802979
|
| 130 |
+
1749882999.084863,25803,0.6323768496513367
|
| 131 |
+
1749883113.03004,25903,0.6333639621734619
|
| 132 |
+
1749883227.049516,26003,0.6328328251838684
|
| 133 |
+
1749883340.962351,26103,0.6330544948577881
|
| 134 |
+
1749883454.8910239,26203,0.6316843628883362
|
| 135 |
+
1749883568.748449,26303,0.6329068541526794
|
| 136 |
+
1749883682.626584,26403,0.6340925693511963
|
| 137 |
+
1749883796.266156,26503,0.634718120098114
|
| 138 |
+
1749883909.823321,26603,0.6345930695533752
|
| 139 |
+
1749884023.612312,26703,0.6305355429649353
|
| 140 |
+
1749884137.3909109,26803,0.6329914331436157
|
| 141 |
+
1749884250.964849,26903,0.6338057518005371
|
| 142 |
+
1749884364.620685,27003,0.6310195922851562
|
| 143 |
+
1749884478.288471,27103,0.6320237517356873
|
| 144 |
+
1749884591.921928,27203,0.6323473453521729
|
| 145 |
+
1749884705.4580832,27303,0.6338775753974915
|
| 146 |
+
1749884819.04635,27403,0.6349583268165588
|
| 147 |
+
1749884932.5079958,27503,0.6327939629554749
|
| 148 |
+
1749885045.950988,27603,0.6340410709381104
|
| 149 |
+
1749885159.388893,27703,0.6335666179656982
|
| 150 |
+
1749885272.804197,27803,0.6340423226356506
|
| 151 |
+
1749885386.316233,27903,0.6334601640701294
|
| 152 |
+
1749885499.822531,28003,0.63486647605896
|
| 153 |
+
1749886221.504969,28116,0.6372697353363037
|
| 154 |
+
1749886334.385321,28216,0.6380821466445923
|
| 155 |
+
1749886448.043535,28316,0.6341930627822876
|
| 156 |
+
1749886566.996864,28416,0.6381385326385498
|
| 157 |
+
1749886683.145211,28516,0.6390631198883057
|
| 158 |
+
1749886801.945816,28616,0.6362524628639221
|
| 159 |
+
1749886917.753129,28716,0.6383652091026306
|
| 160 |
+
1749887031.577143,28816,0.6372789144515991
|
| 161 |
+
1749887145.237945,28916,0.6368517875671387
|
| 162 |
+
1749887259.142705,29016,0.6355586051940918
|
| 163 |
+
1749887372.8702788,29116,0.6365960836410522
|
| 164 |
+
1749887486.513908,29216,0.6369326710700989
|
| 165 |
+
1749887600.1771169,29316,0.6356692314147949
|
| 166 |
+
1749887714.078214,29416,0.6381360292434692
|
| 167 |
+
1749887827.802154,29516,0.6395895481109619
|
| 168 |
+
1749887941.250756,29616,0.6351495981216431
|
| 169 |
+
1749888054.684053,29716,0.6337200403213501
|
| 170 |
+
1749888168.055861,29816,0.6380612850189209
|
| 171 |
+
1749888281.437411,29916,0.6367456912994385
|
| 172 |
+
1749888395.104567,30016,0.6368082165718079
|
| 173 |
+
1749888508.717651,30116,0.637988269329071
|
| 174 |
+
1749888622.26287,30216,0.6383621692657471
|
| 175 |
+
1749888735.716015,30316,0.6390760540962219
|
| 176 |
+
1749888849.314946,30416,0.6382609009742737
|
| 177 |
+
1749888962.671913,30516,0.6376262903213501
|
| 178 |
+
1749889075.962262,30616,0.6375894546508789
|
| 179 |
+
1749889189.209306,30716,0.6379967927932739
|
| 180 |
+
1749889302.4496808,30816,0.6370212435722351
|
| 181 |
+
1749889415.703003,30916,0.6353185772895813
|
| 182 |
+
1749889528.950649,31016,0.6387556791305542
|
| 183 |
+
1749889642.150322,31116,0.6378328204154968
|
| 184 |
+
1749890378.956782,31229,0.6372666358947754
|
| 185 |
+
1749890505.625979,31329,0.6375833749771118
|
| 186 |
+
1749890618.191977,31429,0.6399643421173096
|
| 187 |
+
1749890731.541603,31529,0.6371794939041138
|
| 188 |
+
1749890844.967264,31629,0.6412948369979858
|
| 189 |
+
1749890958.658777,31729,0.6381122469902039
|
| 190 |
+
1749891071.891078,31829,0.6379870772361755
|
| 191 |
+
1749891184.9368472,31929,0.636507511138916
|
| 192 |
+
1749891298.383119,32029,0.6393314599990845
|
| 193 |
+
1749891411.6084638,32129,0.6372302770614624
|
| 194 |
+
1749891524.8479302,32229,0.6382989883422852
|
| 195 |
+
1749891638.000088,32329,0.6384246349334717
|
| 196 |
+
1749891750.925784,32429,0.642183244228363
|
| 197 |
+
1749891863.9485052,32529,0.6387585401535034
|
| 198 |
+
1749891976.981396,32629,0.6382800936698914
|
| 199 |
+
1749892090.007157,32729,0.6391066908836365
|
| 200 |
+
1749892204.878405,32829,0.6372695565223694
|
| 201 |
+
1749892318.016267,32929,0.6377881765365601
|
| 202 |
+
1749892431.1136398,33029,0.6381900310516357
|
| 203 |
+
1749892544.650914,33129,0.6379098296165466
|
| 204 |
+
1749892658.351629,33229,0.6390986442565918
|
| 205 |
+
1749892771.5412412,33329,0.6394632458686829
|
| 206 |
+
1749892884.5655081,33429,0.638789176940918
|
| 207 |
+
1749892997.703425,33529,0.6404674649238586
|
| 208 |
+
1749893113.111906,33629,0.6377144455909729
|
| 209 |
+
1749893226.4987352,33729,0.6403603553771973
|
| 210 |
+
1749893339.7236722,33829,0.6399890184402466
|
| 211 |
+
1749893452.709901,33929,0.6386091113090515
|
| 212 |
+
1749893565.764252,34029,0.638846218585968
|
| 213 |
+
1749893678.8613818,34129,0.6392867565155029
|
| 214 |
+
1749893793.811114,34229,0.6415501236915588
|
| 215 |
+
1749894523.4330702,34342,0.6422275900840759
|
| 216 |
+
1749894635.668705,34442,0.6422102451324463
|
| 217 |
+
1749894748.496078,34542,0.6423694491386414
|
| 218 |
+
1749894862.105796,34642,0.6409814953804016
|
| 219 |
+
1749894975.7232609,34742,0.6424775123596191
|
| 220 |
+
1749895088.827214,34842,0.6412819027900696
|
| 221 |
+
1749895201.8739011,34942,0.6421397924423218
|
| 222 |
+
1749895315.383924,35042,0.6428180932998657
|
| 223 |
+
1749895428.870367,35142,0.6411274671554565
|
| 224 |
+
1749895542.779712,35242,0.6404625773429871
|
| 225 |
+
1749895655.7400692,35342,0.6413607597351074
|
| 226 |
+
1749895768.754721,35442,0.6396623253822327
|
| 227 |
+
1749895881.675998,35542,0.6398554444313049
|
| 228 |
+
1749895994.7287948,35642,0.6418105363845825
|
| 229 |
+
1749896107.590622,35742,0.6404362320899963
|
| 230 |
+
1749896220.531776,35842,0.6405752301216125
|
| 231 |
+
1749896334.0023758,35942,0.6391710638999939
|
| 232 |
+
1749896447.1242828,36042,0.639792263507843
|
| 233 |
+
1749896560.152203,36142,0.6422731280326843
|
| 234 |
+
1749896673.189848,36242,0.6405914425849915
|
| 235 |
+
1749896786.198764,36342,0.641568124294281
|
| 236 |
+
1749896899.2863219,36442,0.6413493156433105
|
| 237 |
+
1749897012.305343,36542,0.6412314772605896
|
| 238 |
+
1749897125.3817248,36642,0.6421323418617249
|
| 239 |
+
1749897238.33933,36742,0.6419852375984192
|
| 240 |
+
1749897351.259258,36842,0.6420484185218811
|
| 241 |
+
1749897465.7836952,36942,0.641920268535614
|
| 242 |
+
1749897578.788863,37042,0.640053927898407
|
| 243 |
+
1749897692.2813172,37142,0.6422847509384155
|
| 244 |
+
1749897805.190093,37242,0.642421543598175
|
| 245 |
+
1749897919.005985,37342,0.6421249508857727
|
| 246 |
+
1749898639.5689752,37455,0.6431224942207336
|
| 247 |
+
1749898751.988163,37555,0.6455073356628418
|
| 248 |
+
1749898864.6416771,37655,0.6462562084197998
|
| 249 |
+
1749898977.266717,37755,0.6433696746826172
|
| 250 |
+
1749899094.0932312,37855,0.6439356803894043
|
| 251 |
+
1749899207.426185,37955,0.6446089148521423
|
| 252 |
+
1749899321.114516,38055,0.6461760997772217
|
| 253 |
+
1749899435.41614,38155,0.6425666809082031
|
| 254 |
+
1749899548.3957949,38255,0.6415822505950928
|
| 255 |
+
1749899661.12898,38355,0.6433603763580322
|
| 256 |
+
1749899773.907331,38455,0.6439739465713501
|
| 257 |
+
1749899887.206593,38555,0.6431937217712402
|
| 258 |
+
1749900000.022349,38655,0.6433701515197754
|
| 259 |
+
1749900112.838345,38755,0.6402089595794678
|
| 260 |
+
1749900225.587293,38855,0.6431488990783691
|
| 261 |
+
1749900338.40116,38955,0.6427022814750671
|
| 262 |
+
1749900451.419723,39055,0.642128050327301
|
| 263 |
+
1749900564.1494222,39155,0.6431225538253784
|
| 264 |
+
1749900676.855479,39255,0.6424416303634644
|
| 265 |
+
1749900789.689708,39355,0.6426573395729065
|
| 266 |
+
1749900902.666292,39455,0.6421151161193848
|
| 267 |
+
1749901015.986363,39555,0.6435821652412415
|
| 268 |
+
1749901129.7744472,39655,0.6438963413238525
|
| 269 |
+
1749901242.9367309,39755,0.641447901725769
|
| 270 |
+
1749901355.883141,39855,0.6451898813247681
|
| 271 |
+
1749901468.810418,39955,0.6418604850769043
|
| 272 |
+
1749901581.483256,40055,0.6420018672943115
|
| 273 |
+
1749901694.195169,40155,0.6414068341255188
|
| 274 |
+
1749901807.6175969,40255,0.6405832171440125
|
| 275 |
+
1749901928.495046,40355,0.6415613293647766
|
| 276 |
+
1749902049.321397,40455,0.6413848400115967
|
| 277 |
+
1749902758.697643,40568,0.6457716822624207
|
| 278 |
+
1749902871.2450671,40668,0.646309494972229
|
| 279 |
+
1749902983.9136572,40768,0.6463437676429749
|
| 280 |
+
1749903096.5065012,40868,0.6450349688529968
|
| 281 |
+
1749903209.1087449,40968,0.6446881294250488
|
| 282 |
+
1749903321.672395,41068,0.644142746925354
|
| 283 |
+
1749903434.1538491,41168,0.6432740092277527
|
| 284 |
+
1749903546.677047,41268,0.6437448859214783
|
| 285 |
+
1749903663.135898,41368,0.6455336213111877
|
| 286 |
+
1749903775.808806,41468,0.6436740756034851
|
| 287 |
+
1749903888.313575,41568,0.6446691751480103
|
| 288 |
+
1749904000.9246452,41668,0.6452683806419373
|
| 289 |
+
1749904113.586472,41768,0.6439473032951355
|
| 290 |
+
1749904226.149442,41868,0.6451581716537476
|
| 291 |
+
1749904338.615375,41968,0.6452315449714661
|
| 292 |
+
1749904451.142393,42068,0.6444404721260071
|
| 293 |
+
1749904563.662851,42168,0.6423818469047546
|
| 294 |
+
1749904678.6194618,42268,0.6442590355873108
|
| 295 |
+
1749904791.92433,42368,0.6440293788909912
|
| 296 |
+
1749904905.461743,42468,0.6425467729568481
|
| 297 |
+
1749905018.109602,42568,0.6436562538146973
|
| 298 |
+
1749905130.8406038,42668,0.6423113346099854
|
| 299 |
+
1749905243.4874198,42768,0.6433395147323608
|
| 300 |
+
1749905356.34921,42868,0.6426182389259338
|
| 301 |
+
1749905469.8472939,42968,0.6429547071456909
|
| 302 |
+
1749905582.4856489,43068,0.6445577144622803
|
| 303 |
+
1749905700.7709599,43168,0.6432076096534729
|
| 304 |
+
1749905813.246316,43268,0.6452598571777344
|
| 305 |
+
1749905925.7469351,43368,0.6450968980789185
|
| 306 |
+
1749906038.568206,43468,0.6417151093482971
|
| 307 |
+
1749906152.325112,43568,0.6453571319580078
|
| 308 |
+
1749906869.613624,43681,0.6455939412117004
|
| 309 |
+
1749906981.776615,43781,0.6470116376876831
|
| 310 |
+
1749907094.291646,43881,0.6461580395698547
|
| 311 |
+
1749907206.7288609,43981,0.6449430584907532
|
| 312 |
+
1749907319.248631,44081,0.6480114459991455
|
| 313 |
+
1749907431.68859,44181,0.6469368934631348
|
| 314 |
+
1749907544.137596,44281,0.6462560296058655
|
| 315 |
+
1749907656.623639,44381,0.6483475565910339
|
| 316 |
+
1749907769.0745878,44481,0.6476715207099915
|
| 317 |
+
1749907881.511359,44581,0.6467322707176208
|
| 318 |
+
1749907993.944629,44681,0.6418707370758057
|
| 319 |
+
1749908106.3325398,44781,0.6422119736671448
|
| 320 |
+
1749908218.755035,44881,0.6456181406974792
|
| 321 |
+
1749908332.6752698,44981,0.6438400149345398
|
| 322 |
+
1749908445.41487,45081,0.6453412771224976
|
| 323 |
+
1749908561.7160451,45181,0.6472585201263428
|
| 324 |
+
1749908674.349626,45281,0.6463475227355957
|
| 325 |
+
1749908787.409377,45381,0.6437843441963196
|
| 326 |
+
1749908900.228266,45481,0.6451470255851746
|
| 327 |
+
1749909012.759907,45581,0.6464168429374695
|
| 328 |
+
1749909125.33191,45681,0.6457372307777405
|
| 329 |
+
1749909238.6822069,45781,0.6420699954032898
|
| 330 |
+
1749909353.055778,45881,0.6455863118171692
|
| 331 |
+
1749909465.583371,45981,0.6430723071098328
|
| 332 |
+
1749909578.791123,46081,0.6466464400291443
|
| 333 |
+
1749909695.810501,46181,0.6415648460388184
|
| 334 |
+
1749909816.994015,46281,0.6458131670951843
|
| 335 |
+
1749909931.178684,46381,0.6433743834495544
|
| 336 |
+
1749910044.3576698,46481,0.6447113156318665
|
| 337 |
+
1749910156.936951,46581,0.6447990536689758
|
| 338 |
+
1749910269.43704,46681,0.643962562084198
|
| 339 |
+
1749910985.444436,46794,0.6470857858657837
|
| 340 |
+
1749911097.5499258,46894,0.6493186950683594
|
| 341 |
+
1749911209.9833422,46994,0.6470249891281128
|
| 342 |
+
1749911323.2898078,47094,0.6450704336166382
|
| 343 |
+
1749911435.872323,47194,0.6471397876739502
|
| 344 |
+
1749911548.425044,47294,0.6435807943344116
|
| 345 |
+
1749911660.932256,47394,0.6473038196563721
|
| 346 |
+
1749911776.825709,47494,0.6473247408866882
|
| 347 |
+
1749911900.3222861,47594,0.6455411314964294
|
| 348 |
+
1749912026.3668249,47694,0.6466323137283325
|
| 349 |
+
1749912157.162115,47794,0.6445478200912476
|
| 350 |
+
1749912274.590166,47894,0.646281898021698
|
| 351 |
+
1749912391.31292,47994,0.6441708207130432
|
| 352 |
+
1749912505.9149718,48094,0.6465533375740051
|
| 353 |
+
1749912619.9158368,48194,0.6457281708717346
|
| 354 |
+
1749912733.845898,48294,0.6469725966453552
|
| 355 |
+
1749912848.251153,48394,0.6476579308509827
|
| 356 |
+
1749912962.486568,48494,0.6469281911849976
|
| 357 |
+
1749913083.380466,48594,0.6445661187171936
|
| 358 |
+
1749913213.3275251,48694,0.6448878645896912
|
| 359 |
+
1749913327.168751,48794,0.6478087902069092
|
| 360 |
+
1749913445.553847,48894,0.6467218995094299
|
| 361 |
+
1749913562.00287,48994,0.6449093818664551
|
| 362 |
+
1749913676.5021539,49094,0.6456226110458374
|
| 363 |
+
1749913865.943388,49194,0.6480283141136169
|
| 364 |
+
1749913978.782634,49294,0.644949734210968
|
| 365 |
+
1749914091.5461361,49394,0.6477023363113403
|
| 366 |
+
1749914205.0430741,49494,0.6454227566719055
|
| 367 |
+
1749914317.857575,49594,0.6468780636787415
|
| 368 |
+
1749914436.420907,49694,0.6437995433807373
|
| 369 |
+
1749914548.4780428,49794,0.6442689895629883
|
| 370 |
+
1749915254.64979,49907,0.6468920707702637
|
| 371 |
+
1749915366.522047,50007,0.6443278193473816
|
| 372 |
+
1749915479.121721,50107,0.64507657289505
|
| 373 |
+
1749915591.986209,50207,0.6475539207458496
|
| 374 |
+
1749915705.132767,50307,0.6458376049995422
|
| 375 |
+
1749915818.162699,50407,0.6470894813537598
|
| 376 |
+
1749915931.2429461,50507,0.6459890007972717
|
| 377 |
+
1749916044.412135,50607,0.6449429988861084
|
| 378 |
+
1749916157.798768,50707,0.6434136033058167
|
| 379 |
+
1749916270.845293,50807,0.6452168822288513
|
| 380 |
+
1749916385.228409,50907,0.6414031982421875
|
| 381 |
+
1749916498.514762,51007,0.6429454684257507
|
| 382 |
+
1749916610.5736582,51107,0.645865797996521
|
| 383 |
+
1749916722.826114,51207,0.6456354260444641
|
| 384 |
+
1749916835.510131,51307,0.6453517079353333
|
| 385 |
+
1749916948.500181,51407,0.6425925493240356
|
| 386 |
+
1749917061.547553,51507,0.6441556215286255
|
| 387 |
+
1749917175.452366,51607,0.6438621282577515
|
| 388 |
+
1749917289.691377,51707,0.6460551619529724
|
| 389 |
+
1749917402.7212632,51807,0.6445820927619934
|
| 390 |
+
1749917515.7485409,51907,0.6466335654258728
|
| 391 |
+
1749917628.76803,52007,0.6439675092697144
|
| 392 |
+
1749917741.809431,52107,0.6454479098320007
|
| 393 |
+
1749917854.9519842,52207,0.6439056396484375
|
| 394 |
+
1749917968.173187,52307,0.643708348274231
|
| 395 |
+
1749918081.338985,52407,0.6449944972991943
|
| 396 |
+
1749918194.488252,52507,0.6446917653083801
|
| 397 |
+
1749918307.705295,52607,0.6453357934951782
|
| 398 |
+
1749918420.946692,52707,0.6437267065048218
|
| 399 |
+
1749918534.1307259,52807,0.6483670473098755
|
| 400 |
+
1749918647.290252,52907,0.6452763676643372
|
| 401 |
+
1749919349.8012888,53020,0.6482431888580322
|
| 402 |
+
1749919462.727903,53120,0.6488265991210938
|
| 403 |
+
1749919575.7412622,53220,0.6478548049926758
|
| 404 |
+
1749919688.842592,53320,0.6473872661590576
|
| 405 |
+
1749919801.817652,53420,0.647352933883667
|
| 406 |
+
1749919914.7892962,53520,0.6466537714004517
|
| 407 |
+
1749920027.768988,53620,0.6455276012420654
|
| 408 |
+
1749920141.053708,53720,0.6472206115722656
|
| 409 |
+
1749920254.0534441,53820,0.6466146111488342
|
| 410 |
+
1749920367.0409348,53920,0.6457653045654297
|
| 411 |
+
1749920479.953151,54020,0.6479485034942627
|
| 412 |
+
1749920592.791873,54120,0.6481966972351074
|
| 413 |
+
1749920705.670143,54220,0.647784948348999
|
| 414 |
+
1749920818.720621,54320,0.6458646059036255
|
| 415 |
+
1749920931.794077,54420,0.6477463245391846
|
| 416 |
+
1749921045.317086,54520,0.6461666822433472
|
| 417 |
+
1749921159.993518,54620,0.6487022042274475
|
| 418 |
+
1749921273.078507,54720,0.6468180418014526
|
| 419 |
+
1749921386.2036648,54820,0.6433663964271545
|
| 420 |
+
1749921499.324935,54920,0.6466029286384583
|
| 421 |
+
1749921612.4671302,55020,0.6448517441749573
|
| 422 |
+
1749921725.574382,55120,0.6463658213615417
|
| 423 |
+
1749921838.710418,55220,0.645363986492157
|
| 424 |
+
1749921951.791944,55320,0.6455214619636536
|
| 425 |
+
1749922065.369843,55420,0.6459031701087952
|
| 426 |
+
1749922178.5358238,55520,0.6457732915878296
|
| 427 |
+
1749922291.767361,55620,0.6463260054588318
|
| 428 |
+
1749922404.942503,55720,0.6452855467796326
|
| 429 |
+
1749922518.24964,55820,0.6466127634048462
|
| 430 |
+
1749922631.665324,55920,0.6458290219306946
|
| 431 |
+
1749922745.3478122,56020,0.6472273468971252
|
| 432 |
+
1749923447.429276,56133,0.6463302373886108
|
| 433 |
+
1749923561.056462,56233,0.6482003927230835
|
| 434 |
+
1749923673.753302,56333,0.6483168005943298
|
| 435 |
+
1749923786.9378948,56433,0.6495551466941833
|
| 436 |
+
1749923900.16594,56533,0.6490747332572937
|
| 437 |
+
1749924013.5890381,56633,0.6481636166572571
|
| 438 |
+
1749924127.013432,56733,0.6470484137535095
|
| 439 |
+
1749924240.250842,56833,0.6492579579353333
|
| 440 |
+
1749924353.401773,56933,0.6456274390220642
|
| 441 |
+
1749924466.8444622,57033,0.6473854184150696
|
| 442 |
+
1749924580.147986,57133,0.6476078629493713
|
| 443 |
+
1749924693.80851,57233,0.6493817567825317
|
| 444 |
+
1749924809.654303,57333,0.6484301686286926
|
| 445 |
+
1749924924.456167,57433,0.6469117403030396
|
| 446 |
+
1749925038.634381,57533,0.6482880115509033
|
| 447 |
+
1749925152.2785192,57633,0.6481899619102478
|
| 448 |
+
1749925265.869144,57733,0.64661705493927
|
| 449 |
+
1749925379.272862,57833,0.6464669108390808
|
| 450 |
+
1749925491.8101661,57933,0.6488008499145508
|
| 451 |
+
1749925604.6834948,58033,0.6441654562950134
|
| 452 |
+
1749925717.869805,58133,0.6466636061668396
|
| 453 |
+
1749925831.2332642,58233,0.6475287675857544
|
| 454 |
+
1749925944.81216,58333,0.6480196118354797
|
| 455 |
+
1749926058.425607,58433,0.6473321318626404
|
| 456 |
+
1749926172.1372879,58533,0.6467843055725098
|
| 457 |
+
1749926285.852811,58633,0.6482695937156677
|
| 458 |
+
1749926400.246176,58733,0.64788419008255
|
| 459 |
+
1749926514.2801862,58833,0.6464325785636902
|
| 460 |
+
1749926628.411781,58933,0.6449589729309082
|
| 461 |
+
1749926742.487724,59033,0.6453645825386047
|
| 462 |
+
1749926856.6434848,59133,0.6469466686248779
|
| 463 |
+
1749927559.5214038,59246,0.6503404378890991
|
| 464 |
+
1749927672.020673,59346,0.6473168134689331
|
| 465 |
+
1749927785.250925,59446,0.6481862664222717
|
| 466 |
+
1749927898.710655,59546,0.649619460105896
|
| 467 |
+
1749928012.247091,59646,0.6465483903884888
|
| 468 |
+
1749928126.010963,59746,0.649186909198761
|
| 469 |
+
1749928239.80563,59846,0.6484472751617432
|
| 470 |
+
1749928353.680489,59946,0.6479632258415222
|
| 471 |
+
1749928467.482208,60046,0.64899080991745
|
| 472 |
+
1749928581.240331,60146,0.6511568427085876
|
| 473 |
+
1749928695.511598,60246,0.6487745046615601
|
| 474 |
+
1749928811.3063512,60346,0.6479558944702148
|
| 475 |
+
1749928925.040396,60446,0.6488529443740845
|
| 476 |
+
1749929038.6185012,60546,0.6495551466941833
|
| 477 |
+
1749929152.2053819,60646,0.6491721868515015
|
| 478 |
+
1749929265.851369,60746,0.6482187509536743
|
| 479 |
+
1749929379.502666,60846,0.6466194987297058
|
| 480 |
+
1749929493.5331898,60946,0.6470288038253784
|
| 481 |
+
1749929607.4053981,61046,0.6479650735855103
|
| 482 |
+
1749929721.085204,61146,0.6456948518753052
|
| 483 |
+
1749929834.74776,61246,0.6463167667388916
|
| 484 |
+
1749929948.555677,61346,0.6469117403030396
|
| 485 |
+
1749930063.395309,61446,0.6496709585189819
|
| 486 |
+
1749930177.1792881,61546,0.6468149423599243
|
| 487 |
+
1749930291.206655,61646,0.6464289426803589
|
| 488 |
+
1749930405.0233212,61746,0.6476335525512695
|
| 489 |
+
1749930519.116152,61846,0.6473082304000854
|
| 490 |
+
1749930633.092383,61946,0.6477475762367249
|
| 491 |
+
1749930746.959587,62046,0.645340085029602
|
| 492 |
+
1749930860.721993,62146,0.6493327021598816
|
| 493 |
+
1749930974.7665339,62246,0.6471899747848511
|
| 494 |
+
1749931678.899295,62359,0.648399293422699
|
| 495 |
+
1749931792.6391659,62459,0.6488786935806274
|
| 496 |
+
1749931906.5278468,62559,0.6509184837341309
|
| 497 |
+
1749932020.4616919,62659,0.6484221816062927
|
| 498 |
+
1749932134.672566,62759,0.6522690057754517
|
| 499 |
+
1749932248.6759748,62859,0.6484540700912476
|
| 500 |
+
1749932362.711755,62959,0.6484025716781616
|
| 501 |
+
1749932476.661569,63059,0.6470717191696167
|
| 502 |
+
1749932590.790066,63159,0.6500177383422852
|
| 503 |
+
1749932704.8376472,63259,0.6474350690841675
|
| 504 |
+
1749932819.889387,63359,0.6481654644012451
|
| 505 |
+
1749932936.8226,63459,0.6483504772186279
|
| 506 |
+
1749933051.687947,63559,0.6519791483879089
|
| 507 |
+
1749933165.7263381,63659,0.6482064723968506
|
| 508 |
+
1749933279.444386,63759,0.6480998992919922
|
| 509 |
+
1749933393.378773,63859,0.6485667824745178
|
| 510 |
+
1749933507.367212,63959,0.6471758484840393
|
| 511 |
+
1749933623.911954,64059,0.6477560997009277
|
| 512 |
+
1749933738.056524,64159,0.6477352976799011
|
| 513 |
+
1749933852.191925,64259,0.6470110416412354
|
| 514 |
+
1749933966.306999,64359,0.6480882167816162
|
| 515 |
+
1749934080.359732,64459,0.6488682627677917
|
| 516 |
+
1749934194.425351,64559,0.6470747590065002
|
| 517 |
+
1749934308.54075,64659,0.6494074463844299
|
| 518 |
+
1749934422.480662,64759,0.6468547582626343
|
| 519 |
+
1749934536.808921,64859,0.6491286754608154
|
| 520 |
+
1749934650.823456,64959,0.648867666721344
|
| 521 |
+
1749934764.7258089,65059,0.6472849249839783
|
| 522 |
+
1749934878.7077188,65159,0.6478468179702759
|
| 523 |
+
1749934992.883969,65259,0.6479583382606506
|
| 524 |
+
1749935106.933438,65359,0.6497653126716614
|
| 525 |
+
1749935813.731499,65472,0.6506868600845337
|
| 526 |
+
1749935927.942222,65572,0.6503027081489563
|
| 527 |
+
1749936041.607051,65672,0.6512806415557861
|
| 528 |
+
1749936155.296227,65772,0.6489803791046143
|
| 529 |
+
1749936270.8974578,65872,0.6508885025978088
|
| 530 |
+
1749936385.587658,65972,0.6498168110847473
|
| 531 |
+
1749936499.060506,66072,0.6503278017044067
|
| 532 |
+
1749936612.441948,66172,0.6502352952957153
|
| 533 |
+
1749936726.748907,66272,0.6489626169204712
|
| 534 |
+
1749936841.875183,66372,0.6480649709701538
|
| 535 |
+
1749936956.4900322,66472,0.6499338150024414
|
| 536 |
+
1749937069.998561,66572,0.6472322344779968
|
| 537 |
+
1749937183.7017019,66672,0.6475245356559753
|
| 538 |
+
1749937298.516124,66772,0.6491470336914062
|
| 539 |
+
1749937411.92944,66872,0.647495687007904
|
| 540 |
+
1749937526.436012,66972,0.6476641893386841
|
| 541 |
+
1749937641.158314,67072,0.6466764807701111
|
| 542 |
+
1749937756.0765388,67172,0.6474534273147583
|
| 543 |
+
1749937871.194944,67272,0.6498247385025024
|
| 544 |
+
1749937986.103464,67372,0.647672176361084
|
| 545 |
+
1749938111.256961,67472,0.6493241190910339
|
| 546 |
+
1749938234.540023,67572,0.649218738079071
|
| 547 |
+
1749938358.600482,67672,0.6481427550315857
|
| 548 |
+
1749938482.183712,67772,0.6489521861076355
|
| 549 |
+
1749938604.988889,67872,0.6490012407302856
|
| 550 |
+
1749938730.016806,67972,0.6490533351898193
|
| 551 |
+
1749938855.713066,68072,0.6489706039428711
|
| 552 |
+
1749938984.016691,68172,0.6467726826667786
|
| 553 |
+
1749939111.805879,68272,0.6495416760444641
|
| 554 |
+
1749939237.4458308,68372,0.6492695808410645
|
| 555 |
+
1749939362.440269,68472,0.6486568450927734
|
| 556 |
+
1749940143.424747,68585,0.6501026153564453
|
| 557 |
+
1749940267.8671591,68685,0.6520465612411499
|
| 558 |
+
1749940406.536912,68785,0.65306556224823
|
| 559 |
+
1749940539.007791,68885,0.6500367522239685
|
| 560 |
+
1749940665.279575,68985,0.6502971649169922
|
| 561 |
+
1749940787.382014,69085,0.6517028212547302
|
| 562 |
+
1749940907.63042,69185,0.6525441408157349
|
| 563 |
+
1749941027.280397,69285,0.6494277119636536
|
| 564 |
+
1749941156.2273762,69385,0.6480973958969116
|
| 565 |
+
1749941287.1213481,69485,0.6496758460998535
|
| 566 |
+
1749941414.0066822,69585,0.6504148244857788
|
| 567 |
+
1749941543.8237681,69685,0.6497818827629089
|
| 568 |
+
1749941674.046098,69785,0.6492977738380432
|
| 569 |
+
1749941812.787934,69885,0.64699387550354
|
| 570 |
+
1749941962.015331,69985,0.6491678953170776
|
| 571 |
+
1749942102.411791,70085,0.6491384506225586
|
| 572 |
+
1749942233.3373299,70185,0.6481152176856995
|
| 573 |
+
1749942359.018425,70285,0.6494773030281067
|
| 574 |
+
1749942480.5869482,70385,0.6486470699310303
|
| 575 |
+
1749942601.3076742,70485,0.6487181186676025
|
| 576 |
+
1749942721.842447,70585,0.6483076214790344
|
| 577 |
+
1749942847.5958269,70685,0.6500759720802307
|
| 578 |
+
1749942983.723243,70785,0.6499319672584534
|
| 579 |
+
1749943115.342161,70885,0.6474846601486206
|
| 580 |
+
1749943244.810354,70985,0.6511532068252563
|
| 581 |
+
1749943381.720277,71085,0.6476777195930481
|
| 582 |
+
1749943509.7974699,71185,0.6476617455482483
|
| 583 |
+
1749943636.8749802,71285,0.6472451090812683
|
| 584 |
+
1749943769.3658261,71385,0.6470766067504883
|
| 585 |
+
1749943902.5753548,71485,0.6476035714149475
|
| 586 |
+
1749944037.523066,71585,0.6469583511352539
|
| 587 |
+
1749944826.5780659,71698,0.6512960195541382
|
| 588 |
+
1749944953.716961,71798,0.65183025598526
|
| 589 |
+
1749945082.082031,71898,0.6517236232757568
|
| 590 |
+
1749945210.61923,71998,0.6500888466835022
|
| 591 |
+
1749945339.124059,72098,0.6505330801010132
|
| 592 |
+
1749945468.641827,72198,0.6502751111984253
|
| 593 |
+
1749945598.1349041,72298,0.6493455767631531
|
| 594 |
+
1749945728.573417,72398,0.6493124961853027
|
| 595 |
+
1749945858.0623538,72498,0.6513002514839172
|
| 596 |
+
1749945982.3945642,72598,0.6496317386627197
|
| 597 |
+
1749946111.765118,72698,0.6507402062416077
|
| 598 |
+
1749946240.630443,72798,0.6511532068252563
|
| 599 |
+
1749946368.180806,72898,0.6491826176643372
|
| 600 |
+
1749946496.832103,72998,0.6513100266456604
|
| 601 |
+
1749946625.482293,73098,0.6511054039001465
|
| 602 |
+
1749946755.102677,73198,0.6496274471282959
|
| 603 |
+
1749946885.840373,73298,0.6478989124298096
|
| 604 |
+
1749947015.730135,73398,0.6493860483169556
|
| 605 |
+
1749947148.454241,73498,0.649440586566925
|
| 606 |
+
1749947279.728344,73598,0.6481335759162903
|
| 607 |
+
1749947410.7961621,73698,0.648591935634613
|
| 608 |
+
1749947532.051818,73798,0.6477193832397461
|
| 609 |
+
1749947658.5383391,73898,0.6488223075866699
|
| 610 |
+
1749947785.23248,73998,0.6482095718383789
|
| 611 |
+
1749947905.7281501,74098,0.6481219530105591
|
| 612 |
+
1749948037.10495,74198,0.649522066116333
|
| 613 |
+
1749948162.964204,74298,0.6486262083053589
|
| 614 |
+
1749948299.8906748,74398,0.649992048740387
|
| 615 |
+
1749948431.189439,74498,0.6464987993240356
|
| 616 |
+
1749948562.154502,74598,0.6457542777061462
|
| 617 |
+
1749948692.440773,74698,0.6492316126823425
|
| 618 |
+
1749949544.119091,74811,0.6499026417732239
|
| 619 |
+
1749949662.2882051,74911,0.651816189289093
|
| 620 |
+
1749949782.29321,75011,0.65162193775177
|
| 621 |
+
1749949903.6215732,75111,0.6497769355773926
|
| 622 |
+
1749950025.437519,75211,0.652581512928009
|
| 623 |
+
1749950147.172297,75311,0.6517457365989685
|
| 624 |
+
1749950269.3430872,75411,0.6511011123657227
|
| 625 |
+
1749950391.183779,75511,0.6535349488258362
|
| 626 |
+
1749950512.9859998,75611,0.6526746153831482
|
| 627 |
+
1749950634.5976489,75711,0.6515275835990906
|
| 628 |
+
1749950755.841614,75811,0.6468645930290222
|
| 629 |
+
1749950876.902978,75911,0.6469895839691162
|
| 630 |
+
1749950997.8276448,76011,0.6505018472671509
|
| 631 |
+
1749951118.90695,76111,0.6481991410255432
|
| 632 |
+
1749951242.0590239,76211,0.6500465869903564
|
| 633 |
+
1749951365.13945,76311,0.6522291898727417
|
| 634 |
+
1749951485.726495,76411,0.6512941122055054
|
| 635 |
+
1749951606.384018,76511,0.6485955715179443
|
| 636 |
+
1749951727.3418372,76611,0.6492347121238708
|
| 637 |
+
1749951848.91982,76711,0.6510809063911438
|
| 638 |
+
1749951970.4396238,76811,0.6507787704467773
|
| 639 |
+
1749952092.018361,76911,0.6465024352073669
|
| 640 |
+
1749952212.437438,77011,0.6503309011459351
|
| 641 |
+
1749952333.3358028,77111,0.6472941040992737
|
| 642 |
+
1749952454.374986,77211,0.6507310271263123
|
| 643 |
+
1749952577.957252,77311,0.646462619304657
|
| 644 |
+
1749952698.933182,77411,0.6501801609992981
|
| 645 |
+
1749952821.444906,77511,0.6480913162231445
|
| 646 |
+
1749952943.532354,77611,0.6492763757705688
|
| 647 |
+
1749953065.092947,77711,0.6491703391075134
|
| 648 |
+
1749953183.2942998,77811,0.6481268405914307
|
| 649 |
+
1749953919.8155909,77924,0.6514578461647034
|
| 650 |
+
1749954039.144916,78024,0.6536059975624084
|
| 651 |
+
1749954164.892725,78124,0.6516213417053223
|
| 652 |
+
1749954287.405833,78224,0.6496967077255249
|
| 653 |
+
1749954411.896708,78324,0.6509160399436951
|
| 654 |
+
1749954532.976285,78424,0.6475796699523926
|
| 655 |
+
1749954651.8212879,78524,0.6513621211051941
|
| 656 |
+
1749954768.340374,78624,0.6514209508895874
|
| 657 |
+
1749954889.928726,78724,0.6494865417480469
|
| 658 |
+
1749955013.7541099,78824,0.6507163047790527
|
| 659 |
+
1749955135.127798,78924,0.648713231086731
|
| 660 |
+
1749955250.329943,79024,0.6503958106040955
|
| 661 |
+
1749955368.0772521,79124,0.6481421589851379
|
| 662 |
+
1749955488.714318,79224,0.6509117484092712
|
| 663 |
+
1749955608.994308,79324,0.6496789455413818
|
| 664 |
+
1749955730.32185,79424,0.6509148478507996
|
| 665 |
+
1749955852.183527,79524,0.65162193775177
|
| 666 |
+
1749955982.130076,79624,0.6507677435874939
|
| 667 |
+
1749956105.558641,79724,0.6485790610313416
|
| 668 |
+
1749956225.0044448,79824,0.6489969491958618
|
| 669 |
+
1749956346.8952608,79924,0.6521139740943909
|
| 670 |
+
1749956467.616684,80024,0.6509135961532593
|
| 671 |
+
1749956589.9021041,80124,0.6493566036224365
|
| 672 |
+
1749956710.854453,80224,0.6497260928153992
|
| 673 |
+
1749956831.13642,80324,0.6498333215713501
|
| 674 |
+
1749956952.777001,80424,0.6474724411964417
|
| 675 |
+
1749957072.210849,80524,0.648578405380249
|
| 676 |
+
1749957191.451542,80624,0.6495128870010376
|
| 677 |
+
1749957314.737707,80724,0.6525374054908752
|
| 678 |
+
1749957435.151362,80824,0.6503774523735046
|
| 679 |
+
1749957557.393923,80924,0.6495735049247742
|
| 680 |
+
1749958292.639041,81037,0.6502798199653625
|
| 681 |
+
1749958412.06761,81137,0.6510465741157532
|
| 682 |
+
1749958532.686303,81237,0.651066780090332
|
| 683 |
+
1749958645.7889438,81337,0.6499913930892944
|
| 684 |
+
1749958759.571202,81437,0.6506991386413574
|
| 685 |
+
1749958874.801936,81537,0.6515716910362244
|
| 686 |
+
1749958991.03166,81637,0.649645209312439
|
| 687 |
+
1749959107.828541,81737,0.6534062623977661
|
| 688 |
+
1749959224.8999429,81837,0.6517438888549805
|
| 689 |
+
1749959342.0457559,81937,0.6472377181053162
|
| 690 |
+
1749959458.717472,82037,0.6487628817558289
|
| 691 |
+
1749959575.365946,82137,0.6506679058074951
|
| 692 |
+
1749959691.9944792,82237,0.6509387493133545
|
| 693 |
+
1749959809.0819032,82337,0.6492481827735901
|
| 694 |
+
1749959926.093535,82437,0.6509411931037903
|
| 695 |
+
1749960043.8178828,82537,0.6513333320617676
|
| 696 |
+
1749960161.549503,82637,0.650521457195282
|
| 697 |
+
1749960279.3088372,82737,0.6513921618461609
|
| 698 |
+
1749960397.345453,82837,0.6535490155220032
|
| 699 |
+
1749960515.7569332,82937,0.651286780834198
|
| 700 |
+
1749960634.0833302,83037,0.6500453352928162
|
| 701 |
+
1749960752.65109,83137,0.6486304998397827
|
| 702 |
+
1749960871.347281,83237,0.648453414440155
|
| 703 |
+
1749960990.1292732,83337,0.648758590221405
|
| 704 |
+
1749961108.411705,83437,0.6494736671447754
|
| 705 |
+
1749961226.271459,83537,0.6482904553413391
|
| 706 |
+
1749961343.8049831,83637,0.6508358120918274
|
| 707 |
+
1749961461.005431,83737,0.650747537612915
|
| 708 |
+
1749961577.844734,83837,0.6494197249412537
|
| 709 |
+
1749961694.582011,83937,0.6514062285423279
|
| 710 |
+
1749961811.7100859,84037,0.6520459651947021
|
| 711 |
+
1749962538.324168,84150,0.6520133018493652
|
| 712 |
+
1749962654.3846312,84250,0.6523498892784119
|
| 713 |
+
1749962770.139613,84350,0.6531066298484802
|
| 714 |
+
1749962886.967903,84450,0.6516783237457275
|
| 715 |
+
1749963002.7153668,84550,0.6499571204185486
|
| 716 |
+
1749963118.68018,84650,0.6509124040603638
|
| 717 |
+
1749963234.560016,84750,0.6497187614440918
|
| 718 |
+
1749963350.778174,84850,0.6517395973205566
|
| 719 |
+
1749963466.768766,84950,0.6517120003700256
|
| 720 |
+
1749963582.5068312,85050,0.6510888338088989
|
| 721 |
+
1749963697.9346209,85150,0.650672197341919
|
| 722 |
+
1749963813.328256,85250,0.648465096950531
|
| 723 |
+
1749963928.9572961,85350,0.6514068841934204
|
| 724 |
+
1749964044.29373,85450,0.6517898440361023
|
| 725 |
+
1749964159.5894961,85550,0.6503884792327881
|
| 726 |
+
1749964275.069377,85650,0.6498480439186096
|
| 727 |
+
1749964390.285314,85750,0.6514013409614563
|
| 728 |
+
1749964505.738982,85850,0.6507444977760315
|
| 729 |
+
1749964620.9884312,85950,0.6518247723579407
|
| 730 |
+
1749964736.0891361,86050,0.6507181525230408
|
| 731 |
+
1749964851.121147,86150,0.6493345499038696
|
| 732 |
+
1749964966.152081,86250,0.651191771030426
|
| 733 |
+
1749965081.159734,86350,0.6515423059463501
|
| 734 |
+
1749965196.013607,86450,0.6494301557540894
|
| 735 |
+
1749965310.919723,86550,0.6503309011459351
|
| 736 |
+
1749965425.7250419,86650,0.6484840512275696
|
| 737 |
+
1749965540.5652092,86750,0.6490238904953003
|
| 738 |
+
1749965655.315326,86850,0.6500490307807922
|
| 739 |
+
1749965770.049461,86950,0.6497481465339661
|
| 740 |
+
1749965884.834232,87050,0.6497408151626587
|
| 741 |
+
1749965999.8417149,87150,0.6487524509429932
|
| 742 |
+
1749966711.9277241,87263,0.6526259779930115
|
| 743 |
+
1749966826.452731,87363,0.6557187438011169
|
| 744 |
+
1749966941.486036,87463,0.6512144804000854
|
| 745 |
+
1749967056.0634692,87563,0.652887225151062
|
| 746 |
+
1749967171.025635,87663,0.6515324711799622
|
| 747 |
+
1749967285.560586,87763,0.6503400802612305
|
| 748 |
+
1749967400.0741189,87863,0.6521697044372559
|
| 749 |
+
1749967514.531109,87963,0.6512916684150696
|
| 750 |
+
1749967628.8952188,88063,0.6526513695716858
|
| 751 |
+
1749967743.326265,88163,0.6508504748344421
|
| 752 |
+
1749967857.77909,88263,0.6511495113372803
|
| 753 |
+
1749967972.186653,88363,0.6517524719238281
|
| 754 |
+
1749968086.6080792,88463,0.6508412957191467
|
| 755 |
+
1749968201.192553,88563,0.6497830748558044
|
| 756 |
+
1749968315.548358,88663,0.6478333473205566
|
| 757 |
+
1749968429.965076,88763,0.6519607901573181
|
| 758 |
+
1749968544.336524,88863,0.6503326892852783
|
| 759 |
+
1749968658.703823,88963,0.6507499814033508
|
| 760 |
+
1749968773.079833,89063,0.6491066217422485
|
| 761 |
+
1749968887.395246,89163,0.6507052779197693
|
| 762 |
+
1749969001.696439,89263,0.6486783027648926
|
| 763 |
+
1749969115.9041128,89363,0.6508688926696777
|
| 764 |
+
1749969230.1204438,89463,0.649645209312439
|
| 765 |
+
1749969344.436378,89563,0.6521825790405273
|
| 766 |
+
1749969458.747808,89663,0.6497873663902283
|
| 767 |
+
1749969572.979925,89763,0.6509847044944763
|
| 768 |
+
1749969687.2085052,89863,0.6497610211372375
|
| 769 |
+
1749969801.430084,89963,0.6509503722190857
|
| 770 |
+
1749969916.392996,90063,0.6496807336807251
|
| 771 |
+
1749970033.5500932,90163,0.6488866209983826
|
| 772 |
+
1749970147.7666628,90263,0.6514172554016113
|
| 773 |
+
1749970854.433412,90376,0.6510876417160034
|
| 774 |
+
1749970968.305205,90476,0.6535937786102295
|
| 775 |
+
1749971082.257281,90576,0.6536029577255249
|
| 776 |
+
1749971196.191396,90676,0.6521390676498413
|
| 777 |
+
1749971310.241806,90776,0.6517383456230164
|
| 778 |
+
1749971424.201389,90876,0.6515649557113647
|
| 779 |
+
1749971538.050328,90976,0.6490882635116577
|
| 780 |
+
1749971652.028349,91076,0.6509369015693665
|
| 781 |
+
1749971765.8967092,91176,0.6530330777168274
|
| 782 |
+
1749971879.7971148,91276,0.6518627405166626
|
| 783 |
+
1749971993.616958,91376,0.6540888547897339
|
| 784 |
+
1749972107.447367,91476,0.6501213312149048
|
| 785 |
+
1749972221.310951,91576,0.6517695784568787
|
| 786 |
+
1749972335.184167,91676,0.6498695015907288
|
| 787 |
+
1749972449.0105689,91776,0.6509405374526978
|
| 788 |
+
1749972562.8870301,91876,0.649412989616394
|
| 789 |
+
1749972676.74808,91976,0.6519399285316467
|
| 790 |
+
1749972790.527348,92076,0.6517003774642944
|
| 791 |
+
1749972904.268828,92176,0.6503774523735046
|
| 792 |
+
1749973018.014807,92276,0.6529123783111572
|
| 793 |
+
1749973131.713702,92376,0.6517420411109924
|
| 794 |
+
1749973245.464949,92476,0.648855984210968
|
| 795 |
+
1749973359.115923,92576,0.650745689868927
|
| 796 |
+
1749973472.805828,92676,0.6503241658210754
|
| 797 |
+
1749973587.9607658,92776,0.6507254838943481
|
| 798 |
+
1749973701.850806,92876,0.6507604122161865
|
| 799 |
+
1749973817.199614,92976,0.6514717936515808
|
| 800 |
+
1749973932.78748,93076,0.6498952507972717
|
| 801 |
+
1749974046.669132,93176,0.6498903036117554
|
| 802 |
+
1749974160.730736,93276,0.6485962271690369
|
| 803 |
+
1749974274.685258,93376,0.6504834294319153
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750631065.525121,99,0.2892015874385834
|
| 3 |
+
1750631199.189507,199,0.38064277172088623
|
| 4 |
+
1750631324.1754081,299,0.4115380048751831
|
| 5 |
+
1750631444.5063932,399,0.43499264121055603
|
| 6 |
+
1750631570.60304,499,0.4534068703651428
|
| 7 |
+
1750631701.118371,599,0.46645158529281616
|
| 8 |
+
1750633238.095068,722,0.4837685227394104
|
| 9 |
+
1750633374.96926,822,0.4952218234539032
|
| 10 |
+
1750633510.0165582,922,0.5035698413848877
|
| 11 |
+
1750633645.130949,1022,0.513650119304657
|
| 12 |
+
1750633779.191838,1122,0.5235496163368225
|
| 13 |
+
1750633909.250054,1222,0.5300692319869995
|
| 14 |
+
1750635519.98188,1345,0.537870466709137
|
| 15 |
+
1750635651.208163,1445,0.5448480248451233
|
| 16 |
+
1750635786.351653,1545,0.5495679974555969
|
| 17 |
+
1750635919.9084532,1645,0.5562426447868347
|
| 18 |
+
1750636046.0017889,1745,0.5584007501602173
|
| 19 |
+
1750636176.090967,1845,0.5644050240516663
|
| 20 |
+
1750637798.3572638,1968,0.5683611035346985
|
| 21 |
+
1750637944.9763231,2068,0.5706801414489746
|
| 22 |
+
1750638084.590005,2168,0.5772916674613953
|
| 23 |
+
1750638219.658646,2268,0.5787334442138672
|
| 24 |
+
1750638344.697794,2368,0.5803602933883667
|
| 25 |
+
1750638467.964193,2468,0.5839117765426636
|
| 26 |
+
1750640028.77862,2591,0.5846061706542969
|
| 27 |
+
1750640153.68973,2691,0.5862714648246765
|
| 28 |
+
1750640279.634827,2791,0.5884944796562195
|
| 29 |
+
1750640404.920202,2891,0.5922720432281494
|
| 30 |
+
1750640532.764215,2991,0.5919049978256226
|
| 31 |
+
1750640660.727814,3091,0.5935349464416504
|
| 32 |
+
1750642226.478454,3214,0.5932989716529846
|
| 33 |
+
1750642351.258563,3314,0.5945245027542114
|
| 34 |
+
1750642476.434068,3414,0.5954754948616028
|
| 35 |
+
1750642601.0683022,3514,0.5981869101524353
|
| 36 |
+
1750642725.9307342,3614,0.600186288356781
|
| 37 |
+
1750642852.359199,3714,0.5997254848480225
|
| 38 |
+
1750644408.5053859,3837,0.600395143032074
|
| 39 |
+
1750644528.7009401,3937,0.5971096754074097
|
| 40 |
+
1750644664.0226219,4037,0.6009920239448547
|
| 41 |
+
1750644809.2195818,4137,0.6015698313713074
|
| 42 |
+
1750644959.742783,4237,0.601019024848938
|
| 43 |
+
1750645101.643283,4337,0.6039412021636963
|
| 44 |
+
1750646733.2835321,4460,0.6012372970581055
|
| 45 |
+
1750646871.831888,4560,0.603259801864624
|
| 46 |
+
1750647009.8041651,4660,0.600981593132019
|
| 47 |
+
1750647142.06771,4760,0.6031023263931274
|
| 48 |
+
1750647284.050242,4860,0.6026011109352112
|
| 49 |
+
1750647416.582078,4960,0.6032015681266785
|
| 50 |
+
1750648993.736192,5083,0.6007488369941711
|
| 51 |
+
1750649118.638871,5183,0.6006850600242615
|
| 52 |
+
1750649243.5734222,5283,0.597931981086731
|
| 53 |
+
1750649374.5322108,5383,0.6009148359298706
|
| 54 |
+
1750649511.9782,5483,0.6021550297737122
|
| 55 |
+
1750649643.696907,5583,0.6012359261512756
|
| 56 |
+
1750651237.184215,5706,0.605415403842926
|
| 57 |
+
1750651371.145504,5806,0.6034534573554993
|
| 58 |
+
1750651512.9966888,5906,0.6053388714790344
|
| 59 |
+
1750651645.601891,6006,0.6061537861824036
|
| 60 |
+
1750651768.101696,6106,0.6082696318626404
|
| 61 |
+
1750651894.2527318,6206,0.6087990403175354
|
| 62 |
+
1750652751.670625,31229,0.6166274547576904
|
| 63 |
+
1750652868.119637,31329,0.6185355186462402
|
| 64 |
+
1750652987.307248,31429,0.6230275630950928
|
| 65 |
+
1750653108.938766,31529,0.6198063492774963
|
| 66 |
+
1750653231.185305,31629,0.6204130053520203
|
| 67 |
+
1750653353.8723042,31729,0.6206746101379395
|
| 68 |
+
1750653485.112509,31829,0.6198235154151917
|
| 69 |
+
1750653619.547445,31929,0.6213094592094421
|
| 70 |
+
1750653735.6447968,32029,0.6196972727775574
|
| 71 |
+
1750653850.830616,32129,0.6215110421180725
|
| 72 |
+
1750653967.8004758,32229,0.6225257515907288
|
| 73 |
+
1750654085.789829,32329,0.6231274604797363
|
| 74 |
+
1750654204.026706,32429,0.6223854422569275
|
| 75 |
+
1750654322.3788009,32529,0.6243988871574402
|
| 76 |
+
1750654440.559285,32629,0.6218382120132446
|
| 77 |
+
1750654559.3013608,32729,0.6245214343070984
|
| 78 |
+
1750654677.978327,32829,0.6259387135505676
|
| 79 |
+
1750654796.143668,32929,0.6237101554870605
|
| 80 |
+
1750654914.796604,33029,0.6228921413421631
|
| 81 |
+
1750655032.7838562,33129,0.6244197487831116
|
| 82 |
+
1750655150.7129211,33229,0.6263841986656189
|
| 83 |
+
1750655268.704857,33329,0.626318633556366
|
| 84 |
+
1750655386.503475,33429,0.6249117851257324
|
| 85 |
+
1750655504.5079181,33529,0.625941812992096
|
| 86 |
+
1750655622.460054,33629,0.6252077221870422
|
| 87 |
+
1750655740.27733,33729,0.6250073313713074
|
| 88 |
+
1750655861.6650329,33829,0.6259454488754272
|
| 89 |
+
1750655980.320866,33929,0.6259902119636536
|
| 90 |
+
1750656097.877643,34029,0.6275263428688049
|
| 91 |
+
1750656215.370133,34129,0.6273707151412964
|
| 92 |
+
1750656333.075268,34229,0.6274074912071228
|
| 93 |
+
1750657086.7578921,34342,0.629107654094696
|
| 94 |
+
1750657204.668383,34442,0.6307554841041565
|
| 95 |
+
1750657322.042016,34542,0.6320698261260986
|
| 96 |
+
1750657440.209099,34642,0.6313345432281494
|
| 97 |
+
1750657557.434792,34742,0.630058228969574
|
| 98 |
+
1750657674.6609201,34842,0.6304326057434082
|
| 99 |
+
1750657792.012317,34942,0.6296194791793823
|
| 100 |
+
1750657909.36126,35042,0.6288976669311523
|
| 101 |
+
1750658027.830069,35142,0.6288621425628662
|
| 102 |
+
1750658146.2059221,35242,0.6297763586044312
|
| 103 |
+
1750658264.251379,35342,0.6283137202262878
|
| 104 |
+
1750658381.4552808,35442,0.6315312385559082
|
| 105 |
+
1750658499.310551,35542,0.6321831941604614
|
| 106 |
+
1750658616.736139,35642,0.6333051323890686
|
| 107 |
+
1750658733.911008,35742,0.6327359080314636
|
| 108 |
+
1750658851.442867,35842,0.6320796608924866
|
| 109 |
+
1750658969.819356,35942,0.6324282884597778
|
| 110 |
+
1750659087.024925,36042,0.6331182718276978
|
| 111 |
+
1750659204.912767,36142,0.6325937509536743
|
| 112 |
+
1750659322.231669,36242,0.6324228048324585
|
| 113 |
+
1750659439.335664,36342,0.6320667862892151
|
| 114 |
+
1750659556.742486,36442,0.6295673847198486
|
| 115 |
+
1750659686.196736,36542,0.633201003074646
|
| 116 |
+
1750659820.735029,36642,0.6299142241477966
|
| 117 |
+
1750659952.2519848,36742,0.6304901838302612
|
| 118 |
+
1750660069.779297,36842,0.6325153112411499
|
| 119 |
+
1750660186.531904,36942,0.6317647099494934
|
| 120 |
+
1750660303.349201,37042,0.6333976984024048
|
| 121 |
+
1750660420.603225,37142,0.6317064762115479
|
| 122 |
+
1750660541.714292,37242,0.633700966835022
|
| 123 |
+
1750660660.136745,37342,0.6330820918083191
|
| 124 |
+
1750661388.201132,37455,0.6364847421646118
|
| 125 |
+
1750661504.059097,37555,0.6347518563270569
|
| 126 |
+
1750661620.601632,37655,0.6350349187850952
|
| 127 |
+
1750661737.8392,37755,0.6348829865455627
|
| 128 |
+
1750661854.791054,37855,0.6365698575973511
|
| 129 |
+
1750661971.614787,37955,0.6343265771865845
|
| 130 |
+
1750662088.802027,38055,0.6356586813926697
|
| 131 |
+
1750662205.563544,38155,0.6366415619850159
|
| 132 |
+
1750662322.22959,38255,0.6359074711799622
|
| 133 |
+
1750662438.993978,38355,0.6354228258132935
|
| 134 |
+
1750662555.670385,38455,0.6363198757171631
|
| 135 |
+
1750662672.0950882,38555,0.6336360573768616
|
| 136 |
+
1750662788.568151,38655,0.6352775692939758
|
| 137 |
+
1750662904.984475,38755,0.635205864906311
|
| 138 |
+
1750663021.477014,38855,0.6362101435661316
|
| 139 |
+
1750663138.0638971,38955,0.635427713394165
|
| 140 |
+
1750663254.581194,39055,0.637482225894928
|
| 141 |
+
1750663370.997845,39155,0.6358364224433899
|
| 142 |
+
1750663493.812136,39255,0.632895827293396
|
| 143 |
+
1750663627.289997,39355,0.6369025707244873
|
| 144 |
+
1750663743.763742,39455,0.637343168258667
|
| 145 |
+
1750663861.556258,39555,0.6361697316169739
|
| 146 |
+
1750663978.43328,39655,0.6330171823501587
|
| 147 |
+
1750664096.123036,39755,0.6363339424133301
|
| 148 |
+
1750664213.899255,39855,0.6361568570137024
|
| 149 |
+
1750664330.618678,39955,0.6359429955482483
|
| 150 |
+
1750664447.20275,40055,0.6349148154258728
|
| 151 |
+
1750664564.020528,40155,0.6366127729415894
|
| 152 |
+
1750664680.570506,40255,0.6383057832717896
|
| 153 |
+
1750664797.279377,40355,0.6339424252510071
|
| 154 |
+
1750664913.7482638,40455,0.6364099383354187
|
| 155 |
+
1750665641.142454,40568,0.6383413076400757
|
| 156 |
+
1750665760.664209,40668,0.6392855644226074
|
| 157 |
+
1750665877.128622,40768,0.6404803991317749
|
| 158 |
+
1750665995.714239,40868,0.6381770968437195
|
| 159 |
+
1750666112.245776,40968,0.6393100619316101
|
| 160 |
+
1750666228.523797,41068,0.6395349502563477
|
| 161 |
+
1750666344.925282,41168,0.6408413052558899
|
| 162 |
+
1750666461.593054,41268,0.637652575969696
|
| 163 |
+
1750666579.433974,41368,0.6385288238525391
|
| 164 |
+
1750666696.877779,41468,0.640026330947876
|
| 165 |
+
1750666812.9850519,41568,0.6378768682479858
|
| 166 |
+
1750666929.211252,41668,0.6363480091094971
|
| 167 |
+
1750667045.4454331,41768,0.6351427435874939
|
| 168 |
+
1750667170.844815,41868,0.6397064924240112
|
| 169 |
+
1750667297.539781,41968,0.6390379667282104
|
| 170 |
+
1750667413.213182,42068,0.6378137469291687
|
| 171 |
+
1750667529.4597619,42168,0.6374160647392273
|
| 172 |
+
1750667647.741377,42268,0.6378731727600098
|
| 173 |
+
1750667768.2726898,42368,0.6387904286384583
|
| 174 |
+
1750667888.868618,42468,0.6397058963775635
|
| 175 |
+
1750668005.4062688,42568,0.6393921375274658
|
| 176 |
+
1750668123.1960561,42668,0.6392971873283386
|
| 177 |
+
1750668240.35351,42768,0.6400429010391235
|
| 178 |
+
1750668356.820052,42868,0.640026330947876
|
| 179 |
+
1750668473.354707,42968,0.6401923894882202
|
| 180 |
+
1750668589.6288471,43068,0.6370373964309692
|
| 181 |
+
1750668705.693681,43168,0.638969361782074
|
| 182 |
+
1750668822.180295,43268,0.6386280655860901
|
| 183 |
+
1750668938.474815,43368,0.6391764879226685
|
| 184 |
+
1750669054.528234,43468,0.6391905546188354
|
| 185 |
+
1750669170.334048,43568,0.6384166479110718
|
| 186 |
+
1750669890.286228,43681,0.6410545706748962
|
| 187 |
+
1750670005.478684,43781,0.6422426700592041
|
| 188 |
+
1750670121.107224,43881,0.6428235173225403
|
| 189 |
+
1750670236.806165,43981,0.6417873501777649
|
| 190 |
+
1750670352.39135,44081,0.6401703357696533
|
| 191 |
+
1750670467.961828,44181,0.6414467096328735
|
| 192 |
+
1750670583.4607208,44281,0.6410232782363892
|
| 193 |
+
1750670700.078947,44381,0.6407126188278198
|
| 194 |
+
1750670817.439255,44481,0.6421048045158386
|
| 195 |
+
1750670938.819511,44581,0.6384797692298889
|
| 196 |
+
1750671067.602432,44681,0.6404001116752625
|
| 197 |
+
1750671182.949585,44781,0.6403780579566956
|
| 198 |
+
1750671303.0707479,44881,0.640904426574707
|
| 199 |
+
1750671419.453831,44981,0.6423774361610413
|
| 200 |
+
1750671536.2709272,45081,0.6402193307876587
|
| 201 |
+
1750671652.090307,45181,0.6390085816383362
|
| 202 |
+
1750671767.837034,45281,0.6396801471710205
|
| 203 |
+
1750671883.496514,45381,0.6396065950393677
|
| 204 |
+
1750671999.103832,45481,0.6402015686035156
|
| 205 |
+
1750672114.666927,45581,0.6429314017295837
|
| 206 |
+
1750672230.1629078,45681,0.6415674090385437
|
| 207 |
+
1750672345.576981,45781,0.6380888223648071
|
| 208 |
+
1750672461.0743942,45881,0.6400943398475647
|
| 209 |
+
1750672576.9968,45981,0.6397291421890259
|
| 210 |
+
1750672692.294366,46081,0.6416593194007874
|
| 211 |
+
1750672807.730447,46181,0.6401035785675049
|
| 212 |
+
1750672923.071418,46281,0.6408541798591614
|
| 213 |
+
1750673038.510377,46381,0.6429779529571533
|
| 214 |
+
1750673153.916872,46481,0.6408265829086304
|
| 215 |
+
1750673269.40186,46581,0.6402071118354797
|
| 216 |
+
1750673384.451123,46681,0.6400980353355408
|
| 217 |
+
1750674100.060661,46794,0.6442976593971252
|
| 218 |
+
1750674214.866165,46894,0.6420912742614746
|
| 219 |
+
1750674330.260976,46994,0.6419485211372375
|
| 220 |
+
1750674445.835049,47094,0.6429399251937866
|
| 221 |
+
1750674560.9154491,47194,0.6439393162727356
|
| 222 |
+
1750674676.151907,47294,0.6419363021850586
|
| 223 |
+
1750674791.636968,47394,0.6423609256744385
|
| 224 |
+
1750674922.1213708,47494,0.644547164440155
|
| 225 |
+
1750675052.2869709,47594,0.6426292657852173
|
| 226 |
+
1750675167.9400709,47694,0.6425269842147827
|
| 227 |
+
1750675283.399919,47794,0.6424803733825684
|
| 228 |
+
1750675398.730604,47894,0.6421470642089844
|
| 229 |
+
1750675514.069264,47994,0.6427990198135376
|
| 230 |
+
1750675629.360107,48094,0.6415747404098511
|
| 231 |
+
1750675744.7140338,48194,0.642421543598175
|
| 232 |
+
1750675859.934976,48294,0.6413670182228088
|
| 233 |
+
1750675974.88224,48394,0.6440698504447937
|
| 234 |
+
1750676090.1252851,48494,0.6412782073020935
|
| 235 |
+
1750676205.5739782,48594,0.6424521803855896
|
| 236 |
+
1750676320.4785259,48694,0.6418020725250244
|
| 237 |
+
1750676435.3930979,48794,0.6436948776245117
|
| 238 |
+
1750676550.250946,48894,0.6425514817237854
|
| 239 |
+
1750676665.2288482,48994,0.6427904367446899
|
| 240 |
+
1750676780.087465,49094,0.6412267088890076
|
| 241 |
+
1750676894.973839,49194,0.6426372528076172
|
| 242 |
+
1750677010.184855,49294,0.641489565372467
|
| 243 |
+
1750677125.007437,49394,0.6410931348800659
|
| 244 |
+
1750677239.845695,49494,0.6417775750160217
|
| 245 |
+
1750677354.7222772,49594,0.6416972875595093
|
| 246 |
+
1750677469.7859788,49694,0.6428253650665283
|
| 247 |
+
1750677584.576396,49794,0.6431384682655334
|
| 248 |
+
1750678299.27569,49907,0.6434351205825806
|
| 249 |
+
1750678413.905586,50007,0.6445478200912476
|
| 250 |
+
1750678531.823102,50107,0.6454742550849915
|
| 251 |
+
1750678646.712779,50207,0.643337607383728
|
| 252 |
+
1750678761.7013912,50307,0.644447922706604
|
| 253 |
+
1750678880.435966,50407,0.6441225409507751
|
| 254 |
+
1750679010.284852,50507,0.6426029205322266
|
| 255 |
+
1750679134.0058482,50607,0.644059419631958
|
| 256 |
+
1750679249.3821728,50707,0.6426562666893005
|
| 257 |
+
1750679364.4773529,50807,0.6431776881217957
|
| 258 |
+
1750679479.19853,50907,0.6433780789375305
|
| 259 |
+
1750679593.954922,51007,0.64449143409729
|
| 260 |
+
1750679709.1884952,51107,0.6440226435661316
|
| 261 |
+
1750679823.9469929,51207,0.6416170597076416
|
| 262 |
+
1750679938.726235,51307,0.6432934999465942
|
| 263 |
+
1750680053.322082,51407,0.6428688764572144
|
| 264 |
+
1750680167.934365,51507,0.6435839533805847
|
| 265 |
+
1750680282.568205,51607,0.6457653045654297
|
| 266 |
+
1750680397.378247,51707,0.6440061330795288
|
| 267 |
+
1750680512.335923,51807,0.645423412322998
|
| 268 |
+
1750680626.8774269,51907,0.6447021961212158
|
| 269 |
+
1750680741.40782,52007,0.6430257558822632
|
| 270 |
+
1750680855.937533,52107,0.641985297203064
|
| 271 |
+
1750680971.009221,52207,0.6439638733863831
|
| 272 |
+
1750681085.748811,52307,0.6442089676856995
|
| 273 |
+
1750681200.528425,52407,0.6428124904632568
|
| 274 |
+
1750681315.339005,52507,0.6452438831329346
|
| 275 |
+
1750681429.589907,52607,0.6432763338088989
|
| 276 |
+
1750681543.8544981,52707,0.6444277167320251
|
| 277 |
+
1750681658.02585,52807,0.6424583196640015
|
| 278 |
+
1750681772.812811,52907,0.6432414054870605
|
| 279 |
+
1750682496.464426,53020,0.6453410387039185
|
| 280 |
+
1750682610.4130208,53120,0.6466960906982422
|
| 281 |
+
1750682724.89655,53220,0.6458014845848083
|
| 282 |
+
1750682851.957867,53320,0.6452855467796326
|
| 283 |
+
1750682974.301877,53420,0.6456722021102905
|
| 284 |
+
1750683088.494407,53520,0.6459877490997314
|
| 285 |
+
1750683203.613056,53620,0.6453210711479187
|
| 286 |
+
1750683318.650611,53720,0.643280029296875
|
| 287 |
+
1750683433.289535,53820,0.646141529083252
|
| 288 |
+
1750683548.048495,53920,0.6453112959861755
|
| 289 |
+
1750683662.402407,54020,0.6435919404029846
|
| 290 |
+
1750683776.7563431,54120,0.6448676586151123
|
| 291 |
+
1750683891.090209,54220,0.6451409459114075
|
| 292 |
+
1750684005.30981,54320,0.6430343389511108
|
| 293 |
+
1750684119.456249,54420,0.6458688974380493
|
| 294 |
+
1750684233.579416,54520,0.6452438831329346
|
| 295 |
+
1750684347.760201,54620,0.6426666975021362
|
| 296 |
+
1750684461.8397079,54720,0.6438566446304321
|
| 297 |
+
1750684576.002183,54820,0.6431954503059387
|
| 298 |
+
1750684690.038761,54920,0.6441697478294373
|
| 299 |
+
1750684804.1086779,55020,0.6478394865989685
|
| 300 |
+
1750684918.116061,55120,0.6437377333641052
|
| 301 |
+
1750685032.259659,55220,0.6452493667602539
|
| 302 |
+
1750685146.6256042,55320,0.6453210711479187
|
| 303 |
+
1750685260.67823,55420,0.6463456153869629
|
| 304 |
+
1750685376.882659,55520,0.6453866362571716
|
| 305 |
+
1750685493.291549,55620,0.6427052617073059
|
| 306 |
+
1750685607.024134,55720,0.6428866386413574
|
| 307 |
+
1750685727.476725,55820,0.6446048021316528
|
| 308 |
+
1750685842.3274372,55920,0.6424411535263062
|
| 309 |
+
1750685956.441565,56020,0.6442604064941406
|
| 310 |
+
1750686672.225863,56133,0.6452059149742126
|
| 311 |
+
1750686791.769061,56233,0.6464859247207642
|
| 312 |
+
1750686906.029582,56333,0.6462916731834412
|
| 313 |
+
1750687020.063763,56433,0.645586371421814
|
| 314 |
+
1750687134.383457,56533,0.6452543139457703
|
| 315 |
+
1750687248.784163,56633,0.6459258794784546
|
| 316 |
+
1750687362.727921,56733,0.6461715698242188
|
| 317 |
+
1750687476.80187,56833,0.6469969153404236
|
| 318 |
+
1750687590.8523,56933,0.6452996134757996
|
| 319 |
+
1750687705.0287628,57033,0.6463247537612915
|
| 320 |
+
1750687818.963202,57133,0.6458566188812256
|
| 321 |
+
1750687933.0801868,57233,0.6479871273040771
|
| 322 |
+
1750688047.127454,57333,0.6460961699485779
|
| 323 |
+
1750688161.213365,57433,0.6467254757881165
|
| 324 |
+
1750688276.0170398,57533,0.644781231880188
|
| 325 |
+
1750688403.652831,57633,0.6459190845489502
|
| 326 |
+
1750688521.523565,57733,0.6464926600456238
|
| 327 |
+
1750688644.4562812,57833,0.6445631384849548
|
| 328 |
+
1750688770.101472,57933,0.6468847990036011
|
| 329 |
+
1750688890.472131,58033,0.6455134749412537
|
| 330 |
+
1750689007.258662,58133,0.6451200842857361
|
| 331 |
+
1750689125.634627,58233,0.6431875228881836
|
| 332 |
+
1750689244.2372658,58333,0.644850492477417
|
| 333 |
+
1750689364.695701,58433,0.6435110569000244
|
| 334 |
+
1750689483.856778,58533,0.64333575963974
|
| 335 |
+
1750689602.730277,58633,0.6450287699699402
|
| 336 |
+
1750689715.3254662,58733,0.6437554955482483
|
| 337 |
+
1750689828.575561,58833,0.6466703414916992
|
| 338 |
+
1750689943.0059462,58933,0.6457230448722839
|
| 339 |
+
1750690057.1912692,59033,0.6446985006332397
|
| 340 |
+
1750690171.580263,59133,0.6473180055618286
|
| 341 |
+
1750690905.871822,59246,0.64793860912323
|
| 342 |
+
1750691038.545429,59346,0.6456004977226257
|
| 343 |
+
1750691165.34847,59446,0.6488615274429321
|
| 344 |
+
1750691280.322457,59546,0.6473891139030457
|
| 345 |
+
1750691403.3815491,59646,0.6478112936019897
|
| 346 |
+
1750691524.508241,59746,0.6466991305351257
|
| 347 |
+
1750691644.0496929,59846,0.6483572125434875
|
| 348 |
+
1750691764.225536,59946,0.6464233994483948
|
| 349 |
+
1750691880.756077,60046,0.6465569734573364
|
| 350 |
+
1750692002.569738,60146,0.6469810009002686
|
| 351 |
+
1750692120.909455,60246,0.6465851664543152
|
| 352 |
+
1750692245.229753,60346,0.6458603143692017
|
| 353 |
+
1750692369.640859,60446,0.6466072201728821
|
| 354 |
+
1750692495.37012,60546,0.6461721658706665
|
| 355 |
+
1750692623.336402,60646,0.6448425054550171
|
| 356 |
+
1750692748.5088708,60746,0.6467867493629456
|
| 357 |
+
1750692874.7629929,60846,0.6467475295066833
|
| 358 |
+
1750693004.752369,60946,0.6442990303039551
|
| 359 |
+
1750693132.13376,61046,0.6468836069107056
|
| 360 |
+
1750693252.7927139,61146,0.643668532371521
|
| 361 |
+
1750693374.481353,61246,0.6488774418830872
|
| 362 |
+
1750693516.340599,61346,0.6444466710090637
|
| 363 |
+
1750693652.6204538,61446,0.6460545063018799
|
| 364 |
+
1750693789.2221708,61546,0.6460103988647461
|
| 365 |
+
1750693923.9854028,61646,0.6463541388511658
|
| 366 |
+
1750694062.410586,61746,0.6441966891288757
|
| 367 |
+
1750694211.168782,61846,0.6448927521705627
|
| 368 |
+
1750694354.862964,61946,0.6444712281227112
|
| 369 |
+
1750694509.583164,62046,0.6447359323501587
|
| 370 |
+
1750694658.221782,62146,0.6455471515655518
|
| 371 |
+
1750694811.258077,62246,0.6479007601737976
|
| 372 |
+
1750695756.325511,62359,0.6486972570419312
|
| 373 |
+
1750695908.34564,62459,0.6474969387054443
|
| 374 |
+
1750696060.974647,62559,0.6472408175468445
|
| 375 |
+
1750696213.8780239,62659,0.6475049257278442
|
| 376 |
+
1750696371.832135,62759,0.6473884582519531
|
| 377 |
+
1750696538.972726,62859,0.6451342105865479
|
| 378 |
+
1750696703.472451,62959,0.6484380960464478
|
| 379 |
+
1750696862.664015,63059,0.6466072201728821
|
| 380 |
+
1750697019.825461,63159,0.6475588083267212
|
| 381 |
+
1750697178.623338,63259,0.6457787752151489
|
| 382 |
+
1750697338.321743,63359,0.6473228931427002
|
| 383 |
+
1750697496.1072798,63459,0.6486611366271973
|
| 384 |
+
1750697640.699009,63559,0.6490833163261414
|
| 385 |
+
1750697798.67978,63659,0.6481010913848877
|
| 386 |
+
1750697973.567866,63759,0.6467138528823853
|
| 387 |
+
1750698130.159744,63859,0.6478351950645447
|
| 388 |
+
1750698288.505541,63959,0.6474264860153198
|
| 389 |
+
1750698443.6894138,64059,0.6452934741973877
|
| 390 |
+
1750698600.570563,64159,0.6454577445983887
|
| 391 |
+
1750698762.2083058,64259,0.6472849249839783
|
| 392 |
+
1750698927.0930612,64359,0.6461752653121948
|
| 393 |
+
1750699092.819475,64459,0.6464166641235352
|
| 394 |
+
1750699254.193381,64559,0.6461654305458069
|
| 395 |
+
1750699417.738376,64659,0.6442646980285645
|
| 396 |
+
1750699580.8290539,64759,0.6487463116645813
|
| 397 |
+
1750699744.4795609,64859,0.645877480506897
|
| 398 |
+
1750699902.463346,64959,0.6496366262435913
|
| 399 |
+
1750700059.6175628,65059,0.6453235149383545
|
| 400 |
+
1750700217.401553,65159,0.6459877490997314
|
| 401 |
+
1750700373.628779,65259,0.6481280326843262
|
| 402 |
+
1750700522.979404,65359,0.6468296647071838
|
| 403 |
+
1750701377.086987,65472,0.6475510597229004
|
| 404 |
+
1750701517.144674,65572,0.6492867469787598
|
| 405 |
+
1750701645.158174,65672,0.650678277015686
|
| 406 |
+
1750701763.433599,65772,0.6491249799728394
|
| 407 |
+
1750701883.156283,65872,0.649980366230011
|
| 408 |
+
1750702005.192172,65972,0.6487285494804382
|
| 409 |
+
1750702135.9532921,66072,0.6475416421890259
|
| 410 |
+
1750702259.124198,66172,0.647977352142334
|
| 411 |
+
1750702380.897517,66272,0.6486433744430542
|
| 412 |
+
1750702504.3463502,66372,0.6462622284889221
|
| 413 |
+
1750702627.001485,66472,0.6512279510498047
|
| 414 |
+
1750702748.683696,66572,0.6497218012809753
|
| 415 |
+
1750702871.379016,66672,0.646327793598175
|
| 416 |
+
1750703002.8059258,66772,0.6470361351966858
|
| 417 |
+
1750703129.8358212,66872,0.6464185118675232
|
| 418 |
+
1750703254.3501709,66972,0.6481636166572571
|
| 419 |
+
1750703374.948131,67072,0.6462395787239075
|
| 420 |
+
1750703496.332047,67172,0.6486942172050476
|
| 421 |
+
1750703622.6555572,67272,0.6456899642944336
|
| 422 |
+
1750703745.221765,67372,0.6455833315849304
|
| 423 |
+
1750703878.715412,67472,0.6465294361114502
|
| 424 |
+
1750703999.804201,67572,0.6461703181266785
|
| 425 |
+
1750704123.5258,67672,0.6471580862998962
|
| 426 |
+
1750704244.238156,67772,0.6484571099281311
|
| 427 |
+
1750704364.829161,67872,0.6461807489395142
|
| 428 |
+
1750704503.792347,67972,0.6457751393318176
|
| 429 |
+
1750704647.331369,68072,0.6455257534980774
|
| 430 |
+
1750704783.771793,68172,0.6465281844139099
|
| 431 |
+
1750704923.119327,68272,0.6465435028076172
|
| 432 |
+
1750705071.639251,68372,0.6453333497047424
|
| 433 |
+
1750705197.302738,68472,0.6477996110916138
|
| 434 |
+
1750705965.7468271,68585,0.6481080055236816
|
| 435 |
+
1750706082.729342,68685,0.6502708196640015
|
| 436 |
+
1750706202.871629,68785,0.6493762135505676
|
| 437 |
+
1750706323.307158,68885,0.6498241424560547
|
| 438 |
+
1750706443.331074,68985,0.6487175226211548
|
| 439 |
+
1750706562.951682,69085,0.6472249031066895
|
| 440 |
+
1750706683.12535,69185,0.6495385766029358
|
| 441 |
+
1750706803.174572,69285,0.6501666903495789
|
| 442 |
+
1750706934.701206,69385,0.6494032144546509
|
| 443 |
+
1750707065.983119,69485,0.6485214233398438
|
| 444 |
+
1750707197.3294451,69585,0.6473425030708313
|
| 445 |
+
1750707327.289064,69685,0.6495343446731567
|
| 446 |
+
1750707451.997995,69785,0.649676501750946
|
| 447 |
+
1750707570.648684,69885,0.6482769846916199
|
| 448 |
+
1750707691.121522,69985,0.6493817567825317
|
| 449 |
+
1750707813.819514,70085,0.6475061178207397
|
| 450 |
+
1750707944.233801,70185,0.6452242732048035
|
| 451 |
+
1750708069.5539682,70285,0.6478223204612732
|
| 452 |
+
1750708193.143508,70385,0.6486562490463257
|
| 453 |
+
1750708313.543391,70485,0.6490907073020935
|
| 454 |
+
1750708435.994677,70585,0.6490913033485413
|
| 455 |
+
1750708556.496484,70685,0.6464282870292664
|
| 456 |
+
1750708683.346485,70785,0.6461372375488281
|
| 457 |
+
1750708804.8735192,70885,0.6472303867340088
|
| 458 |
+
1750708925.507859,70985,0.6471476554870605
|
| 459 |
+
1750709044.3017159,71085,0.6456458568572998
|
| 460 |
+
1750709164.810722,71185,0.6467101573944092
|
| 461 |
+
1750709292.3388329,71285,0.6471237540245056
|
| 462 |
+
1750709419.642994,71385,0.6455876231193542
|
| 463 |
+
1750709540.056331,71485,0.6471979022026062
|
| 464 |
+
1750709664.903027,71585,0.6474840641021729
|
| 465 |
+
1750710541.4944708,71698,0.6503886580467224
|
| 466 |
+
1750710717.131572,71798,0.6500024795532227
|
| 467 |
+
1750710901.969075,71898,0.6495906710624695
|
| 468 |
+
1750711072.293113,71998,0.6502586007118225
|
| 469 |
+
1750711202.7389472,72098,0.6461090445518494
|
| 470 |
+
1750711396.2362828,72198,0.6494160294532776
|
| 471 |
+
1750711598.5428538,72298,0.6488057374954224
|
| 472 |
+
1750711785.426256,72398,0.6490386128425598
|
| 473 |
+
1750711975.593777,72498,0.6491384506225586
|
| 474 |
+
1750712139.114897,72598,0.6515661478042603
|
| 475 |
+
1750712276.62222,72698,0.6474993824958801
|
| 476 |
+
1750712408.118044,72798,0.647630512714386
|
| 477 |
+
1750712532.846184,72898,0.6472610235214233
|
| 478 |
+
1750712732.6694112,72998,0.6475815176963806
|
| 479 |
+
1750712912.756147,73098,0.6476121544837952
|
| 480 |
+
1750713089.944423,73198,0.6483517289161682
|
| 481 |
+
1750713266.5091908,73298,0.6472702026367188
|
| 482 |
+
1750713469.4219148,73398,0.6492101550102234
|
| 483 |
+
1750713638.181789,73498,0.6476243734359741
|
| 484 |
+
1750713771.21946,73598,0.6491378545761108
|
| 485 |
+
1750713912.549492,73698,0.6466948390007019
|
| 486 |
+
1750714050.563842,73798,0.6465091705322266
|
| 487 |
+
1750714183.45089,73898,0.6491899490356445
|
| 488 |
+
1750714314.628376,73998,0.6481991410255432
|
| 489 |
+
1750714435.101216,74098,0.6487414240837097
|
| 490 |
+
1750714560.014582,74198,0.6473872661590576
|
| 491 |
+
1750714745.7170901,74298,0.6494601964950562
|
| 492 |
+
1750714909.82759,74398,0.6497021913528442
|
| 493 |
+
1750715069.977015,74498,0.6480085849761963
|
| 494 |
+
1750715238.480225,74598,0.6475067138671875
|
| 495 |
+
1750715372.129917,74698,0.6503259539604187
|
| 496 |
+
1750716228.131658,74811,0.6505544781684875
|
| 497 |
+
1750716350.395545,74911,0.6496856808662415
|
| 498 |
+
1750716474.600398,75011,0.6469619870185852
|
| 499 |
+
1750716600.222958,75111,0.650341272354126
|
| 500 |
+
1750716724.592279,75211,0.6511985063552856
|
| 501 |
+
1750716846.6685288,75311,0.649648904800415
|
| 502 |
+
1750716970.249894,75411,0.6496115326881409
|
| 503 |
+
1750717093.2215412,75511,0.6492812633514404
|
| 504 |
+
1750717214.8042839,75611,0.651259183883667
|
| 505 |
+
1750717334.98802,75711,0.6500968337059021
|
| 506 |
+
1750717460.1300192,75811,0.6493321061134338
|
| 507 |
+
1750717583.9358342,75911,0.6485692262649536
|
| 508 |
+
1750717705.862606,76011,0.6479626297950745
|
| 509 |
+
1750717829.5720232,76111,0.6494246125221252
|
| 510 |
+
1750717955.916364,76211,0.6481115221977234
|
| 511 |
+
1750718085.498107,76311,0.6478915214538574
|
| 512 |
+
1750718216.036777,76411,0.6476054191589355
|
| 513 |
+
1750718340.389607,76511,0.648463249206543
|
| 514 |
+
1750718464.600441,76611,0.6489871144294739
|
| 515 |
+
1750718587.348203,76711,0.6475778222084045
|
| 516 |
+
1750718708.304975,76811,0.6484387516975403
|
| 517 |
+
1750718829.805079,76911,0.6481636166572571
|
| 518 |
+
1750718954.33559,77011,0.6491053700447083
|
| 519 |
+
1750719081.4483619,77111,0.6484283208847046
|
| 520 |
+
1750719204.075867,77211,0.6471807360649109
|
| 521 |
+
1750719324.287713,77311,0.6484160423278809
|
| 522 |
+
1750719445.91699,77411,0.6471991539001465
|
| 523 |
+
1750719576.412999,77511,0.6469191312789917
|
| 524 |
+
1750719705.654291,77611,0.6492947340011597
|
| 525 |
+
1750719828.729173,77711,0.6487928628921509
|
| 526 |
+
1750719952.819894,77811,0.6483290195465088
|
| 527 |
+
1750720707.855105,77924,0.6514130234718323
|
| 528 |
+
1750720834.3818061,78024,0.6511390805244446
|
| 529 |
+
1750720954.2498791,78124,0.6513014435768127
|
| 530 |
+
1750721075.6623218,78224,0.6500998735427856
|
| 531 |
+
1750721198.050805,78324,0.648921549320221
|
| 532 |
+
1750721324.231403,78424,0.6483725309371948
|
| 533 |
+
1750721445.729855,78524,0.649813711643219
|
| 534 |
+
1750721567.7231958,78624,0.6490533351898193
|
| 535 |
+
1750721689.3256562,78724,0.6521862745285034
|
| 536 |
+
1750721811.6838999,78824,0.6498749852180481
|
| 537 |
+
1750721934.568477,78924,0.6493455767631531
|
| 538 |
+
1750722057.12514,79024,0.6505618691444397
|
| 539 |
+
1750722180.053097,79124,0.6507413983345032
|
| 540 |
+
1750722309.0838542,79224,0.6470245122909546
|
| 541 |
+
1750722430.786697,79324,0.6500116586685181
|
| 542 |
+
1750722552.653567,79424,0.6486446261405945
|
| 543 |
+
1750722675.150284,79524,0.6497328281402588
|
| 544 |
+
1750722801.892572,79624,0.6492977738380432
|
| 545 |
+
1750722926.87468,79724,0.6499423980712891
|
| 546 |
+
1750723048.9938838,79824,0.649302065372467
|
| 547 |
+
1750723171.157585,79924,0.64703369140625
|
| 548 |
+
1750723293.181637,80024,0.649258553981781
|
| 549 |
+
1750723431.741166,80124,0.6478559970855713
|
| 550 |
+
1750723559.550064,80224,0.6506384611129761
|
| 551 |
+
1750723681.40414,80324,0.6469699740409851
|
| 552 |
+
1750723803.064142,80424,0.6478290557861328
|
| 553 |
+
1750723925.7209768,80524,0.6485876441001892
|
| 554 |
+
1750724047.5232399,80624,0.6461513638496399
|
| 555 |
+
1750724170.174003,80724,0.647087037563324
|
| 556 |
+
1750724297.151586,80824,0.6457886099815369
|
| 557 |
+
1750724417.878161,80924,0.6503731608390808
|
| 558 |
+
1750725157.696506,81037,0.6495773792266846
|
| 559 |
+
1750725276.6986072,81137,0.6504503488540649
|
| 560 |
+
1750725397.190547,81237,0.6491556167602539
|
| 561 |
+
1750725524.3689,81337,0.6497187614440918
|
| 562 |
+
1750725644.67522,81437,0.6509546637535095
|
| 563 |
+
1750725765.146411,81537,0.649066150188446
|
| 564 |
+
1750725884.893885,81637,0.6516880989074707
|
| 565 |
+
1750726005.993588,81737,0.649161159992218
|
| 566 |
+
1750726126.0909061,81837,0.6498290300369263
|
| 567 |
+
1750726245.70204,81937,0.6476758718490601
|
| 568 |
+
1750726365.361954,82037,0.6505698561668396
|
| 569 |
+
1750726485.85697,82137,0.6511262059211731
|
| 570 |
+
1750726605.3225331,82237,0.6503793001174927
|
| 571 |
+
1750726724.761475,82337,0.6514166593551636
|
| 572 |
+
1750726844.29699,82437,0.6494975686073303
|
| 573 |
+
1750726963.862327,82537,0.6492120027542114
|
| 574 |
+
1750727083.898033,82637,0.6498327255249023
|
| 575 |
+
1750727203.5610502,82737,0.6495827436447144
|
| 576 |
+
1750727335.467991,82837,0.6498339176177979
|
| 577 |
+
1750727461.4255848,82937,0.6493180394172668
|
| 578 |
+
1750727580.802688,83037,0.6476672887802124
|
| 579 |
+
1750727700.546695,83137,0.6492990255355835
|
| 580 |
+
1750727820.246243,83237,0.6486592888832092
|
| 581 |
+
1750727939.7627158,83337,0.6493976712226868
|
| 582 |
+
1750728059.387846,83437,0.6471347808837891
|
| 583 |
+
1750728178.965193,83537,0.6504607796669006
|
| 584 |
+
1750728298.911614,83637,0.6483713388442993
|
| 585 |
+
1750728417.9110038,83737,0.6494062542915344
|
| 586 |
+
1750728536.891008,83837,0.6495723128318787
|
| 587 |
+
1750728656.541192,83937,0.6500159502029419
|
| 588 |
+
1750728775.999934,84037,0.6504166722297668
|
| 589 |
+
1750729516.326271,84150,0.6514350175857544
|
| 590 |
+
1750729635.5868201,84250,0.6485661864280701
|
| 591 |
+
1750729755.283911,84350,0.6504926681518555
|
| 592 |
+
1750729874.948045,84450,0.6497555375099182
|
| 593 |
+
1750729994.65663,84550,0.6508461833000183
|
| 594 |
+
1750730115.380836,84650,0.6527867913246155
|
| 595 |
+
1750730235.7166631,84750,0.6499779224395752
|
| 596 |
+
1750730357.25261,84850,0.649398922920227
|
| 597 |
+
1750730481.5908852,84950,0.65004962682724
|
| 598 |
+
1750730602.491642,85050,0.650799036026001
|
| 599 |
+
1750730722.6586602,85150,0.6507383584976196
|
| 600 |
+
1750730842.4600708,85250,0.6495306491851807
|
| 601 |
+
1750730962.190387,85350,0.6494981646537781
|
| 602 |
+
1750731088.203167,85450,0.649103581905365
|
| 603 |
+
1750731223.60728,85550,0.6513688564300537
|
| 604 |
+
1750731342.761135,85650,0.6500018239021301
|
| 605 |
+
1750731462.366403,85750,0.6494865417480469
|
| 606 |
+
1750731581.910183,85850,0.6486973166465759
|
| 607 |
+
1750731702.0141718,85950,0.6486813426017761
|
| 608 |
+
1750731821.820343,86050,0.6480244994163513
|
| 609 |
+
1750731941.7614589,86150,0.6482402086257935
|
| 610 |
+
1750732061.7666922,86250,0.6491397023200989
|
| 611 |
+
1750732181.49381,86350,0.6469172835350037
|
| 612 |
+
1750732300.797029,86450,0.6487285494804382
|
| 613 |
+
1750732422.157172,86550,0.649090051651001
|
| 614 |
+
1750732542.973932,86650,0.6489270925521851
|
| 615 |
+
1750732662.5130408,86750,0.6489424109458923
|
| 616 |
+
1750732785.6029382,86850,0.6521629691123962
|
| 617 |
+
1750732910.433344,86950,0.6478688716888428
|
| 618 |
+
1750733035.9218528,87050,0.6507003903388977
|
| 619 |
+
1750733156.725365,87150,0.6498915553092957
|
| 620 |
+
1750733896.359711,87263,0.6497565507888794
|
| 621 |
+
1750734015.399508,87363,0.6516507267951965
|
| 622 |
+
1750734134.69631,87463,0.6511244177818298
|
| 623 |
+
1750734254.233375,87563,0.6511893272399902
|
| 624 |
+
1750734373.475133,87663,0.6513578295707703
|
| 625 |
+
1750734493.382653,87763,0.6510146856307983
|
| 626 |
+
1750734612.470846,87863,0.649398922920227
|
| 627 |
+
1750734731.6779652,87963,0.6510551571846008
|
| 628 |
+
1750734850.9760342,88063,0.6505367755889893
|
| 629 |
+
1750734970.168761,88163,0.6485563516616821
|
| 630 |
+
1750735097.956203,88263,0.651425838470459
|
| 631 |
+
1750735230.192062,88363,0.6517040729522705
|
| 632 |
+
1750735353.5143921,88463,0.649908721446991
|
| 633 |
+
1750735472.50462,88563,0.6502830982208252
|
| 634 |
+
1750735592.248508,88663,0.6493400931358337
|
| 635 |
+
1750735711.481355,88763,0.6515184044837952
|
| 636 |
+
1750735835.323574,88863,0.651968777179718
|
| 637 |
+
1750735959.649396,88963,0.6492573618888855
|
| 638 |
+
1750736079.622922,89063,0.6485177874565125
|
| 639 |
+
1750736201.45227,89163,0.65073162317276
|
| 640 |
+
1750736328.779538,89263,0.6506391167640686
|
| 641 |
+
1750736453.863393,89363,0.6493461728096008
|
| 642 |
+
1750736579.8850691,89463,0.6513738036155701
|
| 643 |
+
1750736706.55581,89563,0.649884819984436
|
| 644 |
+
1750736833.269705,89663,0.6500735282897949
|
| 645 |
+
1750736960.1432378,89763,0.6492965817451477
|
| 646 |
+
1750737084.776,89863,0.6479424238204956
|
| 647 |
+
1750737208.4969301,89963,0.6495612859725952
|
| 648 |
+
1750737332.222544,90063,0.650051474571228
|
| 649 |
+
1750737455.16169,90163,0.6492947340011597
|
| 650 |
+
1750737577.5052302,90263,0.649922788143158
|
| 651 |
+
1750738343.503647,90376,0.6517683863639832
|
| 652 |
+
1750738464.6283572,90476,0.6503811478614807
|
| 653 |
+
1750738586.790051,90576,0.6515122652053833
|
| 654 |
+
1750738709.366909,90676,0.6520986557006836
|
| 655 |
+
1750738832.5377321,90776,0.6523400545120239
|
| 656 |
+
1750738970.401221,90876,0.6531476974487305
|
| 657 |
+
1750739095.7010791,90976,0.6510576009750366
|
| 658 |
+
1750739217.840459,91076,0.6499638557434082
|
| 659 |
+
1750739340.2926238,91176,0.6502794027328491
|
| 660 |
+
1750739463.466149,91276,0.6517659425735474
|
| 661 |
+
1750739590.7468421,91376,0.6497941017150879
|
| 662 |
+
1750739713.8746822,91476,0.6495937705039978
|
| 663 |
+
1750739836.112065,91576,0.6491133570671082
|
| 664 |
+
1750739963.997621,91676,0.6504295468330383
|
| 665 |
+
1750740087.7551308,91776,0.6515079736709595
|
| 666 |
+
1750740212.115783,91876,0.6501353979110718
|
| 667 |
+
1750740336.1198661,91976,0.6524007320404053
|
| 668 |
+
1750740460.717707,92076,0.6504209637641907
|
| 669 |
+
1750740583.9634502,92176,0.6514607667922974
|
| 670 |
+
1750740706.436011,92276,0.6513572335243225
|
| 671 |
+
1750740829.78276,92376,0.6487401723861694
|
| 672 |
+
1750740953.5337112,92476,0.6475380063056946
|
| 673 |
+
1750741076.088882,92576,0.6479228138923645
|
| 674 |
+
1750741199.643792,92676,0.6485998630523682
|
| 675 |
+
1750741323.0374389,92776,0.6502567529678345
|
| 676 |
+
1750741445.873158,92876,0.6485974192619324
|
| 677 |
+
1750741568.847466,92976,0.6510079503059387
|
| 678 |
+
1750741691.524961,93076,0.6507824659347534
|
| 679 |
+
1750741814.875741,93176,0.6490287780761719
|
| 680 |
+
1750741937.7055318,93276,0.6506329774856567
|
| 681 |
+
1750742060.312392,93376,0.6476893424987793
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750512519.815718,99,0.35653799772262573
|
| 3 |
+
1750512633.343317,199,0.4381672739982605
|
| 4 |
+
1750512747.365131,299,0.4712659418582916
|
| 5 |
+
1750512861.224118,399,0.49623775482177734
|
| 6 |
+
1750512975.119779,499,0.513439953327179
|
| 7 |
+
1750513088.872152,599,0.5254001021385193
|
| 8 |
+
1750513809.867424,722,0.5253951549530029
|
| 9 |
+
1750513923.8327522,822,0.5347071290016174
|
| 10 |
+
1750514037.5889359,922,0.5412462949752808
|
| 11 |
+
1750514151.3481102,1022,0.5503872632980347
|
| 12 |
+
1750514265.3658612,1122,0.554965078830719
|
| 13 |
+
1750514379.196322,1222,0.5614001154899597
|
| 14 |
+
1750515100.2786849,1345,0.555978000164032
|
| 15 |
+
1750515221.4078789,1445,0.5601238012313843
|
| 16 |
+
1750515339.4312892,1545,0.5642242431640625
|
| 17 |
+
1750515452.4170399,1645,0.5709405541419983
|
| 18 |
+
1750515568.994731,1745,0.5758161544799805
|
| 19 |
+
1750515686.4188118,1845,0.5814154148101807
|
| 20 |
+
1750516412.14166,1968,0.5749399662017822
|
| 21 |
+
1750516525.414965,2068,0.5782021880149841
|
| 22 |
+
1750516639.16349,2168,0.5816942453384399
|
| 23 |
+
1750516752.973536,2268,0.5854877233505249
|
| 24 |
+
1750516866.946796,2368,0.5889233946800232
|
| 25 |
+
1750516981.5747578,2468,0.592665433883667
|
| 26 |
+
1750517706.2651598,2591,0.5968009233474731
|
| 27 |
+
1750517820.3146722,2691,0.6044197082519531
|
| 28 |
+
1750517934.468713,2791,0.6065300107002258
|
| 29 |
+
1750518049.348709,2891,0.6092806458473206
|
| 30 |
+
1750518163.634838,2991,0.6104442477226257
|
| 31 |
+
1750518278.329371,3091,0.6138020753860474
|
| 32 |
+
1750519000.4730198,3214,0.6145917177200317
|
| 33 |
+
1750519114.607754,3314,0.6169730424880981
|
| 34 |
+
1750519228.8915122,3414,0.6186458468437195
|
| 35 |
+
1750519342.9850502,3514,0.6210214495658875
|
| 36 |
+
1750519457.0511758,3614,0.6226415634155273
|
| 37 |
+
1750519571.277271,3714,0.6246513724327087
|
| 38 |
+
1750520294.328237,3837,0.6104810237884521
|
| 39 |
+
1750520408.563489,3937,0.6084001064300537
|
| 40 |
+
1750520522.554494,4037,0.6111329793930054
|
| 41 |
+
1750520636.649106,4137,0.6098198294639587
|
| 42 |
+
1750520750.8757708,4237,0.6129932403564453
|
| 43 |
+
1750520865.4166121,4337,0.6150122284889221
|
| 44 |
+
1750521588.555209,4460,0.5972975492477417
|
| 45 |
+
1750521703.739831,4560,0.6002150774002075
|
| 46 |
+
1750521818.0140438,4660,0.5990980267524719
|
| 47 |
+
1750521932.381789,4760,0.6041396856307983
|
| 48 |
+
1750522046.6180868,4860,0.6016372442245483
|
| 49 |
+
1750522160.804651,4960,0.6044938564300537
|
| 50 |
+
1750522882.363214,5083,0.5452057719230652
|
| 51 |
+
1750522996.178067,5183,0.5377548933029175
|
| 52 |
+
1750523110.268896,5283,0.5406078696250916
|
| 53 |
+
1750523224.938071,5383,0.5426801443099976
|
| 54 |
+
1750541295.082926,5483,0.5457214117050171
|
| 55 |
+
1750541413.756873,5583,0.5476936101913452
|
| 56 |
+
1750542158.7599602,5706,0.5522372126579285
|
| 57 |
+
1750542290.217236,5806,0.5540257096290588
|
| 58 |
+
1750542417.53398,5906,0.5527156591415405
|
| 59 |
+
1750542537.180147,6006,0.5527616143226624
|
| 60 |
+
1750542656.817374,6106,0.5534932613372803
|
| 61 |
+
1750542777.823172,6206,0.5568425059318542
|
| 62 |
+
1750543545.430414,31229,0.610929548740387
|
| 63 |
+
1750543663.278502,31329,0.6151758432388306
|
| 64 |
+
1750543781.815904,31429,0.615355372428894
|
| 65 |
+
1750543905.7521641,31529,0.6202402114868164
|
| 66 |
+
1750544029.771921,31629,0.6155343055725098
|
| 67 |
+
1750544148.302644,31729,0.6183946132659912
|
| 68 |
+
1750544267.0461318,31829,0.6176010966300964
|
| 69 |
+
1750544385.893482,31929,0.6180815100669861
|
| 70 |
+
1750544505.148078,32029,0.61910480260849
|
| 71 |
+
1750544623.689687,32129,0.6188995242118835
|
| 72 |
+
1750544742.472401,32229,0.6202977895736694
|
| 73 |
+
1750544862.1196558,32329,0.6215581893920898
|
| 74 |
+
1750544980.752425,32429,0.6213241219520569
|
| 75 |
+
1750545100.1657238,32529,0.6201795339584351
|
| 76 |
+
1750545219.826904,32629,0.6202463507652283
|
| 77 |
+
1750545339.65601,32729,0.6230955719947815
|
| 78 |
+
1750545459.081982,32829,0.6213045120239258
|
| 79 |
+
1750545578.037591,32929,0.6234099268913269
|
| 80 |
+
1750545696.975323,33029,0.6207371354103088
|
| 81 |
+
1750545815.385591,33129,0.6228461861610413
|
| 82 |
+
1750545934.608308,33229,0.6237910389900208
|
| 83 |
+
1750546053.3803542,33329,0.6244209408760071
|
| 84 |
+
1750546182.375731,33429,0.622624397277832
|
| 85 |
+
1750546307.277687,33529,0.6251568794250488
|
| 86 |
+
1750546429.378062,33629,0.6256231665611267
|
| 87 |
+
1750546550.834217,33729,0.6248804926872253
|
| 88 |
+
1750546671.655812,33829,0.6258406639099121
|
| 89 |
+
1750546792.428179,33929,0.6277745366096497
|
| 90 |
+
1750546918.626771,34029,0.6248946189880371
|
| 91 |
+
1750547042.8809779,34129,0.6268229484558105
|
| 92 |
+
1750547162.770035,34229,0.6267101764678955
|
| 93 |
+
1750547925.0949068,34342,0.629385769367218
|
| 94 |
+
1750548054.083701,34442,0.6300692558288574
|
| 95 |
+
1750548178.90531,34542,0.6305318474769592
|
| 96 |
+
1750548304.004815,34642,0.6282947063446045
|
| 97 |
+
1750548425.417802,34742,0.6297309994697571
|
| 98 |
+
1750548548.194823,34842,0.6289791464805603
|
| 99 |
+
1750548669.393541,34942,0.6279013752937317
|
| 100 |
+
1750548789.350036,35042,0.6303896903991699
|
| 101 |
+
1750548906.935905,35142,0.6307867765426636
|
| 102 |
+
1750549022.121208,35242,0.6295006275177002
|
| 103 |
+
1750549141.310299,35342,0.6311660408973694
|
| 104 |
+
1750549268.005093,35442,0.6301262378692627
|
| 105 |
+
1750549387.780771,35542,0.6300386190414429
|
| 106 |
+
1750549514.264724,35642,0.6322604417800903
|
| 107 |
+
1750549639.401718,35742,0.6307003498077393
|
| 108 |
+
1750549761.940147,35842,0.6288437247276306
|
| 109 |
+
1750549887.1971352,35942,0.6304025650024414
|
| 110 |
+
1750550011.6239932,36042,0.6312199831008911
|
| 111 |
+
1750550132.8257868,36142,0.6306874752044678
|
| 112 |
+
1750550255.810136,36242,0.6294883489608765
|
| 113 |
+
1750550381.590354,36342,0.6302328705787659
|
| 114 |
+
1750550501.410628,36442,0.6305447220802307
|
| 115 |
+
1750550634.333121,36542,0.6334442496299744
|
| 116 |
+
1750550765.55441,36642,0.6321170330047607
|
| 117 |
+
1750550887.487181,36742,0.6318621039390564
|
| 118 |
+
1750551013.896276,36842,0.6318296790122986
|
| 119 |
+
1750551137.945178,36942,0.6315900683403015
|
| 120 |
+
1750551266.814848,37042,0.6314914226531982
|
| 121 |
+
1750551394.2324128,37142,0.6335533261299133
|
| 122 |
+
1750551523.5538719,37242,0.6346985101699829
|
| 123 |
+
1750551650.787906,37342,0.6316176652908325
|
| 124 |
+
1750552466.9763598,37455,0.6362544894218445
|
| 125 |
+
1750552593.755706,37555,0.6365900635719299
|
| 126 |
+
1750552717.375516,37655,0.6357371211051941
|
| 127 |
+
1750552835.905696,37755,0.6341838240623474
|
| 128 |
+
1750552954.684725,37855,0.6333137154579163
|
| 129 |
+
1750553073.9152,37955,0.6359472870826721
|
| 130 |
+
1750553192.2524118,38055,0.6344184875488281
|
| 131 |
+
1750553310.080835,38155,0.6353762149810791
|
| 132 |
+
1750553428.732256,38255,0.6352101564407349
|
| 133 |
+
1750553546.970997,38355,0.6349589228630066
|
| 134 |
+
1750553666.473217,38455,0.6361311078071594
|
| 135 |
+
1750553784.241045,38555,0.6340802907943726
|
| 136 |
+
1750553902.091969,38655,0.6345416903495789
|
| 137 |
+
1750554026.661404,38755,0.6336268186569214
|
| 138 |
+
1750554146.0335739,38855,0.6340404152870178
|
| 139 |
+
1750554265.734678,38955,0.6374675035476685
|
| 140 |
+
1750554412.668614,39055,0.6357383728027344
|
| 141 |
+
1750554536.714976,39155,0.6354368925094604
|
| 142 |
+
1750554654.934488,39255,0.634547770023346
|
| 143 |
+
1750554776.778559,39355,0.6353645920753479
|
| 144 |
+
1750554900.425964,39455,0.6348314881324768
|
| 145 |
+
1750555022.075907,39555,0.6348682641983032
|
| 146 |
+
1750555142.84383,39655,0.6349509954452515
|
| 147 |
+
1750555263.858019,39755,0.6392279267311096
|
| 148 |
+
1750555381.484349,39855,0.6370049118995667
|
| 149 |
+
1750555500.748307,39955,0.6361789107322693
|
| 150 |
+
1750555619.281695,40055,0.6373627185821533
|
| 151 |
+
1750555738.519796,40155,0.6367095708847046
|
| 152 |
+
1750555857.1638122,40255,0.6352824568748474
|
| 153 |
+
1750555976.423006,40355,0.6345422863960266
|
| 154 |
+
1750556097.481622,40455,0.6377561092376709
|
| 155 |
+
1750556849.481431,40568,0.6366961002349854
|
| 156 |
+
1750556968.323946,40668,0.6389920115470886
|
| 157 |
+
1750557090.0829928,40768,0.641265332698822
|
| 158 |
+
1750557209.600089,40868,0.637299656867981
|
| 159 |
+
1750557332.9571662,40968,0.6393511295318604
|
| 160 |
+
1750557454.556417,41068,0.6385324597358704
|
| 161 |
+
1750557575.257521,41168,0.6373486518859863
|
| 162 |
+
1750557694.323545,41268,0.6395165324211121
|
| 163 |
+
1750557813.8086998,41368,0.6381868720054626
|
| 164 |
+
1750557933.02108,41468,0.6384454369544983
|
| 165 |
+
1750558057.867629,41568,0.6389246582984924
|
| 166 |
+
1750558186.963292,41668,0.640751838684082
|
| 167 |
+
1750558308.942624,41768,0.6382028460502625
|
| 168 |
+
1750558428.0147572,41868,0.6369920372962952
|
| 169 |
+
1750558547.4412658,41968,0.638592541217804
|
| 170 |
+
1750558682.438314,42068,0.6381452083587646
|
| 171 |
+
1750558808.044629,42168,0.6389350295066833
|
| 172 |
+
1750558930.6890728,42268,0.6376427412033081
|
| 173 |
+
1750559051.3273962,42368,0.6376127600669861
|
| 174 |
+
1750559178.170135,42468,0.637618899345398
|
| 175 |
+
1750559307.4376478,42568,0.6370833516120911
|
| 176 |
+
1750559431.0882099,42668,0.6377671360969543
|
| 177 |
+
1750559553.4359171,42768,0.6386574506759644
|
| 178 |
+
1750559675.465578,42868,0.63829106092453
|
| 179 |
+
1750559800.673301,42968,0.6385459303855896
|
| 180 |
+
1750559929.051855,43068,0.6382383704185486
|
| 181 |
+
1750560049.739744,43168,0.6397971510887146
|
| 182 |
+
1750560167.535625,43268,0.6400588154792786
|
| 183 |
+
1750560289.986766,43368,0.6386746168136597
|
| 184 |
+
1750560406.148522,43468,0.6393057703971863
|
| 185 |
+
1750560521.5862908,43568,0.6379473209381104
|
| 186 |
+
1750561252.059766,43681,0.6409271359443665
|
| 187 |
+
1750561365.918968,43781,0.6408057808876038
|
| 188 |
+
1750561486.963851,43881,0.6405190229415894
|
| 189 |
+
1750561604.9847198,43981,0.6418737769126892
|
| 190 |
+
1750561727.380712,44081,0.6414803862571716
|
| 191 |
+
1750561855.436966,44181,0.6388021111488342
|
| 192 |
+
1750561973.1673331,44281,0.6416249871253967
|
| 193 |
+
1750562089.661609,44381,0.6407567262649536
|
| 194 |
+
1750562205.391223,44481,0.6435379981994629
|
| 195 |
+
1750562328.7374332,44581,0.6402175426483154
|
| 196 |
+
1750562443.504435,44681,0.640390932559967
|
| 197 |
+
1750562560.073649,44781,0.63901287317276
|
| 198 |
+
1750562676.1336892,44881,0.6404430270195007
|
| 199 |
+
1750562791.255451,44981,0.6395447254180908
|
| 200 |
+
1750562906.4314828,45081,0.6409381031990051
|
| 201 |
+
1750563022.989471,45181,0.639928936958313
|
| 202 |
+
1750563143.009612,45281,0.6422383785247803
|
| 203 |
+
1750563258.426795,45381,0.6405906677246094
|
| 204 |
+
1750563376.2271922,45481,0.6399515867233276
|
| 205 |
+
1750563492.181812,45581,0.6405729055404663
|
| 206 |
+
1750563606.7030542,45681,0.6400998830795288
|
| 207 |
+
1750563722.270846,45781,0.6418590545654297
|
| 208 |
+
1750563855.6493251,45881,0.6403008699417114
|
| 209 |
+
1750563971.354243,45981,0.6378388404846191
|
| 210 |
+
1750564085.182728,46081,0.6398559808731079
|
| 211 |
+
1750564200.637504,46181,0.6400257349014282
|
| 212 |
+
1750564317.326557,46281,0.6415300369262695
|
| 213 |
+
1750564434.4818282,46381,0.6426758766174316
|
| 214 |
+
1750564551.824142,46481,0.6398523449897766
|
| 215 |
+
1750564668.9933949,46581,0.6407260894775391
|
| 216 |
+
1750564785.8267531,46681,0.6419124007225037
|
| 217 |
+
1750565535.8494668,46794,0.6422056555747986
|
| 218 |
+
1750565657.997025,46894,0.6463032960891724
|
| 219 |
+
1750565772.921847,46994,0.6411439776420593
|
| 220 |
+
1750565889.006614,47094,0.6431507468223572
|
| 221 |
+
1750566005.92935,47194,0.6427739262580872
|
| 222 |
+
1750566123.246987,47294,0.644709587097168
|
| 223 |
+
1750566241.316244,47394,0.6436188817024231
|
| 224 |
+
1750566359.59302,47494,0.6419742703437805
|
| 225 |
+
1750566478.1097019,47594,0.6424705982208252
|
| 226 |
+
1750566596.7906308,47694,0.6443076133728027
|
| 227 |
+
1750566715.658506,47794,0.6413400769233704
|
| 228 |
+
1750566834.1012092,47894,0.6434215903282166
|
| 229 |
+
1750566952.149738,47994,0.6423106789588928
|
| 230 |
+
1750567070.122616,48094,0.6425330638885498
|
| 231 |
+
1750567187.9696372,48194,0.6412070989608765
|
| 232 |
+
1750567305.905546,48294,0.6385864019393921
|
| 233 |
+
1750567423.545755,48394,0.6413811445236206
|
| 234 |
+
1750567540.965311,48494,0.6424896121025085
|
| 235 |
+
1750567658.286883,48594,0.6416807770729065
|
| 236 |
+
1750567775.6383739,48694,0.6426562666893005
|
| 237 |
+
1750567893.00083,48794,0.641862154006958
|
| 238 |
+
1750568010.5977511,48894,0.6437365412712097
|
| 239 |
+
1750568128.829071,48994,0.6420478224754333
|
| 240 |
+
1750568246.5276082,49094,0.6405422687530518
|
| 241 |
+
1750568363.7302551,49194,0.6421311497688293
|
| 242 |
+
1750568481.0795798,49294,0.6424846649169922
|
| 243 |
+
1750568599.233805,49394,0.6423088312149048
|
| 244 |
+
1750568716.026248,49494,0.6430091857910156
|
| 245 |
+
1750568832.759424,49594,0.6410496234893799
|
| 246 |
+
1750568949.76052,49694,0.6420183777809143
|
| 247 |
+
1750569066.560096,49794,0.6412965655326843
|
| 248 |
+
1750569807.785005,49907,0.6452121734619141
|
| 249 |
+
1750569924.86,50007,0.64546138048172
|
| 250 |
+
1750570042.331681,50107,0.6411899328231812
|
| 251 |
+
1750570158.903578,50207,0.6425251364707947
|
| 252 |
+
1750570275.5214038,50307,0.6432732939720154
|
| 253 |
+
1750570392.1878011,50407,0.6426911950111389
|
| 254 |
+
1750570508.411907,50507,0.6446635723114014
|
| 255 |
+
1750570624.694793,50607,0.6437653303146362
|
| 256 |
+
1750570740.793289,50707,0.6411666870117188
|
| 257 |
+
1750570856.8499572,50807,0.6449939012527466
|
| 258 |
+
1750570972.8968399,50907,0.643962025642395
|
| 259 |
+
1750571088.87247,51007,0.6435968279838562
|
| 260 |
+
1750571205.251838,51107,0.6421219110488892
|
| 261 |
+
1750571321.295198,51207,0.6449313759803772
|
| 262 |
+
1750571437.27664,51307,0.6418517231941223
|
| 263 |
+
1750571553.188294,51407,0.6427794098854065
|
| 264 |
+
1750571669.605008,51507,0.6435288190841675
|
| 265 |
+
1750571785.396748,51607,0.6424405574798584
|
| 266 |
+
1750571901.20281,51707,0.644616425037384
|
| 267 |
+
1750572016.9972858,51807,0.6438559889793396
|
| 268 |
+
1750572132.757011,51907,0.6454203724861145
|
| 269 |
+
1750572248.543735,52007,0.6435306072235107
|
| 270 |
+
1750572364.283351,52107,0.6453829407691956
|
| 271 |
+
1750572479.954495,52207,0.6444313526153564
|
| 272 |
+
1750572595.630369,52307,0.6437414288520813
|
| 273 |
+
1750572711.608358,52407,0.6438400745391846
|
| 274 |
+
1750572827.294903,52507,0.6434191465377808
|
| 275 |
+
1750572949.4278588,52607,0.6442353129386902
|
| 276 |
+
1750573081.115513,52707,0.6438174247741699
|
| 277 |
+
1750573196.249892,52807,0.6420766115188599
|
| 278 |
+
1750573311.5793018,52907,0.642891526222229
|
| 279 |
+
1750574031.98301,53020,0.6436647772789001
|
| 280 |
+
1750574147.29718,53120,0.6458823680877686
|
| 281 |
+
1750574262.8689039,53220,0.6458296775817871
|
| 282 |
+
1750574378.20222,53320,0.6470888257026672
|
| 283 |
+
1750574493.654762,53420,0.647616446018219
|
| 284 |
+
1750574609.564354,53520,0.6457353234291077
|
| 285 |
+
1750574725.0149271,53620,0.6446035504341125
|
| 286 |
+
1750574840.4245021,53720,0.6438578367233276
|
| 287 |
+
1750574956.014117,53820,0.6460122466087341
|
| 288 |
+
1750575071.2638001,53920,0.6461544036865234
|
| 289 |
+
1750575186.563884,54020,0.6442506313323975
|
| 290 |
+
1750575302.772993,54120,0.6432886123657227
|
| 291 |
+
1750575418.222311,54220,0.6440937519073486
|
| 292 |
+
1750575533.568038,54320,0.6452794075012207
|
| 293 |
+
1750575649.4060469,54420,0.6453946232795715
|
| 294 |
+
1750575764.808038,54520,0.643932580947876
|
| 295 |
+
1750575880.2007291,54620,0.6454748511314392
|
| 296 |
+
1750575995.601755,54720,0.642601728439331
|
| 297 |
+
1750576110.9728398,54820,0.6444087028503418
|
| 298 |
+
1750576226.336683,54920,0.6438363790512085
|
| 299 |
+
1750576341.659678,55020,0.6449577212333679
|
| 300 |
+
1750576457.048513,55120,0.6454240083694458
|
| 301 |
+
1750576572.232755,55220,0.6449577212333679
|
| 302 |
+
1750576687.4686751,55320,0.6424785256385803
|
| 303 |
+
1750576802.802933,55420,0.644071102142334
|
| 304 |
+
1750576924.621731,55520,0.645532488822937
|
| 305 |
+
1750577056.267253,55620,0.6443345546722412
|
| 306 |
+
1750577173.309717,55720,0.6449442505836487
|
| 307 |
+
1750577289.042749,55820,0.6443547606468201
|
| 308 |
+
1750577404.575142,55920,0.6440588235855103
|
| 309 |
+
1750577519.9576378,56020,0.6426476836204529
|
| 310 |
+
1750578238.668938,56133,0.6469918489456177
|
| 311 |
+
1750578353.731814,56233,0.6458842158317566
|
| 312 |
+
1750578469.1204789,56333,0.6470937728881836
|
| 313 |
+
1750578584.3296099,56433,0.6494325995445251
|
| 314 |
+
1750578699.499978,56533,0.6446874737739563
|
| 315 |
+
1750578814.765738,56633,0.6472757458686829
|
| 316 |
+
1750578930.1919858,56733,0.6456893682479858
|
| 317 |
+
1750579045.453515,56833,0.6469656825065613
|
| 318 |
+
1750579160.8550808,56933,0.6465668082237244
|
| 319 |
+
1750579275.820174,57033,0.6443455815315247
|
| 320 |
+
1750579390.958364,57133,0.6445361375808716
|
| 321 |
+
1750579505.911273,57233,0.6454914212226868
|
| 322 |
+
1750579620.8545332,57333,0.6444337964057922
|
| 323 |
+
1750579735.869893,57433,0.6451758742332458
|
| 324 |
+
1750579850.789462,57533,0.6452665328979492
|
| 325 |
+
1750579965.67233,57633,0.6462322473526001
|
| 326 |
+
1750580080.63616,57733,0.6449939012527466
|
| 327 |
+
1750580195.508951,57833,0.6456440091133118
|
| 328 |
+
1750580310.310483,57933,0.6459197402000427
|
| 329 |
+
1750580425.353987,58033,0.6460098028182983
|
| 330 |
+
1750580540.1708171,58133,0.6448296308517456
|
| 331 |
+
1750580655.5569558,58233,0.645352303981781
|
| 332 |
+
1750580773.674479,58333,0.64360111951828
|
| 333 |
+
1750580908.940358,58433,0.6448719501495361
|
| 334 |
+
1750581023.3370812,58533,0.6445863842964172
|
| 335 |
+
1750581137.9006279,58633,0.6447775959968567
|
| 336 |
+
1750581252.673393,58733,0.644709587097168
|
| 337 |
+
1750581367.477561,58833,0.648421585559845
|
| 338 |
+
1750581482.437027,58933,0.6426127552986145
|
| 339 |
+
1750581597.4778292,59033,0.6464632153511047
|
| 340 |
+
1750581712.4491048,59133,0.6453744173049927
|
| 341 |
+
1750582429.634799,59246,0.6471410393714905
|
| 342 |
+
1750582544.210488,59346,0.6491905450820923
|
| 343 |
+
1750582659.016445,59446,0.6470538973808289
|
| 344 |
+
1750582773.78576,59546,0.6470618844032288
|
| 345 |
+
1750582888.855089,59646,0.6465263366699219
|
| 346 |
+
1750583003.3240662,59746,0.6477512121200562
|
| 347 |
+
1750583117.701067,59846,0.6461568474769592
|
| 348 |
+
1750583232.1828108,59946,0.6468449831008911
|
| 349 |
+
1750583347.412756,60046,0.6451090574264526
|
| 350 |
+
1750583461.909665,60146,0.6453369855880737
|
| 351 |
+
1750583576.393462,60246,0.6463989019393921
|
| 352 |
+
1750583692.413404,60346,0.6461660265922546
|
| 353 |
+
1750583806.796368,60446,0.6431660652160645
|
| 354 |
+
1750583922.159539,60546,0.6469038128852844
|
| 355 |
+
1750584037.123441,60646,0.6451770663261414
|
| 356 |
+
1750584151.542207,60746,0.6481862664222717
|
| 357 |
+
1750584273.796669,60846,0.6473749876022339
|
| 358 |
+
1750584394.352535,60946,0.6465943455696106
|
| 359 |
+
1750584515.335422,61046,0.6462965607643127
|
| 360 |
+
1750584631.064252,61146,0.6443327069282532
|
| 361 |
+
1750584750.8549461,61246,0.6457077264785767
|
| 362 |
+
1750584889.686422,61346,0.6464699506759644
|
| 363 |
+
1750585011.032854,61446,0.6477438807487488
|
| 364 |
+
1750585125.415521,61546,0.6452156901359558
|
| 365 |
+
1750585239.952118,61646,0.6447757482528687
|
| 366 |
+
1750585354.440314,61746,0.6467132568359375
|
| 367 |
+
1750585468.9611871,61846,0.6464454531669617
|
| 368 |
+
1750585583.376058,61946,0.647243857383728
|
| 369 |
+
1750585697.829522,62046,0.6438388228416443
|
| 370 |
+
1750585812.219603,62146,0.6457659602165222
|
| 371 |
+
1750585926.509505,62246,0.6463039517402649
|
| 372 |
+
1750586641.152458,62359,0.6478689908981323
|
| 373 |
+
1750586755.0868979,62459,0.6492831110954285
|
| 374 |
+
1750586869.202715,62559,0.6477726697921753
|
| 375 |
+
1750586983.435402,62659,0.6484154462814331
|
| 376 |
+
1750587097.811608,62759,0.6487561464309692
|
| 377 |
+
1750587211.895541,62859,0.6486678719520569
|
| 378 |
+
1750587326.1778922,62959,0.6473970413208008
|
| 379 |
+
1750587440.346177,63059,0.6490961909294128
|
| 380 |
+
1750587554.630498,63159,0.6482481360435486
|
| 381 |
+
1750587668.8492742,63259,0.644489586353302
|
| 382 |
+
1750587783.0494442,63359,0.6476078629493713
|
| 383 |
+
1750587899.978155,63459,0.6483131051063538
|
| 384 |
+
1750588014.9147332,63559,0.6460931301116943
|
| 385 |
+
1750588129.3693519,63659,0.6467328667640686
|
| 386 |
+
1750588243.8547578,63759,0.6476801633834839
|
| 387 |
+
1750588358.458077,63859,0.6495165228843689
|
| 388 |
+
1750588472.834694,63959,0.6476917862892151
|
| 389 |
+
1750588587.1876748,64059,0.644529402256012
|
| 390 |
+
1750588701.4570339,64159,0.6447855234146118
|
| 391 |
+
1750588821.0114582,64259,0.6437076926231384
|
| 392 |
+
1750588955.885608,64359,0.6447230577468872
|
| 393 |
+
1750589071.6215308,64459,0.6471397280693054
|
| 394 |
+
1750589185.356407,64559,0.6479007601737976
|
| 395 |
+
1750589299.509584,64659,0.644725501537323
|
| 396 |
+
1750589413.543396,64759,0.6464246511459351
|
| 397 |
+
1750589527.535722,64859,0.6464620232582092
|
| 398 |
+
1750589641.46502,64959,0.6458835601806641
|
| 399 |
+
1750589755.7920032,65059,0.6466954946517944
|
| 400 |
+
1750589869.688013,65159,0.6468278169631958
|
| 401 |
+
1750589983.531646,65259,0.6452040672302246
|
| 402 |
+
1750590097.425102,65359,0.6451256275177002
|
| 403 |
+
1750590808.176262,65472,0.6471759676933289
|
| 404 |
+
1750590921.921278,65572,0.650242030620575
|
| 405 |
+
1750591035.834945,65672,0.6471421718597412
|
| 406 |
+
1750591149.778326,65772,0.6475422978401184
|
| 407 |
+
1750591263.569057,65872,0.6506139636039734
|
| 408 |
+
1750591377.499996,65972,0.6497548818588257
|
| 409 |
+
1750591492.6199858,66072,0.6472107768058777
|
| 410 |
+
1750591608.058696,66172,0.6497953534126282
|
| 411 |
+
1750591722.086161,66272,0.6478143334388733
|
| 412 |
+
1750591837.5271091,66372,0.6468130946159363
|
| 413 |
+
1750591951.600312,66472,0.6478676199913025
|
| 414 |
+
1750592065.9358442,66572,0.6482720375061035
|
| 415 |
+
1750592180.108526,66672,0.6472058892250061
|
| 416 |
+
1750592294.1624372,66772,0.6472187638282776
|
| 417 |
+
1750592408.118195,66872,0.6455698609352112
|
| 418 |
+
1750592521.987005,66972,0.6482892036437988
|
| 419 |
+
1750592635.671606,67072,0.6464956998825073
|
| 420 |
+
1750592749.309768,67172,0.6475116610527039
|
| 421 |
+
1750592873.8428931,67272,0.6468645930290222
|
| 422 |
+
1750592993.918257,67372,0.6468793153762817
|
| 423 |
+
1750593107.367975,67472,0.6465765833854675
|
| 424 |
+
1750593221.155617,67572,0.6472683548927307
|
| 425 |
+
1750593335.090611,67672,0.6478118896484375
|
| 426 |
+
1750593448.960873,67772,0.6479766964912415
|
| 427 |
+
1750593564.0753238,67872,0.6446660757064819
|
| 428 |
+
1750593678.410104,67972,0.6475949883460999
|
| 429 |
+
1750593792.5865002,68072,0.6463884711265564
|
| 430 |
+
1750593906.303969,68172,0.6469693779945374
|
| 431 |
+
1750594020.026242,68272,0.6472254991531372
|
| 432 |
+
1750594133.6772149,68372,0.6451256275177002
|
| 433 |
+
1750594247.254053,68472,0.6462708115577698
|
| 434 |
+
1750594961.283332,68585,0.6506675481796265
|
| 435 |
+
1750595074.51535,68685,0.6492162942886353
|
| 436 |
+
1750595190.514826,68785,0.6484957337379456
|
| 437 |
+
1750595304.432062,68885,0.6488602757453918
|
| 438 |
+
1750595418.623473,68985,0.651425838470459
|
| 439 |
+
1750595532.449842,69085,0.6482800245285034
|
| 440 |
+
1750595646.204473,69185,0.64822918176651
|
| 441 |
+
1750595762.673673,69285,0.650368869304657
|
| 442 |
+
1750595877.196369,69385,0.6483909487724304
|
| 443 |
+
1750595991.276843,69485,0.6484816074371338
|
| 444 |
+
1750596105.727592,69585,0.6470624804496765
|
| 445 |
+
1750596220.7992299,69685,0.6490337252616882
|
| 446 |
+
1750596335.3485792,69785,0.646506130695343
|
| 447 |
+
1750596448.897135,69885,0.6479473114013672
|
| 448 |
+
1750596564.747894,69985,0.6497861742973328
|
| 449 |
+
1750596678.875308,70085,0.649032473564148
|
| 450 |
+
1750596792.3268101,70185,0.6471666693687439
|
| 451 |
+
1750596924.9429271,70285,0.6476060152053833
|
| 452 |
+
1750597043.7443461,70385,0.6474436521530151
|
| 453 |
+
1750597157.05215,70485,0.6484914422035217
|
| 454 |
+
1750597273.072156,70585,0.6457003951072693
|
| 455 |
+
1750597387.474043,70685,0.6476954817771912
|
| 456 |
+
1750597500.927712,70785,0.6476231813430786
|
| 457 |
+
1750597614.392519,70885,0.6465104222297668
|
| 458 |
+
1750597727.858714,70985,0.6453964710235596
|
| 459 |
+
1750597841.328077,71085,0.6476513743400574
|
| 460 |
+
1750597954.9446578,71185,0.6449001431465149
|
| 461 |
+
1750598068.513953,71285,0.6473345756530762
|
| 462 |
+
1750598182.8033369,71385,0.6480250954627991
|
| 463 |
+
1750598296.94331,71485,0.6461629867553711
|
| 464 |
+
1750598410.522016,71585,0.6479019522666931
|
| 465 |
+
1750599126.963549,71698,0.6486186385154724
|
| 466 |
+
1750599240.299493,71798,0.6481819748878479
|
| 467 |
+
1750599353.719161,71898,0.6503051519393921
|
| 468 |
+
1750599467.1946821,71998,0.6500055193901062
|
| 469 |
+
1750599580.561397,72098,0.6490943431854248
|
| 470 |
+
1750599693.795018,72198,0.6512138247489929
|
| 471 |
+
1750599807.05666,72298,0.6481212973594666
|
| 472 |
+
1750599920.236497,72398,0.650051474571228
|
| 473 |
+
1750600033.394361,72498,0.6495527029037476
|
| 474 |
+
1750600146.6231291,72598,0.6471550464630127
|
| 475 |
+
1750600259.913486,72698,0.6475778222084045
|
| 476 |
+
1750600373.3632681,72798,0.6488180160522461
|
| 477 |
+
1750600486.834271,72898,0.6477310061454773
|
| 478 |
+
1750600599.855514,72998,0.6499209403991699
|
| 479 |
+
1750600713.053118,73098,0.6469252705574036
|
| 480 |
+
1750600826.464627,73198,0.6488933563232422
|
| 481 |
+
1750600953.8382468,73298,0.6481825709342957
|
| 482 |
+
1750601080.921181,73398,0.6490073800086975
|
| 483 |
+
1750601194.100801,73498,0.6484344601631165
|
| 484 |
+
1750601307.372137,73598,0.647243857383728
|
| 485 |
+
1750601420.6709042,73698,0.6485416889190674
|
| 486 |
+
1750601534.0793972,73798,0.6484705805778503
|
| 487 |
+
1750601647.552754,73898,0.6458694934844971
|
| 488 |
+
1750601760.966083,73998,0.6481115221977234
|
| 489 |
+
1750601874.406259,74098,0.6480833292007446
|
| 490 |
+
1750601988.078152,74198,0.6486004590988159
|
| 491 |
+
1750602101.551609,74298,0.6483842134475708
|
| 492 |
+
1750602214.817476,74398,0.6475067138671875
|
| 493 |
+
1750602328.090564,74498,0.6483412981033325
|
| 494 |
+
1750602443.7624419,74598,0.6467549204826355
|
| 495 |
+
1750602557.626081,74698,0.6482132077217102
|
| 496 |
+
1750603268.085641,74811,0.6493449807167053
|
| 497 |
+
1750603381.465674,74911,0.6493836045265198
|
| 498 |
+
1750603494.849602,75011,0.6496973037719727
|
| 499 |
+
1750603608.307693,75111,0.6514828205108643
|
| 500 |
+
1750603721.841194,75211,0.6484718322753906
|
| 501 |
+
1750603835.478155,75311,0.651509165763855
|
| 502 |
+
1750603948.918059,75411,0.6493082046508789
|
| 503 |
+
1750604062.4713511,75511,0.6493167877197266
|
| 504 |
+
1750604176.132805,75611,0.6502978205680847
|
| 505 |
+
1750604291.777073,75711,0.6487714648246765
|
| 506 |
+
1750604404.9040508,75811,0.6485036611557007
|
| 507 |
+
1750604519.9396749,75911,0.6494589447975159
|
| 508 |
+
1750604633.384871,76011,0.6465171575546265
|
| 509 |
+
1750604749.003573,76111,0.6481679081916809
|
| 510 |
+
1750604861.942571,76211,0.6491654515266418
|
| 511 |
+
1750604991.504076,76311,0.6484301686286926
|
| 512 |
+
1750605110.915926,76411,0.648379921913147
|
| 513 |
+
1750605224.225229,76511,0.6497408151626587
|
| 514 |
+
1750605337.9226382,76611,0.6495067477226257
|
| 515 |
+
1750605460.625063,76711,0.64895099401474
|
| 516 |
+
1750605573.4640121,76811,0.6485710740089417
|
| 517 |
+
1750605690.328724,76911,0.6481311321258545
|
| 518 |
+
1750605819.816677,77011,0.6497769355773926
|
| 519 |
+
1750605951.775168,77111,0.6471458077430725
|
| 520 |
+
1750606083.632878,77211,0.6488964557647705
|
| 521 |
+
1750606206.9935338,77311,0.6482897996902466
|
| 522 |
+
1750606326.036043,77411,0.6463878750801086
|
| 523 |
+
1750606447.776541,77511,0.6483945846557617
|
| 524 |
+
1750606573.864937,77611,0.6469258666038513
|
| 525 |
+
1750606686.392003,77711,0.646547794342041
|
| 526 |
+
1750606799.700439,77811,0.6500318646430969
|
| 527 |
+
1750607531.42963,77924,0.6539332270622253
|
| 528 |
+
1750607646.005093,78024,0.6508700847625732
|
| 529 |
+
1750607762.3291159,78124,0.6489117741584778
|
| 530 |
+
1750607876.9745002,78224,0.6478290557861328
|
| 531 |
+
1750607990.553424,78324,0.6508229374885559
|
| 532 |
+
1750608104.333185,78424,0.6502634882926941
|
| 533 |
+
1750608218.94431,78524,0.648602306842804
|
| 534 |
+
1750608332.3119192,78624,0.6503958106040955
|
| 535 |
+
1750608448.777334,78724,0.6495441198348999
|
| 536 |
+
1750608565.710043,78824,0.6504626274108887
|
| 537 |
+
1750608697.5426712,78924,0.6474418044090271
|
| 538 |
+
1750608813.743689,79024,0.6483995318412781
|
| 539 |
+
1750608931.296033,79124,0.6503363847732544
|
| 540 |
+
1750609051.616808,79224,0.6509423851966858
|
| 541 |
+
1750609170.325835,79324,0.6495392322540283
|
| 542 |
+
1750609287.822495,79424,0.6479007601737976
|
| 543 |
+
1750609404.5917308,79524,0.649052083492279
|
| 544 |
+
1750609521.92617,79624,0.6479411721229553
|
| 545 |
+
1750609644.375568,79724,0.6480759978294373
|
| 546 |
+
1750609779.294302,79824,0.6493504643440247
|
| 547 |
+
1750609908.866155,79924,0.6490018367767334
|
| 548 |
+
1750610027.0822668,80024,0.6477825045585632
|
| 549 |
+
1750610144.4391959,80124,0.6481972932815552
|
| 550 |
+
1750610262.5570219,80224,0.6487996578216553
|
| 551 |
+
1750610381.838706,80324,0.6485079526901245
|
| 552 |
+
1750610501.226707,80424,0.6487144827842712
|
| 553 |
+
1750610620.8597472,80524,0.6500441431999207
|
| 554 |
+
1750610740.6217742,80624,0.6486127376556396
|
| 555 |
+
1750610860.366892,80724,0.6491838097572327
|
| 556 |
+
1750610980.208236,80824,0.6473811268806458
|
| 557 |
+
1750611099.770781,80924,0.6480177640914917
|
| 558 |
+
1750611869.460192,81037,0.6499354243278503
|
| 559 |
+
1750611987.6706471,81137,0.6512537002563477
|
| 560 |
+
1750612106.840222,81237,0.651816189289093
|
| 561 |
+
1750612234.3039439,81337,0.650275707244873
|
| 562 |
+
1750612360.8318841,81437,0.6499889492988586
|
| 563 |
+
1750612480.657759,81537,0.6484877467155457
|
| 564 |
+
1750612601.2043068,81637,0.6499779224395752
|
| 565 |
+
1750612722.204899,81737,0.6482211947441101
|
| 566 |
+
1750612845.812425,81837,0.6495686173439026
|
| 567 |
+
1750612965.936807,81937,0.651201605796814
|
| 568 |
+
1750613085.701558,82037,0.6513584852218628
|
| 569 |
+
1750613205.6292732,82137,0.6499001383781433
|
| 570 |
+
1750613328.230561,82237,0.6502407789230347
|
| 571 |
+
1750613449.8727932,82337,0.6506360173225403
|
| 572 |
+
1750613572.076292,82437,0.649478554725647
|
| 573 |
+
1750613694.686913,82537,0.6480134725570679
|
| 574 |
+
1750613814.5874639,82637,0.6490668058395386
|
| 575 |
+
1750613935.299573,82737,0.6511960625648499
|
| 576 |
+
1750614055.92192,82837,0.6488516926765442
|
| 577 |
+
1750614178.808106,82937,0.6496151685714722
|
| 578 |
+
1750614299.236428,83037,0.6512248516082764
|
| 579 |
+
1750614419.280396,83137,0.6513504981994629
|
| 580 |
+
1750614541.370991,83237,0.6505110263824463
|
| 581 |
+
1750614661.353313,83337,0.6482665538787842
|
| 582 |
+
1750614781.57492,83437,0.6482396125793457
|
| 583 |
+
1750614903.094549,83537,0.6486562490463257
|
| 584 |
+
1750615024.0435221,83637,0.649006724357605
|
| 585 |
+
1750615145.50887,83737,0.6487040519714355
|
| 586 |
+
1750615267.080477,83837,0.6492610573768616
|
| 587 |
+
1750615389.716396,83937,0.6466494798660278
|
| 588 |
+
1750615510.6090298,84037,0.6468896865844727
|
| 589 |
+
1750616279.8720782,84150,0.6526991128921509
|
| 590 |
+
1750616399.818967,84250,0.6504766941070557
|
| 591 |
+
1750616520.4718778,84350,0.6510208249092102
|
| 592 |
+
1750616641.622665,84450,0.653174638748169
|
| 593 |
+
1750616764.3281019,84550,0.6528529524803162
|
| 594 |
+
1750616892.090367,84650,0.6496495008468628
|
| 595 |
+
1750617016.783542,84750,0.6483529210090637
|
| 596 |
+
1750617140.8825998,84850,0.6534565091133118
|
| 597 |
+
1750617264.373489,84950,0.6515576243400574
|
| 598 |
+
1750617385.02099,85050,0.6476219296455383
|
| 599 |
+
1750617507.1370711,85150,0.6492205858230591
|
| 600 |
+
1750617628.504561,85250,0.6475943326950073
|
| 601 |
+
1750617750.817056,85350,0.6470876336097717
|
| 602 |
+
1750617873.566484,85450,0.6500269770622253
|
| 603 |
+
1750617995.333026,85550,0.6503198742866516
|
| 604 |
+
1750618118.851619,85650,0.6515373587608337
|
| 605 |
+
1750618241.0798368,85750,0.6499902009963989
|
| 606 |
+
1750618364.121679,85850,0.6491415500640869
|
| 607 |
+
1750618487.464784,85950,0.648560643196106
|
| 608 |
+
1750618611.460181,86050,0.6482163071632385
|
| 609 |
+
1750618734.826777,86150,0.6484558582305908
|
| 610 |
+
1750618856.437807,86250,0.6502573490142822
|
| 611 |
+
1750618979.6873322,86350,0.6508774757385254
|
| 612 |
+
1750619103.141767,86450,0.6505165696144104
|
| 613 |
+
1750619224.659722,86550,0.6477352976799011
|
| 614 |
+
1750619348.091896,86650,0.6486329436302185
|
| 615 |
+
1750619476.8873081,86750,0.6491035223007202
|
| 616 |
+
1750619606.321012,86850,0.6493394374847412
|
| 617 |
+
1750619740.594385,86950,0.6478296518325806
|
| 618 |
+
1750619863.408876,87050,0.6471274495124817
|
| 619 |
+
1750619985.631146,87150,0.649993896484375
|
| 620 |
+
1750620756.7987552,87263,0.650802493095398
|
| 621 |
+
1750620882.0297558,87363,0.6521164178848267
|
| 622 |
+
1750621003.968464,87463,0.6504436135292053
|
| 623 |
+
1750621126.092192,87563,0.6502971649169922
|
| 624 |
+
1750621247.571977,87663,0.6495686173439026
|
| 625 |
+
1750621369.483636,87763,0.6518437266349792
|
| 626 |
+
1750621491.970355,87863,0.6509503722190857
|
| 627 |
+
1750621613.86056,87963,0.6497542858123779
|
| 628 |
+
1750621735.753966,88063,0.6520538926124573
|
| 629 |
+
1750621857.879051,88163,0.6510637402534485
|
| 630 |
+
1750621994.1035788,88263,0.6493682861328125
|
| 631 |
+
1750622135.149805,88363,0.6500649452209473
|
| 632 |
+
1750622256.653326,88463,0.6486452221870422
|
| 633 |
+
1750622385.631712,88563,0.6495349407196045
|
| 634 |
+
1750622509.917459,88663,0.6512365341186523
|
| 635 |
+
1750622643.8225129,88763,0.6506893634796143
|
| 636 |
+
1750622766.2324889,88863,0.6490857601165771
|
| 637 |
+
1750622901.935771,88963,0.6516850590705872
|
| 638 |
+
1750623047.185132,89063,0.6498480439186096
|
| 639 |
+
1750623201.280082,89163,0.6501519680023193
|
| 640 |
+
1750623332.6913059,89263,0.6491029262542725
|
| 641 |
+
1750623464.901411,89363,0.6478192210197449
|
| 642 |
+
1750623601.143538,89463,0.6510116457939148
|
| 643 |
+
1750623736.550787,89563,0.6510367393493652
|
| 644 |
+
1750623873.634254,89663,0.6512346863746643
|
| 645 |
+
1750624010.361918,89763,0.6488339304924011
|
| 646 |
+
1750624148.0604842,89863,0.6489362716674805
|
| 647 |
+
1750624285.8869388,89963,0.6506789326667786
|
| 648 |
+
1750624422.588512,90063,0.6470484137535095
|
| 649 |
+
1750624558.7439559,90163,0.6507463455200195
|
| 650 |
+
1750624693.945033,90263,0.6487003564834595
|
| 651 |
+
1750625561.1012402,90376,0.6528535485267639
|
| 652 |
+
1750625700.597535,90476,0.6515300273895264
|
| 653 |
+
1750625840.1480231,90576,0.6503186225891113
|
| 654 |
+
1750625982.267066,90676,0.6544325947761536
|
| 655 |
+
1750626125.271106,90776,0.6524448394775391
|
| 656 |
+
1750626261.545839,90876,0.650384783744812
|
| 657 |
+
1750626396.5558882,90976,0.651689350605011
|
| 658 |
+
1750626532.687592,91076,0.6526868939399719
|
| 659 |
+
1750626667.8625998,91176,0.65162193775177
|
| 660 |
+
1750626820.179069,91276,0.6481838226318359
|
| 661 |
+
1750626957.687435,91376,0.6513223052024841
|
| 662 |
+
1750627090.740042,91476,0.6505637168884277
|
| 663 |
+
1750627212.479149,91576,0.6479706168174744
|
| 664 |
+
1750627346.710177,91676,0.6502432823181152
|
| 665 |
+
1750627467.090128,91776,0.6488933563232422
|
| 666 |
+
1750627589.0224462,91876,0.6498934030532837
|
| 667 |
+
1750627706.6918569,91976,0.6503088474273682
|
| 668 |
+
1750627826.827691,92076,0.6487782001495361
|
| 669 |
+
1750627949.173918,92176,0.6491109132766724
|
| 670 |
+
1750628072.8665018,92276,0.6505398154258728
|
| 671 |
+
1750628196.8727,92376,0.651665449142456
|
| 672 |
+
1750628320.0594828,92476,0.649732232093811
|
| 673 |
+
1750628442.600629,92576,0.6493094563484192
|
| 674 |
+
1750628571.3224661,92676,0.6495833396911621
|
| 675 |
+
1750628695.75915,92776,0.6489681601524353
|
| 676 |
+
1750628814.931844,92876,0.6502506136894226
|
| 677 |
+
1750628935.356877,92976,0.6503320932388306
|
| 678 |
+
1750629056.920314,93076,0.6506360173225403
|
| 679 |
+
1750629178.913306,93176,0.6491133570671082
|
| 680 |
+
1750629301.540896,93276,0.6481752395629883
|
| 681 |
+
1750629423.713382,93376,0.64860600233078
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv
ADDED
|
@@ -0,0 +1,931 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750113731.620789,99,0.2894834578037262
|
| 3 |
+
1750113849.767039,199,0.3767451047897339
|
| 4 |
+
1750113967.6854558,299,0.4108302593231201
|
| 5 |
+
1750114086.60289,399,0.43304839730262756
|
| 6 |
+
1750114203.53461,499,0.4487530589103699
|
| 7 |
+
1750114319.05894,599,0.4647977948188782
|
| 8 |
+
1750114433.683232,699,0.4779221713542938
|
| 9 |
+
1750114549.178375,799,0.4858437478542328
|
| 10 |
+
1750114666.602735,899,0.4952310025691986
|
| 11 |
+
1750114784.96975,999,0.5046378970146179
|
| 12 |
+
1750114903.164481,1099,0.510549008846283
|
| 13 |
+
1750115021.448666,1199,0.5169546604156494
|
| 14 |
+
1750115139.529448,1299,0.5243063569068909
|
| 15 |
+
1750115257.0991611,1399,0.5313149690628052
|
| 16 |
+
1750115374.696079,1499,0.5365349054336548
|
| 17 |
+
1750115492.256682,1599,0.5419779419898987
|
| 18 |
+
1750115609.7381198,1699,0.5472671389579773
|
| 19 |
+
1750115727.6742442,1799,0.5525239109992981
|
| 20 |
+
1750115846.1890602,1899,0.5586219429969788
|
| 21 |
+
1750115963.3834822,1999,0.5621029138565063
|
| 22 |
+
1750116078.100993,2099,0.5670888423919678
|
| 23 |
+
1750116193.276267,2199,0.5702389478683472
|
| 24 |
+
1750116309.9196339,2299,0.5720189809799194
|
| 25 |
+
1750116427.060064,2399,0.5749754905700684
|
| 26 |
+
1750116547.518163,2499,0.5783933997154236
|
| 27 |
+
1750116667.225869,2599,0.5825998783111572
|
| 28 |
+
1750116785.104908,2699,0.5849546790122986
|
| 29 |
+
1750116902.92834,2799,0.5844546556472778
|
| 30 |
+
1750117020.8187819,2899,0.5880067348480225
|
| 31 |
+
1750117138.61855,2999,0.5897616147994995
|
| 32 |
+
1750117256.4706008,3099,0.5910385847091675
|
| 33 |
+
1750117985.241129,3212,0.5960073471069336
|
| 34 |
+
1750118102.617536,3312,0.5971531867980957
|
| 35 |
+
1750118220.444465,3412,0.5973045229911804
|
| 36 |
+
1750118338.19369,3512,0.5979706048965454
|
| 37 |
+
1750118456.5901508,3612,0.5989013314247131
|
| 38 |
+
1750118574.4481542,3712,0.6017383337020874
|
| 39 |
+
1750118692.173856,3812,0.6024864912033081
|
| 40 |
+
1750118809.675841,3912,0.6059197187423706
|
| 41 |
+
1750118927.0768251,4012,0.605040431022644
|
| 42 |
+
1750119044.244113,4112,0.6060906648635864
|
| 43 |
+
1750119161.4492958,4212,0.6076433658599854
|
| 44 |
+
1750119278.9653602,4312,0.6105741262435913
|
| 45 |
+
1750119396.2123258,4412,0.608817994594574
|
| 46 |
+
1750119513.680035,4512,0.6096912026405334
|
| 47 |
+
1750119631.039315,4612,0.6099552512168884
|
| 48 |
+
1750119748.182695,4712,0.6081299185752869
|
| 49 |
+
1750119865.2358,4812,0.6111084818840027
|
| 50 |
+
1750119982.510842,4912,0.6108804941177368
|
| 51 |
+
1750120099.751489,5012,0.6128131151199341
|
| 52 |
+
1750120216.878508,5112,0.6107971668243408
|
| 53 |
+
1750120336.3254158,5212,0.611819863319397
|
| 54 |
+
1750120455.1962152,5312,0.6152880191802979
|
| 55 |
+
1750120572.40735,5412,0.6127806305885315
|
| 56 |
+
1750120689.8053892,5512,0.6141225695610046
|
| 57 |
+
1750120806.95999,5612,0.6120851635932922
|
| 58 |
+
1750120924.000415,5712,0.6140600442886353
|
| 59 |
+
1750121041.0392551,5812,0.616313099861145
|
| 60 |
+
1750121158.027762,5912,0.616937518119812
|
| 61 |
+
1750121275.076512,6012,0.6182089447975159
|
| 62 |
+
1750121392.242815,6112,0.6185839772224426
|
| 63 |
+
1750121509.560572,6212,0.6185386180877686
|
| 64 |
+
1750122236.247565,6325,0.6205170750617981
|
| 65 |
+
1750122353.2827451,6425,0.6217316389083862
|
| 66 |
+
1750122470.6261241,6525,0.6223118901252747
|
| 67 |
+
1750122587.740099,6625,0.6239798069000244
|
| 68 |
+
1750122704.872276,6725,0.6233713030815125
|
| 69 |
+
1750122821.802098,6825,0.6230680346488953
|
| 70 |
+
1750122938.833802,6925,0.6210196018218994
|
| 71 |
+
1750123055.842272,7025,0.6240410804748535
|
| 72 |
+
1750123172.8767931,7125,0.6223112940788269
|
| 73 |
+
1750123289.89931,7225,0.6236402988433838
|
| 74 |
+
1750123406.977906,7325,0.6227518320083618
|
| 75 |
+
1750123524.040493,7425,0.6241286993026733
|
| 76 |
+
1750123645.586762,7525,0.6230373978614807
|
| 77 |
+
1750123770.12349,7625,0.6250728964805603
|
| 78 |
+
1750123894.062557,7725,0.626041054725647
|
| 79 |
+
1750124017.769852,7825,0.6263768076896667
|
| 80 |
+
1750124141.589675,7925,0.6256023049354553
|
| 81 |
+
1750124264.9646668,8025,0.6247665286064148
|
| 82 |
+
1750124388.286009,8125,0.6270281672477722
|
| 83 |
+
1750124511.5643709,8225,0.6259209513664246
|
| 84 |
+
1750124634.786423,8325,0.6271286606788635
|
| 85 |
+
1750124757.976234,8425,0.6265177726745605
|
| 86 |
+
1750124881.255619,8525,0.6269381046295166
|
| 87 |
+
1750125004.6163251,8625,0.6261979341506958
|
| 88 |
+
1750125127.80512,8725,0.6244840621948242
|
| 89 |
+
1750125251.1043782,8825,0.6263057589530945
|
| 90 |
+
1750125372.8366652,8925,0.6279277205467224
|
| 91 |
+
1750125488.471084,9025,0.6275722980499268
|
| 92 |
+
1750125604.8458922,9125,0.6267922520637512
|
| 93 |
+
1750125721.945167,9225,0.6274963021278381
|
| 94 |
+
1750125839.4399822,9325,0.6265906691551208
|
| 95 |
+
1750126568.87327,9438,0.6310899257659912
|
| 96 |
+
1750126686.0177972,9538,0.6319068670272827
|
| 97 |
+
1750126803.075713,9638,0.6296250224113464
|
| 98 |
+
1750126920.2006972,9738,0.6294779181480408
|
| 99 |
+
1750127037.2270699,9838,0.6321789026260376
|
| 100 |
+
1750127154.403425,9938,0.631394624710083
|
| 101 |
+
1750127271.66291,10038,0.6310882568359375
|
| 102 |
+
1750127389.2984068,10138,0.6316127181053162
|
| 103 |
+
1750127506.492415,10238,0.6307291388511658
|
| 104 |
+
1750127623.8482492,10338,0.630906879901886
|
| 105 |
+
1750127741.937981,10438,0.6305000185966492
|
| 106 |
+
1750127861.817396,10538,0.6327885985374451
|
| 107 |
+
1750127986.162461,10638,0.6290306448936462
|
| 108 |
+
1750128101.973453,10738,0.6331011056900024
|
| 109 |
+
1750128215.941491,10838,0.6333174109458923
|
| 110 |
+
1750128333.051722,10938,0.6326783299446106
|
| 111 |
+
1750128449.8881018,11038,0.632743239402771
|
| 112 |
+
1750128565.201629,11138,0.6354828476905823
|
| 113 |
+
1750128680.718143,11238,0.629326581954956
|
| 114 |
+
1750128796.200768,11338,0.6336666941642761
|
| 115 |
+
1750128912.1242769,11438,0.6331813931465149
|
| 116 |
+
1750129028.2624772,11538,0.6323400735855103
|
| 117 |
+
1750129145.020651,11638,0.6308204531669617
|
| 118 |
+
1750129260.3285818,11738,0.6334859132766724
|
| 119 |
+
1750129375.7598982,11838,0.6311874985694885
|
| 120 |
+
1750129490.823924,11938,0.6339693665504456
|
| 121 |
+
1750129612.1423109,12038,0.6321121454238892
|
| 122 |
+
1750129729.737945,12138,0.6323835849761963
|
| 123 |
+
1750129843.521337,12238,0.6343400478363037
|
| 124 |
+
1750129958.389817,12338,0.6346200704574585
|
| 125 |
+
1750130074.886712,12438,0.6317095756530762
|
| 126 |
+
1750130797.195461,12551,0.6377848982810974
|
| 127 |
+
1750130913.551477,12651,0.6359258890151978
|
| 128 |
+
1750131029.9605272,12751,0.6366654634475708
|
| 129 |
+
1750131146.178004,12851,0.6349962949752808
|
| 130 |
+
1750131262.316326,12951,0.6367310285568237
|
| 131 |
+
1750131378.346705,13051,0.6352874040603638
|
| 132 |
+
1750131495.698422,13151,0.6360171437263489
|
| 133 |
+
1750131616.7174149,13251,0.6332733035087585
|
| 134 |
+
1750131731.532917,13351,0.6358032822608948
|
| 135 |
+
1750131847.1308758,13451,0.6361176371574402
|
| 136 |
+
1750131968.430305,13551,0.6354730129241943
|
| 137 |
+
1750132084.27898,13651,0.6354466676712036
|
| 138 |
+
1750132200.038204,13751,0.6371151804924011
|
| 139 |
+
1750132319.169413,13851,0.6363713145256042
|
| 140 |
+
1750132438.390068,13951,0.637178897857666
|
| 141 |
+
1750132558.9431229,14051,0.6359381079673767
|
| 142 |
+
1750132675.8777852,14151,0.6376035809516907
|
| 143 |
+
1750132792.4886348,14251,0.634702205657959
|
| 144 |
+
1750132910.584851,14351,0.6372800469398499
|
| 145 |
+
1750133025.6731231,14451,0.6361268162727356
|
| 146 |
+
1750133142.90565,14551,0.6342365145683289
|
| 147 |
+
1750133258.155395,14651,0.636232852935791
|
| 148 |
+
1750133373.189059,14751,0.6362788081169128
|
| 149 |
+
1750133491.139358,14851,0.638356626033783
|
| 150 |
+
1750133607.066138,14951,0.6361353993415833
|
| 151 |
+
1750133723.421437,15051,0.6371831893920898
|
| 152 |
+
1750133840.847361,15151,0.6361868977546692
|
| 153 |
+
1750133959.700592,15251,0.638497531414032
|
| 154 |
+
1750134074.1966631,15351,0.6368253827095032
|
| 155 |
+
1750134187.9394948,15451,0.637081503868103
|
| 156 |
+
1750134302.713641,15551,0.6369711756706238
|
| 157 |
+
1750135023.636418,15664,0.6414214372634888
|
| 158 |
+
1750135141.1357188,15764,0.6409013271331787
|
| 159 |
+
1750135258.759758,15864,0.6405637264251709
|
| 160 |
+
1750135376.99012,15964,0.6418529748916626
|
| 161 |
+
1750135494.1965451,16064,0.6383431553840637
|
| 162 |
+
1750135608.808107,16164,0.6378315091133118
|
| 163 |
+
1750135723.458106,16264,0.6394038200378418
|
| 164 |
+
1750135839.631459,16364,0.640099287033081
|
| 165 |
+
1750135955.580974,16464,0.6385912895202637
|
| 166 |
+
1750136074.264535,16564,0.6373909115791321
|
| 167 |
+
1750136193.651,16664,0.6392021775245667
|
| 168 |
+
1750136314.32962,16764,0.6415532827377319
|
| 169 |
+
1750136434.18018,16864,0.6389154195785522
|
| 170 |
+
1750136555.7207282,16964,0.6406936049461365
|
| 171 |
+
1750136675.4657931,17064,0.6387518644332886
|
| 172 |
+
1750136795.084468,17164,0.6393167972564697
|
| 173 |
+
1750136914.205429,17264,0.6392285227775574
|
| 174 |
+
1750137035.372716,17364,0.6363167762756348
|
| 175 |
+
1750137154.359993,17464,0.6403204798698425
|
| 176 |
+
1750137273.301456,17564,0.6400998830795288
|
| 177 |
+
1750137394.2470431,17664,0.6375594139099121
|
| 178 |
+
1750137513.418113,17764,0.6383321285247803
|
| 179 |
+
1750137634.678452,17864,0.6409785747528076
|
| 180 |
+
1750137756.4706821,17964,0.6375900506973267
|
| 181 |
+
1750137875.473762,18064,0.6377848982810974
|
| 182 |
+
1750137994.2557068,18164,0.6396868824958801
|
| 183 |
+
1750138113.775125,18264,0.6368486285209656
|
| 184 |
+
1750138232.8082612,18364,0.6396819949150085
|
| 185 |
+
1750138354.624307,18464,0.6409926414489746
|
| 186 |
+
1750138473.795286,18564,0.6381832361221313
|
| 187 |
+
1750138592.955252,18664,0.638762891292572
|
| 188 |
+
1750139332.563206,18777,0.6417085528373718
|
| 189 |
+
1750139450.918967,18877,0.6413272023200989
|
| 190 |
+
1750139568.5608902,18977,0.6415269374847412
|
| 191 |
+
1750139683.728093,19077,0.642278790473938
|
| 192 |
+
1750139798.8207428,19177,0.6432695984840393
|
| 193 |
+
1750139914.742817,19277,0.6426323652267456
|
| 194 |
+
1750140030.56935,19377,0.6408547759056091
|
| 195 |
+
1750140146.465847,19477,0.6393155455589294
|
| 196 |
+
1750140262.49084,19577,0.6393713355064392
|
| 197 |
+
1750140377.922889,19677,0.6409344673156738
|
| 198 |
+
1750140493.495858,19777,0.6416102647781372
|
| 199 |
+
1750140608.8578281,19877,0.6388081908226013
|
| 200 |
+
1750140724.1570408,19977,0.6407830715179443
|
| 201 |
+
1750140839.597943,20077,0.6431452035903931
|
| 202 |
+
1750140955.138979,20177,0.6435882449150085
|
| 203 |
+
1750141070.454108,20277,0.6393401026725769
|
| 204 |
+
1750141185.7768152,20377,0.6400582194328308
|
| 205 |
+
1750141301.044354,20477,0.6432475447654724
|
| 206 |
+
1750141416.3390641,20577,0.640428900718689
|
| 207 |
+
1750141531.601494,20677,0.6398032903671265
|
| 208 |
+
1750141646.825821,20777,0.6412371397018433
|
| 209 |
+
1750141761.996236,20877,0.6410496234893799
|
| 210 |
+
1750141877.182584,20977,0.6418633460998535
|
| 211 |
+
1750141992.5366201,21077,0.6428903341293335
|
| 212 |
+
1750142107.586726,21177,0.6411133408546448
|
| 213 |
+
1750142222.6049569,21277,0.6410361528396606
|
| 214 |
+
1750142337.798254,21377,0.6409087181091309
|
| 215 |
+
1750142453.484319,21477,0.6398229002952576
|
| 216 |
+
1750142568.740176,21577,0.6409865021705627
|
| 217 |
+
1750142683.7909062,21677,0.6418002247810364
|
| 218 |
+
1750142798.773196,21777,0.6425018310546875
|
| 219 |
+
1750143520.164426,21890,0.6443571448326111
|
| 220 |
+
1750143634.6251478,21990,0.6450649499893188
|
| 221 |
+
1750143749.21843,22090,0.6443308591842651
|
| 222 |
+
1750143864.2806258,22190,0.6435453295707703
|
| 223 |
+
1750143979.28554,22290,0.6437665224075317
|
| 224 |
+
1750144094.193641,22390,0.6440392136573792
|
| 225 |
+
1750144209.3165162,22490,0.6438339352607727
|
| 226 |
+
1750144324.3223748,22590,0.6418835520744324
|
| 227 |
+
1750144439.282978,22690,0.6438413262367249
|
| 228 |
+
1750144554.216827,22790,0.6436820030212402
|
| 229 |
+
1750144669.008311,22890,0.6432769894599915
|
| 230 |
+
1750144783.814523,22990,0.6408646106719971
|
| 231 |
+
1750144898.5940568,23090,0.6421887278556824
|
| 232 |
+
1750145013.393398,23190,0.6414957046508789
|
| 233 |
+
1750145128.2170281,23290,0.6440876126289368
|
| 234 |
+
1750145242.972746,23390,0.6400729417800903
|
| 235 |
+
1750145357.732231,23490,0.6425802707672119
|
| 236 |
+
1750145472.505175,23590,0.6433351635932922
|
| 237 |
+
1750145587.4498,23690,0.6431158185005188
|
| 238 |
+
1750145702.163373,23790,0.6410600543022156
|
| 239 |
+
1750145816.921547,23890,0.639877438545227
|
| 240 |
+
1750145931.7669811,23990,0.6422916650772095
|
| 241 |
+
1750146046.504975,24090,0.6443449854850769
|
| 242 |
+
1750146161.2147121,24190,0.6426194906234741
|
| 243 |
+
1750146275.9584918,24290,0.6449301242828369
|
| 244 |
+
1750146390.784697,24390,0.6416991353034973
|
| 245 |
+
1750146505.614413,24490,0.6413970589637756
|
| 246 |
+
1750146620.282717,24590,0.6429215669631958
|
| 247 |
+
1750146734.8804638,24690,0.6432873606681824
|
| 248 |
+
1750146849.533429,24790,0.6407738924026489
|
| 249 |
+
1750146964.5592299,24890,0.6440024375915527
|
| 250 |
+
1750147679.542837,25003,0.6454715132713318
|
| 251 |
+
1750147793.6827168,25103,0.6458553671836853
|
| 252 |
+
1750147907.787584,25203,0.6463437676429749
|
| 253 |
+
1750148021.8308291,25303,0.6460214257240295
|
| 254 |
+
1750148136.3536232,25403,0.6446813941001892
|
| 255 |
+
1750148250.906328,25503,0.6433265805244446
|
| 256 |
+
1750148365.442896,25603,0.6450576186180115
|
| 257 |
+
1750148480.200042,25703,0.644380509853363
|
| 258 |
+
1750148595.058618,25803,0.6459301710128784
|
| 259 |
+
1750148709.66884,25903,0.6446764469146729
|
| 260 |
+
1750148824.243923,26003,0.6451599597930908
|
| 261 |
+
1750148939.0552242,26103,0.6425201892852783
|
| 262 |
+
1750149053.6095018,26203,0.6443584561347961
|
| 263 |
+
1750149168.228619,26303,0.6439142227172852
|
| 264 |
+
1750149282.822546,26403,0.6410729289054871
|
| 265 |
+
1750149397.330698,26503,0.6427549123764038
|
| 266 |
+
1750149511.8132858,26603,0.6437352895736694
|
| 267 |
+
1750149626.1733289,26703,0.6415925025939941
|
| 268 |
+
1750149740.6842072,26803,0.6445502638816833
|
| 269 |
+
1750149855.045606,26903,0.642570436000824
|
| 270 |
+
1750149969.485446,27003,0.6419209837913513
|
| 271 |
+
1750150083.913009,27103,0.6432291865348816
|
| 272 |
+
1750150198.319511,27203,0.6437806487083435
|
| 273 |
+
1750150312.843913,27303,0.6411721706390381
|
| 274 |
+
1750150427.516995,27403,0.6436336040496826
|
| 275 |
+
1750150542.11465,27503,0.6441850662231445
|
| 276 |
+
1750150656.459729,27603,0.6442763209342957
|
| 277 |
+
1750150771.42578,27703,0.6445937752723694
|
| 278 |
+
1750150885.853046,27803,0.6449534296989441
|
| 279 |
+
1750151001.460722,27903,0.6456507444381714
|
| 280 |
+
1750151118.4321642,28003,0.64631187915802
|
| 281 |
+
1750151829.248618,28116,0.646086573600769
|
| 282 |
+
1750151943.078294,28216,0.6479123830795288
|
| 283 |
+
1750152057.047113,28316,0.6474877595901489
|
| 284 |
+
1750152171.432849,28416,0.6482303738594055
|
| 285 |
+
1750152286.508014,28516,0.6460502743721008
|
| 286 |
+
1750152401.1489978,28616,0.6448835730552673
|
| 287 |
+
1750152515.413527,28716,0.6456035375595093
|
| 288 |
+
1750152629.652021,28816,0.6447438597679138
|
| 289 |
+
1750152743.952258,28916,0.6452659368515015
|
| 290 |
+
1750152858.325719,29016,0.6441513299942017
|
| 291 |
+
1750152972.611438,29116,0.6438400745391846
|
| 292 |
+
1750153086.916217,29216,0.644781231880188
|
| 293 |
+
1750153201.144207,29316,0.6442689895629883
|
| 294 |
+
1750153315.309581,29416,0.6456679105758667
|
| 295 |
+
1750153429.44458,29516,0.6448688507080078
|
| 296 |
+
1750153543.543734,29616,0.645100474357605
|
| 297 |
+
1750153657.6779819,29716,0.6447163224220276
|
| 298 |
+
1750153771.8275049,29816,0.6465955972671509
|
| 299 |
+
1750153885.92943,29916,0.6442383527755737
|
| 300 |
+
1750153999.957532,30016,0.6444007158279419
|
| 301 |
+
1750154114.308219,30116,0.644033670425415
|
| 302 |
+
1750154228.255648,30216,0.6465018391609192
|
| 303 |
+
1750154342.671011,30316,0.6441317200660706
|
| 304 |
+
1750154456.92466,30416,0.6441439986228943
|
| 305 |
+
1750154570.8514502,30516,0.6443982720375061
|
| 306 |
+
1750154684.893121,30616,0.6452634930610657
|
| 307 |
+
1750154798.786987,30716,0.6440244913101196
|
| 308 |
+
1750154913.827877,30816,0.6440759897232056
|
| 309 |
+
1750155029.3820329,30916,0.6450165510177612
|
| 310 |
+
1750155143.22456,31016,0.6444761157035828
|
| 311 |
+
1750155257.108583,31116,0.6448621153831482
|
| 312 |
+
1750155962.728151,31229,0.6478039026260376
|
| 313 |
+
1750156075.928097,31329,0.6434123516082764
|
| 314 |
+
1750156189.48367,31429,0.6451672911643982
|
| 315 |
+
1750156303.394955,31529,0.6452407836914062
|
| 316 |
+
1750156417.372407,31629,0.6449154615402222
|
| 317 |
+
1750156531.142308,31729,0.6444166898727417
|
| 318 |
+
1750156644.83728,31829,0.6429234147071838
|
| 319 |
+
1750156758.558246,31929,0.6457071304321289
|
| 320 |
+
1750156872.236898,32029,0.6429779529571533
|
| 321 |
+
1750156985.8631868,32129,0.6454914212226868
|
| 322 |
+
1750157099.471839,32229,0.6448553800582886
|
| 323 |
+
1750157213.144945,32329,0.6450937390327454
|
| 324 |
+
1750157326.83038,32429,0.6457101702690125
|
| 325 |
+
1750157440.418242,32529,0.6459914445877075
|
| 326 |
+
1750157554.108214,32629,0.6459436416625977
|
| 327 |
+
1750157667.764524,32729,0.6446029543876648
|
| 328 |
+
1750157781.77124,32829,0.6433131098747253
|
| 329 |
+
1750157895.391364,32929,0.6455876231193542
|
| 330 |
+
1750158009.4498491,33029,0.6422027945518494
|
| 331 |
+
1750158123.400573,33129,0.6441372632980347
|
| 332 |
+
1750158237.274854,33229,0.6478474140167236
|
| 333 |
+
1750158351.4057229,33329,0.642891526222229
|
| 334 |
+
1750158465.0631769,33429,0.644404411315918
|
| 335 |
+
1750158578.82605,33529,0.643045961856842
|
| 336 |
+
1750158693.140485,33629,0.6461200714111328
|
| 337 |
+
1750158806.7955441,33729,0.6441783308982849
|
| 338 |
+
1750158921.5146172,33829,0.6449828147888184
|
| 339 |
+
1750159037.7530391,33929,0.6435337066650391
|
| 340 |
+
1750159154.457119,34029,0.6459822058677673
|
| 341 |
+
1750159268.228975,34129,0.6478161811828613
|
| 342 |
+
1750159381.740445,34229,0.6426378488540649
|
| 343 |
+
1750160085.005162,34342,0.6470090746879578
|
| 344 |
+
1750160198.348043,34442,0.6481801271438599
|
| 345 |
+
1750160311.747175,34542,0.6475827097892761
|
| 346 |
+
1750160425.138507,34642,0.6469852924346924
|
| 347 |
+
1750160538.48875,34742,0.6483670473098755
|
| 348 |
+
1750160651.848408,34842,0.6453578472137451
|
| 349 |
+
1750160765.314482,34942,0.6456782817840576
|
| 350 |
+
1750160878.866994,35042,0.646591305732727
|
| 351 |
+
1750160992.230761,35142,0.6468860507011414
|
| 352 |
+
1750161105.693148,35242,0.6465141177177429
|
| 353 |
+
1750161218.995789,35342,0.6469963192939758
|
| 354 |
+
1750161332.5664508,35442,0.6466348171234131
|
| 355 |
+
1750161445.919214,35542,0.6484062671661377
|
| 356 |
+
1750161559.383483,35642,0.6455906629562378
|
| 357 |
+
1750161673.6305,35742,0.6493076086044312
|
| 358 |
+
1750161787.254136,35842,0.6477861404418945
|
| 359 |
+
1750161900.676028,35942,0.645685076713562
|
| 360 |
+
1750162014.149937,36042,0.6454393267631531
|
| 361 |
+
1750162127.658717,36142,0.6482794284820557
|
| 362 |
+
1750162241.275851,36242,0.6471041440963745
|
| 363 |
+
1750162354.631356,36342,0.6425851583480835
|
| 364 |
+
1750162468.086647,36442,0.6445594429969788
|
| 365 |
+
1750162581.5384982,36542,0.6445643305778503
|
| 366 |
+
1750162695.087326,36642,0.6442175507545471
|
| 367 |
+
1750162809.828563,36742,0.6470441222190857
|
| 368 |
+
1750162924.7870321,36842,0.6449258327484131
|
| 369 |
+
1750163038.209954,36942,0.646640956401825
|
| 370 |
+
1750163151.635194,37042,0.6461133360862732
|
| 371 |
+
1750163264.974731,37142,0.6443866491317749
|
| 372 |
+
1750163378.221429,37242,0.6464932560920715
|
| 373 |
+
1750163491.5059998,37342,0.6447793841362
|
| 374 |
+
1750164193.677419,37455,0.6476581692695618
|
| 375 |
+
1750164307.195159,37555,0.647255539894104
|
| 376 |
+
1750164421.066918,37655,0.6487867832183838
|
| 377 |
+
1750164534.736021,37755,0.6464687585830688
|
| 378 |
+
1750164648.24837,37855,0.6465888619422913
|
| 379 |
+
1750164761.81198,37955,0.6500686407089233
|
| 380 |
+
1750164875.635211,38055,0.645599901676178
|
| 381 |
+
1750164989.1543128,38155,0.6471317410469055
|
| 382 |
+
1750165102.588775,38255,0.6502401828765869
|
| 383 |
+
1750165216.941986,38355,0.6477800011634827
|
| 384 |
+
1750165331.034644,38455,0.6475955843925476
|
| 385 |
+
1750165445.119753,38555,0.647935688495636
|
| 386 |
+
1750165558.645719,38655,0.6478866338729858
|
| 387 |
+
1750165672.160977,38755,0.6475901007652283
|
| 388 |
+
1750165785.6155572,38855,0.6450784206390381
|
| 389 |
+
1750165899.311167,38955,0.6473743915557861
|
| 390 |
+
1750166012.88341,39055,0.645641565322876
|
| 391 |
+
1750166126.417427,39155,0.6463271975517273
|
| 392 |
+
1750166239.942431,39255,0.6468958258628845
|
| 393 |
+
1750166353.3543758,39355,0.6461611390113831
|
| 394 |
+
1750166466.737011,39455,0.646640956401825
|
| 395 |
+
1750166580.132757,39555,0.6450404524803162
|
| 396 |
+
1750166694.087965,39655,0.6461501121520996
|
| 397 |
+
1750166808.932729,39755,0.6485146880149841
|
| 398 |
+
1750166922.1546009,39855,0.6473143100738525
|
| 399 |
+
1750167035.455957,39955,0.6466066241264343
|
| 400 |
+
1750167148.981168,40055,0.6473357677459717
|
| 401 |
+
1750167262.136349,40155,0.6464975476264954
|
| 402 |
+
1750167375.418537,40255,0.6477984189987183
|
| 403 |
+
1750167488.6276622,40355,0.6458902955055237
|
| 404 |
+
1750167601.80478,40455,0.6469926238059998
|
| 405 |
+
1750168305.549919,40568,0.6479889154434204
|
| 406 |
+
1750168418.686291,40668,0.6489663124084473
|
| 407 |
+
1750168532.243145,40768,0.6484571099281311
|
| 408 |
+
1750168645.629598,40868,0.6462671756744385
|
| 409 |
+
1750168759.0465431,40968,0.6473872661590576
|
| 410 |
+
1750168872.680598,41068,0.649395227432251
|
| 411 |
+
1750168986.166226,41168,0.6480453610420227
|
| 412 |
+
1750169099.474284,41268,0.6467450857162476
|
| 413 |
+
1750169212.7410848,41368,0.6484111547470093
|
| 414 |
+
1750169326.04799,41468,0.6493627429008484
|
| 415 |
+
1750169439.392054,41568,0.6476011276245117
|
| 416 |
+
1750169552.8411279,41668,0.6482506394386292
|
| 417 |
+
1750169666.2452219,41768,0.6467812657356262
|
| 418 |
+
1750169779.630753,41868,0.6488823294639587
|
| 419 |
+
1750169892.997459,41968,0.6475870013237
|
| 420 |
+
1750170006.325802,42068,0.6473737955093384
|
| 421 |
+
1750170119.74634,42168,0.6490128636360168
|
| 422 |
+
1750170233.111603,42268,0.6491776704788208
|
| 423 |
+
1750170346.464087,42368,0.6470282077789307
|
| 424 |
+
1750170459.819036,42468,0.6465251445770264
|
| 425 |
+
1750170573.186072,42568,0.6488749980926514
|
| 426 |
+
1750170687.445025,42668,0.6481562256813049
|
| 427 |
+
1750170802.2272902,42768,0.6471311450004578
|
| 428 |
+
1750170915.545244,42868,0.6452487707138062
|
| 429 |
+
1750171029.069637,42968,0.6465508341789246
|
| 430 |
+
1750171142.355304,43068,0.6480312347412109
|
| 431 |
+
1750171255.3594549,43168,0.6468027234077454
|
| 432 |
+
1750171368.3259819,43268,0.647104799747467
|
| 433 |
+
1750171481.2981818,43368,0.6469283103942871
|
| 434 |
+
1750171594.1976168,43468,0.6453443765640259
|
| 435 |
+
1750171707.4200919,43568,0.6480011940002441
|
| 436 |
+
1750172411.56715,43681,0.6506575345993042
|
| 437 |
+
1750172525.263346,43781,0.6503186225891113
|
| 438 |
+
1750172638.9083562,43881,0.6491972804069519
|
| 439 |
+
1750172752.3709812,43981,0.6485925316810608
|
| 440 |
+
1750172865.821585,44081,0.6492530703544617
|
| 441 |
+
1750172979.3060749,44181,0.648758590221405
|
| 442 |
+
1750173092.803905,44281,0.6458437442779541
|
| 443 |
+
1750173206.320617,44381,0.6495532989501953
|
| 444 |
+
1750173320.0753589,44481,0.6473075747489929
|
| 445 |
+
1750173433.839679,44581,0.6493823528289795
|
| 446 |
+
1750173547.568301,44681,0.6470006108283997
|
| 447 |
+
1750173661.262742,44781,0.64878249168396
|
| 448 |
+
1750173774.887253,44881,0.6495998501777649
|
| 449 |
+
1750173888.577277,44981,0.6497346758842468
|
| 450 |
+
1750174002.540683,45081,0.650436282157898
|
| 451 |
+
1750174116.500936,45181,0.6478793025016785
|
| 452 |
+
1750174230.292064,45281,0.6493216753005981
|
| 453 |
+
1750174344.00959,45381,0.6477751135826111
|
| 454 |
+
1750174458.6823559,45481,0.6478314995765686
|
| 455 |
+
1750174582.002251,45581,0.6497757434844971
|
| 456 |
+
1750174697.423135,45681,0.6471936106681824
|
| 457 |
+
1750174812.9403892,45781,0.6474087238311768
|
| 458 |
+
1750174926.491348,45881,0.6489497423171997
|
| 459 |
+
1750175040.076024,45981,0.6474913954734802
|
| 460 |
+
1750175154.131111,46081,0.6461586952209473
|
| 461 |
+
1750175268.282666,46181,0.6451795101165771
|
| 462 |
+
1750175382.425216,46281,0.6477794051170349
|
| 463 |
+
1750175496.392714,46381,0.6479068398475647
|
| 464 |
+
1750175609.770999,46481,0.6458786725997925
|
| 465 |
+
1750175724.055951,46581,0.646839439868927
|
| 466 |
+
1750175838.421676,46681,0.6497340798377991
|
| 467 |
+
1750176580.928725,46794,0.6514317393302917
|
| 468 |
+
1750176705.306518,46894,0.6506942510604858
|
| 469 |
+
1750176831.4460852,46994,0.6502358913421631
|
| 470 |
+
1750176955.433076,47094,0.6493523120880127
|
| 471 |
+
1750177073.9730392,47194,0.6472653150558472
|
| 472 |
+
1750177192.6517901,47294,0.6478253602981567
|
| 473 |
+
1750177314.1818519,47394,0.6494748592376709
|
| 474 |
+
1750177434.44067,47494,0.6500471830368042
|
| 475 |
+
1750177553.952735,47594,0.6531305313110352
|
| 476 |
+
1750177673.134149,47694,0.6493964195251465
|
| 477 |
+
1750177792.270995,47794,0.6481494903564453
|
| 478 |
+
1750177911.997818,47894,0.6478658318519592
|
| 479 |
+
1750178031.541447,47994,0.6486777067184448
|
| 480 |
+
1750178152.013373,48094,0.647212028503418
|
| 481 |
+
1750178282.920251,48194,0.6479981541633606
|
| 482 |
+
1750178404.602707,48294,0.650227963924408
|
| 483 |
+
1750178524.305347,48394,0.649756133556366
|
| 484 |
+
1750178645.1386409,48494,0.649482250213623
|
| 485 |
+
1750178766.525677,48594,0.646979808807373
|
| 486 |
+
1750178888.9640179,48694,0.6477328538894653
|
| 487 |
+
1750179011.975293,48794,0.6484197378158569
|
| 488 |
+
1750179135.350307,48894,0.6497855186462402
|
| 489 |
+
1750179259.6127,48994,0.6480778455734253
|
| 490 |
+
1750179383.8094609,49094,0.6481825709342957
|
| 491 |
+
1750179511.237918,49194,0.6486868858337402
|
| 492 |
+
1750179627.0805178,49294,0.6475882530212402
|
| 493 |
+
1750179746.269055,49394,0.646894633769989
|
| 494 |
+
1750179866.396343,49494,0.648546576499939
|
| 495 |
+
1750179986.382614,49594,0.6483370065689087
|
| 496 |
+
1750180107.75221,49694,0.64788419008255
|
| 497 |
+
1750180227.678242,49794,0.6463406682014465
|
| 498 |
+
1750180969.0883079,49907,0.6518536806106567
|
| 499 |
+
1750181090.172385,50007,0.650023877620697
|
| 500 |
+
1750181212.134043,50107,0.6507181525230408
|
| 501 |
+
1750181334.533705,50207,0.6484644412994385
|
| 502 |
+
1750181465.882124,50307,0.6544803977012634
|
| 503 |
+
1750181587.068089,50407,0.6485630869865417
|
| 504 |
+
1750181708.084366,50507,0.6504883766174316
|
| 505 |
+
1750181840.999325,50607,0.6505625247955322
|
| 506 |
+
1750181961.5115259,50707,0.648201584815979
|
| 507 |
+
1750182079.541576,50807,0.6486090421676636
|
| 508 |
+
1750182197.266737,50907,0.6495968103408813
|
| 509 |
+
1750182315.1002722,51007,0.6482720375061035
|
| 510 |
+
1750182432.904394,51107,0.6480674147605896
|
| 511 |
+
1750182551.339682,51207,0.6502254605293274
|
| 512 |
+
1750182670.478698,51307,0.6492512226104736
|
| 513 |
+
1750182789.90627,51407,0.6473082304000854
|
| 514 |
+
1750182908.7698948,51507,0.6484963297843933
|
| 515 |
+
1750183037.4613519,51607,0.6488296389579773
|
| 516 |
+
1750183161.471972,51707,0.6489105224609375
|
| 517 |
+
1750183291.52994,51807,0.6490851640701294
|
| 518 |
+
1750183407.790381,51907,0.6481225490570068
|
| 519 |
+
1750183521.6878788,52007,0.6502506136894226
|
| 520 |
+
1750183637.919424,52107,0.647424042224884
|
| 521 |
+
1750183752.363044,52207,0.6496298909187317
|
| 522 |
+
1750183866.9297988,52307,0.6503333449363708
|
| 523 |
+
1750183981.390929,52407,0.6479521989822388
|
| 524 |
+
1750184095.5802271,52507,0.6479148268699646
|
| 525 |
+
1750184210.028254,52607,0.6463069915771484
|
| 526 |
+
1750184324.589393,52707,0.6484387516975403
|
| 527 |
+
1750184438.467419,52807,0.6486115455627441
|
| 528 |
+
1750184552.48716,52907,0.6494203209877014
|
| 529 |
+
1750185269.738076,53020,0.6529350280761719
|
| 530 |
+
1750185385.6406112,53120,0.6514607667922974
|
| 531 |
+
1750185518.791826,53220,0.6502573490142822
|
| 532 |
+
1750185648.3261309,53320,0.6507150530815125
|
| 533 |
+
1750185766.350579,53420,0.6509687304496765
|
| 534 |
+
1750185882.4859111,53520,0.6498749852180481
|
| 535 |
+
1750185999.920657,53620,0.6490496397018433
|
| 536 |
+
1750186116.470092,53720,0.6494356393814087
|
| 537 |
+
1750186233.701521,53820,0.6506243944168091
|
| 538 |
+
1750186348.1218078,53920,0.6511102914810181
|
| 539 |
+
1750186462.8153942,54020,0.6500092148780823
|
| 540 |
+
1750186577.176766,54120,0.6470000147819519
|
| 541 |
+
1750186691.626355,54220,0.6490698456764221
|
| 542 |
+
1750186809.135169,54320,0.6488339304924011
|
| 543 |
+
1750186924.101025,54420,0.6512530446052551
|
| 544 |
+
1750187038.042819,54520,0.6513051390647888
|
| 545 |
+
1750187153.876576,54620,0.649454653263092
|
| 546 |
+
1750187270.856839,54720,0.6488707065582275
|
| 547 |
+
1750187388.092273,54820,0.6468566060066223
|
| 548 |
+
1750187505.3842618,54920,0.6477567553520203
|
| 549 |
+
1750187622.499163,55020,0.6494356393814087
|
| 550 |
+
1750187739.232141,55120,0.6487793922424316
|
| 551 |
+
1750187856.038623,55220,0.6468333601951599
|
| 552 |
+
1750187972.701476,55320,0.6477089524269104
|
| 553 |
+
1750188089.368793,55420,0.6492751240730286
|
| 554 |
+
1750188209.86198,55520,0.6488946080207825
|
| 555 |
+
1750188327.6926038,55620,0.6480735540390015
|
| 556 |
+
1750188443.593812,55720,0.6484246253967285
|
| 557 |
+
1750188558.7403882,55820,0.649732232093811
|
| 558 |
+
1750188673.727296,55920,0.648169755935669
|
| 559 |
+
1750188788.464843,56020,0.648758590221405
|
| 560 |
+
1750189510.837446,56133,0.652562141418457
|
| 561 |
+
1750189636.569664,56233,0.6531740427017212
|
| 562 |
+
1750189760.1718102,56333,0.6522052884101868
|
| 563 |
+
1750189884.2184691,56433,0.6514399647712708
|
| 564 |
+
1750190002.556967,56533,0.6519460678100586
|
| 565 |
+
1750190125.644963,56633,0.6504319906234741
|
| 566 |
+
1750190253.3642602,56733,0.6498603224754333
|
| 567 |
+
1750190373.920746,56833,0.6517855525016785
|
| 568 |
+
1750190491.961588,56933,0.6493884921073914
|
| 569 |
+
1750190613.771198,57033,0.6494656801223755
|
| 570 |
+
1750190734.327851,57133,0.6519926190376282
|
| 571 |
+
1750190862.187742,57233,0.6505416631698608
|
| 572 |
+
1750190984.956588,57333,0.6512144804000854
|
| 573 |
+
1750191104.267488,57433,0.6494032144546509
|
| 574 |
+
1750191225.094819,57533,0.6509761214256287
|
| 575 |
+
1750191345.572768,57633,0.6475679874420166
|
| 576 |
+
1750191470.165256,57733,0.6501360535621643
|
| 577 |
+
1750191587.409512,57833,0.6503192186355591
|
| 578 |
+
1750191703.425641,57933,0.6478345394134521
|
| 579 |
+
1750191819.150804,58033,0.6481010913848877
|
| 580 |
+
1750191935.777419,58133,0.650660514831543
|
| 581 |
+
1750192051.979758,58233,0.6503952145576477
|
| 582 |
+
1750192167.9384131,58333,0.6502059102058411
|
| 583 |
+
1750192293.379223,58433,0.6492549180984497
|
| 584 |
+
1750192417.20106,58533,0.6459411978721619
|
| 585 |
+
1750192542.735241,58633,0.6474999785423279
|
| 586 |
+
1750192661.1384242,58733,0.6492089629173279
|
| 587 |
+
1750192802.421109,58833,0.6491139531135559
|
| 588 |
+
1750192933.7944162,58933,0.647077202796936
|
| 589 |
+
1750193050.276062,59033,0.6494344472885132
|
| 590 |
+
1750193176.119875,59133,0.6495790481567383
|
| 591 |
+
1750193920.51338,59246,0.6499886512756348
|
| 592 |
+
1750194045.084266,59346,0.6525447368621826
|
| 593 |
+
1750194166.152643,59446,0.6545361280441284
|
| 594 |
+
1750194290.1062028,59546,0.6511783003807068
|
| 595 |
+
1750194413.406618,59646,0.6522555351257324
|
| 596 |
+
1750194532.297371,59746,0.6506648063659668
|
| 597 |
+
1750194651.915478,59846,0.6495827436447144
|
| 598 |
+
1750194768.454585,59946,0.6509699821472168
|
| 599 |
+
1750194885.989171,60046,0.6505171656608582
|
| 600 |
+
1750195004.4217758,60146,0.6496574878692627
|
| 601 |
+
1750195122.796754,60246,0.6495760083198547
|
| 602 |
+
1750195241.1685178,60346,0.6539803743362427
|
| 603 |
+
1750195359.380128,60446,0.6518909335136414
|
| 604 |
+
1750195477.500273,60546,0.6500398516654968
|
| 605 |
+
1750195595.194546,60646,0.6493155360221863
|
| 606 |
+
1750195712.6640158,60746,0.6507965922355652
|
| 607 |
+
1750195830.128203,60846,0.6487315893173218
|
| 608 |
+
1750195947.532106,60946,0.64984130859375
|
| 609 |
+
1750196064.6767318,61046,0.6507530808448792
|
| 610 |
+
1750196181.839788,61146,0.6503750085830688
|
| 611 |
+
1750196298.9782882,61246,0.6491789221763611
|
| 612 |
+
1750196419.469962,61346,0.6494809985160828
|
| 613 |
+
1750196537.685473,61446,0.648508608341217
|
| 614 |
+
1750196654.741201,61546,0.6499896049499512
|
| 615 |
+
1750196771.60573,61646,0.6491972804069519
|
| 616 |
+
1750196888.68485,61746,0.6472653150558472
|
| 617 |
+
1750197005.5654202,61846,0.648643970489502
|
| 618 |
+
1750197122.58132,61946,0.6495269536972046
|
| 619 |
+
1750197239.6836321,62046,0.6489706039428711
|
| 620 |
+
1750197356.658911,62146,0.650370717048645
|
| 621 |
+
1750197473.705615,62246,0.6491789221763611
|
| 622 |
+
1750198215.8670452,62359,0.6532827615737915
|
| 623 |
+
1750198335.5019982,62459,0.6520931124687195
|
| 624 |
+
1750198455.383928,62559,0.6538217067718506
|
| 625 |
+
1750198575.79297,62659,0.6519485116004944
|
| 626 |
+
1750198696.533057,62759,0.6506170034408569
|
| 627 |
+
1750198815.859319,62859,0.6506446003913879
|
| 628 |
+
1750198935.44478,62959,0.6487156748771667
|
| 629 |
+
1750199059.1411462,63059,0.6485925316810608
|
| 630 |
+
1750199184.803943,63159,0.6515673995018005
|
| 631 |
+
1750199310.925441,63259,0.6507904529571533
|
| 632 |
+
1750199433.72157,63359,0.6506826281547546
|
| 633 |
+
1750199561.5135539,63459,0.649024486541748
|
| 634 |
+
1750199691.5398378,63559,0.6502034068107605
|
| 635 |
+
1750199822.156569,63659,0.6515159606933594
|
| 636 |
+
1750199951.5579321,63759,0.6505275964736938
|
| 637 |
+
1750200083.599078,63859,0.6500361561775208
|
| 638 |
+
1750200207.467263,63959,0.6518909335136414
|
| 639 |
+
1750200330.1590981,64059,0.6511121392250061
|
| 640 |
+
1750200469.507869,64159,0.6491243839263916
|
| 641 |
+
1750200594.17766,64259,0.6516826152801514
|
| 642 |
+
1750200718.7385821,64359,0.6484901905059814
|
| 643 |
+
1750200846.544816,64459,0.6489362716674805
|
| 644 |
+
1750200980.4076228,64559,0.649101734161377
|
| 645 |
+
1750201118.501771,64659,0.6507015824317932
|
| 646 |
+
1750201251.713602,64759,0.6516936421394348
|
| 647 |
+
1750201387.155283,64859,0.649645209312439
|
| 648 |
+
1750201513.980899,64959,0.6489956974983215
|
| 649 |
+
1750201636.0862138,65059,0.6496255993843079
|
| 650 |
+
1750201758.982406,65159,0.6510600447654724
|
| 651 |
+
1750201883.149479,65259,0.6485349535942078
|
| 652 |
+
1750202006.093005,65359,0.6479546427726746
|
| 653 |
+
1750202777.213505,65472,0.6521849632263184
|
| 654 |
+
1750202896.2299972,65572,0.6521967053413391
|
| 655 |
+
1750203019.795586,65672,0.6540802717208862
|
| 656 |
+
1750203140.703593,65772,0.6515918970108032
|
| 657 |
+
1750203263.335868,65872,0.651580274105072
|
| 658 |
+
1750203384.854414,65972,0.651120126247406
|
| 659 |
+
1750203504.846452,66072,0.6513449549674988
|
| 660 |
+
1750203628.212611,66172,0.6506274342536926
|
| 661 |
+
1750203756.130655,66272,0.6480668187141418
|
| 662 |
+
1750203876.6895628,66372,0.6519798040390015
|
| 663 |
+
1750204006.239497,66472,0.6513695120811462
|
| 664 |
+
1750204142.196408,66572,0.6510729193687439
|
| 665 |
+
1750204287.2481012,66672,0.6501390933990479
|
| 666 |
+
1750204423.739143,66772,0.6511347889900208
|
| 667 |
+
1750204559.135356,66872,0.6513848304748535
|
| 668 |
+
1750204695.453165,66972,0.6511813998222351
|
| 669 |
+
1750204819.261575,67072,0.6505778431892395
|
| 670 |
+
1750204942.811445,67172,0.6470471620559692
|
| 671 |
+
1750205066.229961,67272,0.6509184837341309
|
| 672 |
+
1750205193.411467,67372,0.6518590450286865
|
| 673 |
+
1750205321.508208,67472,0.6499056220054626
|
| 674 |
+
1750205444.7884219,67572,0.6505864262580872
|
| 675 |
+
1750205564.955932,67672,0.650077223777771
|
| 676 |
+
1750205687.457877,67772,0.6515147089958191
|
| 677 |
+
1750205812.0596402,67872,0.6515190005302429
|
| 678 |
+
1750205934.748478,67972,0.65004962682724
|
| 679 |
+
1750206063.267509,68072,0.6492340564727783
|
| 680 |
+
1750206183.376571,68172,0.6521697044372559
|
| 681 |
+
1750206301.27382,68272,0.6474865078926086
|
| 682 |
+
1750206421.328908,68372,0.6496250033378601
|
| 683 |
+
1750206539.485643,68472,0.6490839719772339
|
| 684 |
+
1750207321.8521621,68585,0.6537524461746216
|
| 685 |
+
1750207442.4434109,68685,0.6518541574478149
|
| 686 |
+
1750207566.014395,68785,0.6520618796348572
|
| 687 |
+
1750207704.319272,68885,0.6525790691375732
|
| 688 |
+
1750207846.854864,68985,0.6519289016723633
|
| 689 |
+
1750207984.225697,69085,0.652522087097168
|
| 690 |
+
1750208118.714988,69185,0.6517009735107422
|
| 691 |
+
1750208242.605837,69285,0.6530214548110962
|
| 692 |
+
1750208361.1823092,69385,0.6520110368728638
|
| 693 |
+
1750208487.8287401,69485,0.6518511176109314
|
| 694 |
+
1750208617.661794,69585,0.6500625014305115
|
| 695 |
+
1750208743.342601,69685,0.6487806439399719
|
| 696 |
+
1750208870.029738,69785,0.6512181162834167
|
| 697 |
+
1750208997.522767,69885,0.6523026823997498
|
| 698 |
+
1750209127.494852,69985,0.6515324711799622
|
| 699 |
+
1750209256.4055629,70085,0.6476084589958191
|
| 700 |
+
1750209384.005273,70185,0.6507536768913269
|
| 701 |
+
1750209510.114496,70285,0.6517114043235779
|
| 702 |
+
1750209634.697458,70385,0.6498817205429077
|
| 703 |
+
1750209759.469044,70485,0.6528468132019043
|
| 704 |
+
1750209881.834552,70585,0.6493946313858032
|
| 705 |
+
1750210004.882837,70685,0.650033712387085
|
| 706 |
+
1750210130.674243,70785,0.6516813635826111
|
| 707 |
+
1750210250.644804,70885,0.6499975323677063
|
| 708 |
+
1750210369.9460092,70985,0.648506760597229
|
| 709 |
+
1750210487.077604,71085,0.6502401828765869
|
| 710 |
+
1750210603.498112,71185,0.6484497785568237
|
| 711 |
+
1750210723.773453,71285,0.6500245332717896
|
| 712 |
+
1750210854.9351768,71385,0.6486703157424927
|
| 713 |
+
1750210974.741968,71485,0.6504276990890503
|
| 714 |
+
1750211092.69955,71585,0.6502315998077393
|
| 715 |
+
1750211836.4115598,71698,0.6527054905891418
|
| 716 |
+
1750211959.258971,71798,0.6532187461853027
|
| 717 |
+
1750212080.137299,71898,0.6524791717529297
|
| 718 |
+
1750212206.760812,71998,0.6517573595046997
|
| 719 |
+
1750212328.7633018,72098,0.6519681215286255
|
| 720 |
+
1750212452.735042,72198,0.6514185070991516
|
| 721 |
+
1750212579.902265,72298,0.6531807780265808
|
| 722 |
+
1750212706.0983,72398,0.6497095823287964
|
| 723 |
+
1750212828.38046,72498,0.6521152257919312
|
| 724 |
+
1750212949.6319842,72598,0.6546366214752197
|
| 725 |
+
1750213069.810538,72698,0.6505435109138489
|
| 726 |
+
1750213189.514265,72798,0.6510116457939148
|
| 727 |
+
1750213309.594836,72898,0.6516385078430176
|
| 728 |
+
1750213428.384023,72998,0.6517401933670044
|
| 729 |
+
1750213546.256141,73098,0.6523290276527405
|
| 730 |
+
1750213663.961636,73198,0.6519142389297485
|
| 731 |
+
1750213780.801387,73298,0.6516415476799011
|
| 732 |
+
1750213898.225749,73398,0.6520814895629883
|
| 733 |
+
1750214016.615032,73498,0.6494430303573608
|
| 734 |
+
1750214133.553664,73598,0.6524209380149841
|
| 735 |
+
1750214250.9355168,73698,0.6498633623123169
|
| 736 |
+
1750214368.522475,73798,0.6487671732902527
|
| 737 |
+
1750214489.8051682,73898,0.6514932513237
|
| 738 |
+
1750214608.765088,73998,0.6499601602554321
|
| 739 |
+
1750214726.388865,74098,0.6509920358657837
|
| 740 |
+
1750214842.279794,74198,0.6481733918190002
|
| 741 |
+
1750214958.14559,74298,0.6512224078178406
|
| 742 |
+
1750215076.020194,74398,0.6492077112197876
|
| 743 |
+
1750215194.694057,74498,0.6519576907157898
|
| 744 |
+
1750215313.7677739,74598,0.649075984954834
|
| 745 |
+
1750215433.00785,74698,0.6498480439186096
|
| 746 |
+
1750216172.726628,74811,0.652137279510498
|
| 747 |
+
1750216292.926495,74911,0.6519228219985962
|
| 748 |
+
1750216412.879912,75011,0.6520006060600281
|
| 749 |
+
1750216533.188134,75111,0.6530429124832153
|
| 750 |
+
1750216652.7510452,75211,0.6530116200447083
|
| 751 |
+
1750216776.881959,75311,0.6532891988754272
|
| 752 |
+
1750216906.5222652,75411,0.6530269384384155
|
| 753 |
+
1750217031.058101,75511,0.6527003645896912
|
| 754 |
+
1750217152.050337,75611,0.6500183939933777
|
| 755 |
+
1750217269.059565,75711,0.6527996063232422
|
| 756 |
+
1750217392.82244,75811,0.6536691188812256
|
| 757 |
+
1750217521.202039,75911,0.6502052545547485
|
| 758 |
+
1750217641.773841,76011,0.6522555351257324
|
| 759 |
+
1750217763.52988,76111,0.6519368886947632
|
| 760 |
+
1750217881.227099,76211,0.6508572101593018
|
| 761 |
+
1750218001.397727,76311,0.6519503593444824
|
| 762 |
+
1750218132.3350358,76411,0.6496427655220032
|
| 763 |
+
1750218258.737887,76511,0.6489430069923401
|
| 764 |
+
1750218393.898423,76611,0.6514148116111755
|
| 765 |
+
1750218510.02509,76711,0.6520465612411499
|
| 766 |
+
1750218629.485386,76811,0.6529031991958618
|
| 767 |
+
1750218745.9510798,76911,0.6506954431533813
|
| 768 |
+
1750218861.8234,77011,0.6509944796562195
|
| 769 |
+
1750218976.982463,77111,0.6486415266990662
|
| 770 |
+
1750219091.593133,77211,0.6513308882713318
|
| 771 |
+
1750219207.006262,77311,0.6480122804641724
|
| 772 |
+
1750219324.491625,77411,0.6490435004234314
|
| 773 |
+
1750219443.126828,77511,0.6507389545440674
|
| 774 |
+
1750219562.186364,77611,0.6509172916412354
|
| 775 |
+
1750219682.374294,77711,0.6519840955734253
|
| 776 |
+
1750219803.256639,77811,0.6488437652587891
|
| 777 |
+
1750220547.487328,77924,0.6541770100593567
|
| 778 |
+
1750220666.5607219,78024,0.6527420282363892
|
| 779 |
+
1750220785.9198349,78124,0.6526396870613098
|
| 780 |
+
1750220905.561426,78224,0.6513082385063171
|
| 781 |
+
1750221024.579232,78324,0.6541329622268677
|
| 782 |
+
1750221145.641382,78424,0.6535055041313171
|
| 783 |
+
1750221262.671669,78524,0.6536703705787659
|
| 784 |
+
1750221380.535554,78624,0.651840090751648
|
| 785 |
+
1750221498.180607,78724,0.6509056091308594
|
| 786 |
+
1750221616.054903,78824,0.653022050857544
|
| 787 |
+
1750221736.730556,78924,0.6500710844993591
|
| 788 |
+
1750221857.297961,79024,0.6526850461959839
|
| 789 |
+
1750221975.378724,79124,0.6527885794639587
|
| 790 |
+
1750222093.4923189,79224,0.6522634625434875
|
| 791 |
+
1750222211.5956328,79324,0.6526820063591003
|
| 792 |
+
1750222329.568151,79424,0.6509130001068115
|
| 793 |
+
1750222448.042943,79524,0.65059494972229
|
| 794 |
+
1750222566.028182,79624,0.6490649580955505
|
| 795 |
+
1750222683.794375,79724,0.650840699672699
|
| 796 |
+
1750222801.529133,79824,0.651219367980957
|
| 797 |
+
1750222919.185378,79924,0.6499607563018799
|
| 798 |
+
1750223036.869143,80024,0.6507965922355652
|
| 799 |
+
1750223154.4993632,80124,0.652316153049469
|
| 800 |
+
1750223273.096011,80224,0.6516691446304321
|
| 801 |
+
1750223392.282187,80324,0.6491348147392273
|
| 802 |
+
1750223511.047883,80424,0.648965060710907
|
| 803 |
+
1750223629.70635,80524,0.6500606536865234
|
| 804 |
+
1750223747.561592,80624,0.6500563621520996
|
| 805 |
+
1750223865.3728168,80724,0.6505728960037231
|
| 806 |
+
1750223983.192951,80824,0.6500864028930664
|
| 807 |
+
1750224100.908614,80924,0.6509608030319214
|
| 808 |
+
1750224826.8349671,81037,0.652090847492218
|
| 809 |
+
1750224943.717508,81137,0.6527524590492249
|
| 810 |
+
1750225060.981622,81237,0.6508351564407349
|
| 811 |
+
1750225178.264178,81337,0.6526764631271362
|
| 812 |
+
1750225295.705386,81437,0.6532114148139954
|
| 813 |
+
1750225412.985178,81537,0.6510863900184631
|
| 814 |
+
1750225531.5854342,81637,0.6527928709983826
|
| 815 |
+
1750225652.238388,81737,0.6523639559745789
|
| 816 |
+
1750225769.389229,81837,0.6519240140914917
|
| 817 |
+
1750225886.7443252,81937,0.6535325050354004
|
| 818 |
+
1750226004.139014,82037,0.6526923775672913
|
| 819 |
+
1750226121.744965,82137,0.6520079374313354
|
| 820 |
+
1750226239.267585,82237,0.6528774499893188
|
| 821 |
+
1750226356.8135831,82337,0.6542622447013855
|
| 822 |
+
1750226474.220803,82437,0.6500110030174255
|
| 823 |
+
1750226591.654186,82537,0.6520367860794067
|
| 824 |
+
1750226709.390264,82637,0.6523934006690979
|
| 825 |
+
1750226827.765235,82737,0.6517738699913025
|
| 826 |
+
1750226945.789679,82837,0.649882972240448
|
| 827 |
+
1750227063.175751,82937,0.6507132649421692
|
| 828 |
+
1750227180.6074429,83037,0.6521985530853271
|
| 829 |
+
1750227298.25555,83137,0.6523768305778503
|
| 830 |
+
1750227415.797388,83237,0.6505563855171204
|
| 831 |
+
1750227533.07081,83337,0.6523523330688477
|
| 832 |
+
1750227650.184134,83437,0.6497150659561157
|
| 833 |
+
1750227767.183999,83537,0.6509068608283997
|
| 834 |
+
1750227884.621568,83637,0.649048388004303
|
| 835 |
+
1750228001.723806,83737,0.6508872509002686
|
| 836 |
+
1750228118.7199092,83837,0.6515741348266602
|
| 837 |
+
1750228235.600491,83937,0.6501850485801697
|
| 838 |
+
1750228352.460177,84037,0.6503933668136597
|
| 839 |
+
1750229072.101535,84150,0.6527323126792908
|
| 840 |
+
1750229188.532489,84250,0.6524515748023987
|
| 841 |
+
1750229305.9727669,84350,0.6531286835670471
|
| 842 |
+
1750229422.7452438,84450,0.6521519422531128
|
| 843 |
+
1750229542.9914508,84550,0.6535490155220032
|
| 844 |
+
1750229660.802776,84650,0.6511911749839783
|
| 845 |
+
1750229777.73071,84750,0.6520624756813049
|
| 846 |
+
1750229894.272873,84850,0.6516923904418945
|
| 847 |
+
1750230011.0650299,84950,0.6546746492385864
|
| 848 |
+
1750230127.8485062,85050,0.651620090007782
|
| 849 |
+
1750230244.4886231,85150,0.6534650921821594
|
| 850 |
+
1750230361.215148,85250,0.6513762474060059
|
| 851 |
+
1750230479.274983,85350,0.653814971446991
|
| 852 |
+
1750230596.4249868,85450,0.6522212028503418
|
| 853 |
+
1750230713.1627898,85550,0.6504136323928833
|
| 854 |
+
1750230829.994378,85650,0.6521758437156677
|
| 855 |
+
1750230946.848085,85750,0.6543266177177429
|
| 856 |
+
1750231063.6933088,85850,0.652149498462677
|
| 857 |
+
1750231180.7751548,85950,0.652924656867981
|
| 858 |
+
1750231297.4416032,86050,0.6517003774642944
|
| 859 |
+
1750231414.00541,86150,0.6498719453811646
|
| 860 |
+
1750231530.459255,86250,0.6528701186180115
|
| 861 |
+
1750231646.981014,86350,0.652484655380249
|
| 862 |
+
1750231763.465852,86450,0.6517757177352905
|
| 863 |
+
1750231879.910605,86550,0.6511158347129822
|
| 864 |
+
1750231996.4017532,86650,0.6514742374420166
|
| 865 |
+
1750232112.817041,86750,0.6500385999679565
|
| 866 |
+
1750232229.149757,86850,0.6491495370864868
|
| 867 |
+
1750232345.40557,86950,0.6505526900291443
|
| 868 |
+
1750232461.7093189,87050,0.651520848274231
|
| 869 |
+
1750232577.5444229,87150,0.6499301195144653
|
| 870 |
+
1750233293.810208,87263,0.654270350933075
|
| 871 |
+
1750233412.682642,87363,0.6527132391929626
|
| 872 |
+
1750233532.513292,87463,0.6526311039924622
|
| 873 |
+
1750233648.786123,87563,0.6516342163085938
|
| 874 |
+
1750233764.80583,87663,0.6509135961532593
|
| 875 |
+
1750233880.8387609,87763,0.6523756384849548
|
| 876 |
+
1750233997.011285,87863,0.6529742479324341
|
| 877 |
+
1750234114.62957,87963,0.6528559923171997
|
| 878 |
+
1750234230.975869,88063,0.6532873511314392
|
| 879 |
+
1750234347.233397,88163,0.6519926190376282
|
| 880 |
+
1750234463.849152,88263,0.6529381275177002
|
| 881 |
+
1750234580.176417,88363,0.6527230143547058
|
| 882 |
+
1750234696.535578,88463,0.6503474116325378
|
| 883 |
+
1750234812.71581,88563,0.6534693837165833
|
| 884 |
+
1750234928.711324,88663,0.6540380120277405
|
| 885 |
+
1750235044.749944,88763,0.6518866419792175
|
| 886 |
+
1750235160.68432,88863,0.6515116691589355
|
| 887 |
+
1750235276.5570402,88963,0.652815580368042
|
| 888 |
+
1750235392.382603,89063,0.6518388390541077
|
| 889 |
+
1750235508.203466,89163,0.6518087983131409
|
| 890 |
+
1750235624.0762482,89263,0.6527689695358276
|
| 891 |
+
1750235739.848402,89363,0.6516948342323303
|
| 892 |
+
1750235855.594966,89463,0.652318000793457
|
| 893 |
+
1750235971.4175649,89563,0.6520777940750122
|
| 894 |
+
1750236087.155264,89663,0.653346836566925
|
| 895 |
+
1750236202.856292,89763,0.6505588293075562
|
| 896 |
+
1750236318.631579,89863,0.6495183706283569
|
| 897 |
+
1750236434.248589,89963,0.6511139869689941
|
| 898 |
+
1750236550.018139,90063,0.6504080891609192
|
| 899 |
+
1750236665.693109,90163,0.6510986685752869
|
| 900 |
+
1750236781.38388,90263,0.6509822010993958
|
| 901 |
+
1750237498.135129,90376,0.6541507244110107
|
| 902 |
+
1750237615.0882099,90476,0.6536709666252136
|
| 903 |
+
1750237732.1482828,90576,0.6522940993309021
|
| 904 |
+
1750237848.0962281,90676,0.6530110239982605
|
| 905 |
+
1750237963.832159,90776,0.651495099067688
|
| 906 |
+
1750238079.56806,90876,0.6530711054801941
|
| 907 |
+
1750238195.2640378,90976,0.6519693732261658
|
| 908 |
+
1750238310.9922712,91076,0.6534608006477356
|
| 909 |
+
1750238426.7211242,91176,0.6535612940788269
|
| 910 |
+
1750238542.1769311,91276,0.652286171913147
|
| 911 |
+
1750238657.767673,91376,0.6521991491317749
|
| 912 |
+
1750238773.9103758,91476,0.6521053910255432
|
| 913 |
+
1750238889.683209,91576,0.6514387130737305
|
| 914 |
+
1750239005.221324,91676,0.6537463068962097
|
| 915 |
+
1750239120.568118,91776,0.6524190902709961
|
| 916 |
+
1750239235.766874,91876,0.6518057584762573
|
| 917 |
+
1750239350.995439,91976,0.652538001537323
|
| 918 |
+
1750239466.191257,92076,0.6496225595474243
|
| 919 |
+
1750239581.3315861,92176,0.6545753479003906
|
| 920 |
+
1750239696.6121259,92276,0.6525925397872925
|
| 921 |
+
1750239812.100725,92376,0.6508376002311707
|
| 922 |
+
1750239927.215979,92476,0.6518186330795288
|
| 923 |
+
1750240042.182482,92576,0.6532205939292908
|
| 924 |
+
1750240157.28223,92676,0.6516004800796509
|
| 925 |
+
1750240271.974707,92776,0.6527561545372009
|
| 926 |
+
1750240386.43259,92876,0.6521452069282532
|
| 927 |
+
1750240501.080244,92976,0.6521005034446716
|
| 928 |
+
1750240615.6359181,93076,0.6517248749732971
|
| 929 |
+
1750240730.188971,93176,0.6491905450820923
|
| 930 |
+
1750240845.331557,93276,0.6526237726211548
|
| 931 |
+
1750240960.604365,93376,0.650396466255188
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750850577.6079428,99,0.28115931153297424
|
| 3 |
+
1750850694.707842,199,0.37388479709625244
|
| 4 |
+
1750850811.767244,299,0.4071066081523895
|
| 5 |
+
1750850928.9375691,399,0.43296018242836
|
| 6 |
+
1750851045.901768,499,0.45245465636253357
|
| 7 |
+
1750851162.862512,599,0.46473467350006104
|
| 8 |
+
1750852597.229173,722,0.4919930696487427
|
| 9 |
+
1750852717.123183,822,0.5057536959648132
|
| 10 |
+
1750852833.988262,922,0.5127604007720947
|
| 11 |
+
1750852950.821522,1022,0.520756721496582
|
| 12 |
+
1750853067.686709,1122,0.5296911597251892
|
| 13 |
+
1750853184.429913,1222,0.5354975461959839
|
| 14 |
+
1750854663.42876,1345,0.5358098745346069
|
| 15 |
+
1750854780.403699,1445,0.5411611795425415
|
| 16 |
+
1750854898.699645,1545,0.5472536683082581
|
| 17 |
+
1750855019.05928,1645,0.5525686144828796
|
| 18 |
+
1750855138.038317,1745,0.5584620237350464
|
| 19 |
+
1750855255.6974368,1845,0.5636035799980164
|
| 20 |
+
1750856739.877013,1968,0.5708346962928772
|
| 21 |
+
1750856857.0484312,2068,0.5771746039390564
|
| 22 |
+
1750856976.928643,2168,0.5785061120986938
|
| 23 |
+
1750857094.258836,2268,0.5807254910469055
|
| 24 |
+
1750857217.580058,2368,0.5823449492454529
|
| 25 |
+
1750857334.0503318,2468,0.5875085592269897
|
| 26 |
+
1750858807.092771,2591,0.5817051529884338
|
| 27 |
+
1750858925.080086,2691,0.5819307565689087
|
| 28 |
+
1750859042.594805,2791,0.5876415371894836
|
| 29 |
+
1750859160.043197,2891,0.5881660580635071
|
| 30 |
+
1750859276.457642,2991,0.5912389755249023
|
| 31 |
+
1750859396.0647879,3091,0.5902340412139893
|
| 32 |
+
1750860867.422347,3214,0.5981722474098206
|
| 33 |
+
1750860983.654924,3314,0.6010686159133911
|
| 34 |
+
1750861107.934299,3414,0.600269615650177
|
| 35 |
+
1750861242.432056,3514,0.6020055413246155
|
| 36 |
+
1750861357.54804,3614,0.60275799036026
|
| 37 |
+
1750861473.101582,3714,0.6043713092803955
|
| 38 |
+
1750862942.001462,3837,0.5971077084541321
|
| 39 |
+
1750863059.0574589,3937,0.5979332327842712
|
| 40 |
+
1750863179.12206,4037,0.5981176495552063
|
| 41 |
+
1750863301.301445,4137,0.5991507172584534
|
| 42 |
+
1750863419.9197068,4237,0.600868284702301
|
| 43 |
+
1750863540.890483,4337,0.5996053814888
|
| 44 |
+
1750865026.1622899,4460,0.6069161891937256
|
| 45 |
+
1750865148.40977,4560,0.6094215512275696
|
| 46 |
+
1750865269.679025,4660,0.6069871187210083
|
| 47 |
+
1750865390.869505,4760,0.6085416674613953
|
| 48 |
+
1750865515.5613399,4860,0.6078510880470276
|
| 49 |
+
1750865643.630974,4960,0.6096580624580383
|
| 50 |
+
1750867176.255392,5083,0.5974678993225098
|
| 51 |
+
1750867299.122742,5183,0.5987929105758667
|
| 52 |
+
1750867430.3805718,5283,0.5946066379547119
|
| 53 |
+
1750867569.451477,5383,0.601796567440033
|
| 54 |
+
1750867703.357517,5483,0.598520815372467
|
| 55 |
+
1750867838.90882,5583,0.6008124947547913
|
| 56 |
+
1750869442.570985,5706,0.6059330105781555
|
| 57 |
+
1750869573.518789,5806,0.6075226664543152
|
| 58 |
+
1750869704.9694211,5906,0.604717493057251
|
| 59 |
+
1750869835.4636421,6006,0.6086176633834839
|
| 60 |
+
1750869963.869592,6106,0.6054001450538635
|
| 61 |
+
1750870092.8343601,6206,0.6075674295425415
|
| 62 |
+
1750870996.7552729,31229,0.6220373511314392
|
| 63 |
+
1750871130.67268,31329,0.6198210716247559
|
| 64 |
+
1750871275.926871,31429,0.6185900568962097
|
| 65 |
+
1750871416.433268,31529,0.6199577450752258
|
| 66 |
+
1750871558.685056,31629,0.6209883689880371
|
| 67 |
+
1750871703.876673,31729,0.6211740374565125
|
| 68 |
+
1750871831.815979,31829,0.6202947497367859
|
| 69 |
+
1750871969.127967,31929,0.6218584775924683
|
| 70 |
+
1750872120.680382,32029,0.6228578686714172
|
| 71 |
+
1750872259.82047,32129,0.6231299042701721
|
| 72 |
+
1750872388.811221,32229,0.6221286654472351
|
| 73 |
+
1750872548.4661372,32329,0.6245875954627991
|
| 74 |
+
1750872705.31099,32429,0.6251531839370728
|
| 75 |
+
1750872863.2039318,32529,0.6232358813285828
|
| 76 |
+
1750873029.33886,32629,0.621163010597229
|
| 77 |
+
1750873206.787233,32729,0.6249099373817444
|
| 78 |
+
1750873366.4264479,32829,0.625666081905365
|
| 79 |
+
1750873522.343495,32929,0.6261237859725952
|
| 80 |
+
1750873679.8278,33029,0.6246703267097473
|
| 81 |
+
1750873838.1228411,33129,0.6268603205680847
|
| 82 |
+
1750873995.625974,33229,0.6253376007080078
|
| 83 |
+
1750874154.71947,33329,0.6255349516868591
|
| 84 |
+
1750874314.983135,33429,0.6262009739875793
|
| 85 |
+
1750874460.9879549,33529,0.6277205944061279
|
| 86 |
+
1750874606.7178721,33629,0.6270428895950317
|
| 87 |
+
1750874751.407423,33729,0.6290870308876038
|
| 88 |
+
1750874904.389549,33829,0.6264516115188599
|
| 89 |
+
1750875043.44718,33929,0.6280067563056946
|
| 90 |
+
1750875167.557311,34029,0.6270428895950317
|
| 91 |
+
1750875295.4132302,34129,0.6269865036010742
|
| 92 |
+
1750875414.519587,34229,0.6273002624511719
|
| 93 |
+
1750876217.3065972,34342,0.6303669214248657
|
| 94 |
+
1750876336.9180648,34442,0.6308363676071167
|
| 95 |
+
1750876456.8001559,34542,0.6302604079246521
|
| 96 |
+
1750876578.2574131,34642,0.6324717998504639
|
| 97 |
+
1750876697.580331,34742,0.6301899552345276
|
| 98 |
+
1750876818.058099,34842,0.6293897032737732
|
| 99 |
+
1750876941.269356,34942,0.6334614157676697
|
| 100 |
+
1750877067.386539,35042,0.6296336054801941
|
| 101 |
+
1750877201.552383,35142,0.6310955882072449
|
| 102 |
+
1750877337.687994,35242,0.6322169303894043
|
| 103 |
+
1750877470.697649,35342,0.6314730644226074
|
| 104 |
+
1750877601.397642,35442,0.6331887245178223
|
| 105 |
+
1750877740.69434,35542,0.6329497694969177
|
| 106 |
+
1750877865.283885,35642,0.6317598223686218
|
| 107 |
+
1750877985.03089,35742,0.6339185237884521
|
| 108 |
+
1750878106.057319,35842,0.6319141983985901
|
| 109 |
+
1750878228.407137,35942,0.6321470737457275
|
| 110 |
+
1750878349.436917,36042,0.631462037563324
|
| 111 |
+
1750878469.454664,36142,0.6317794322967529
|
| 112 |
+
1750878600.80924,36242,0.6296960711479187
|
| 113 |
+
1750878735.579987,36342,0.6366875171661377
|
| 114 |
+
1750878864.145241,36442,0.6329087018966675
|
| 115 |
+
1750878989.7037902,36542,0.6324595808982849
|
| 116 |
+
1750879120.459902,36642,0.6337567567825317
|
| 117 |
+
1750879260.620251,36742,0.6324607729911804
|
| 118 |
+
1750879395.440415,36842,0.6325894594192505
|
| 119 |
+
1750879515.2062788,36942,0.6337254643440247
|
| 120 |
+
1750879635.8409731,37042,0.6325735449790955
|
| 121 |
+
1750879754.3542411,37142,0.6336103081703186
|
| 122 |
+
1750879873.1619258,37242,0.6338854432106018
|
| 123 |
+
1750880001.208302,37342,0.6328412890434265
|
| 124 |
+
1750880788.32362,37455,0.6362589597702026
|
| 125 |
+
1750880915.265224,37555,0.6373688578605652
|
| 126 |
+
1750881043.94467,37655,0.6366102695465088
|
| 127 |
+
1750881171.139327,37755,0.6367561221122742
|
| 128 |
+
1750881303.697291,37855,0.6348167657852173
|
| 129 |
+
1750881431.8726342,37955,0.6375747323036194
|
| 130 |
+
1750881555.054423,38055,0.638455867767334
|
| 131 |
+
1750881678.0593972,38155,0.6361470818519592
|
| 132 |
+
1750881802.045056,38255,0.6364325881004333
|
| 133 |
+
1750881924.236502,38355,0.6367426514625549
|
| 134 |
+
1750882046.318992,38455,0.6358743906021118
|
| 135 |
+
1750882168.6089668,38555,0.6360135078430176
|
| 136 |
+
1750882290.50672,38655,0.6362383365631104
|
| 137 |
+
1750882417.132798,38755,0.6376954913139343
|
| 138 |
+
1750882541.07625,38855,0.6359668970108032
|
| 139 |
+
1750882665.201195,38955,0.6364895701408386
|
| 140 |
+
1750882790.203367,39055,0.6373271942138672
|
| 141 |
+
1750882934.318351,39155,0.6360600590705872
|
| 142 |
+
1750883072.642247,39255,0.6367928981781006
|
| 143 |
+
1750883203.800666,39355,0.634830892086029
|
| 144 |
+
1750883327.9033291,39455,0.6364111304283142
|
| 145 |
+
1750883458.7586079,39555,0.6354846954345703
|
| 146 |
+
1750883578.854849,39655,0.6363229155540466
|
| 147 |
+
1750883697.435915,39755,0.6364748477935791
|
| 148 |
+
1750883816.138189,39855,0.6361415386199951
|
| 149 |
+
1750883934.742814,39955,0.6367193460464478
|
| 150 |
+
1750884058.696786,40055,0.6363431215286255
|
| 151 |
+
1750884182.448661,40155,0.6362065076828003
|
| 152 |
+
1750884301.546071,40255,0.6375808715820312
|
| 153 |
+
1750884421.238451,40355,0.6364301443099976
|
| 154 |
+
1750884542.540622,40455,0.6373621225357056
|
| 155 |
+
1750885360.973109,40568,0.6391980648040771
|
| 156 |
+
1750885481.833812,40668,0.6400465965270996
|
| 157 |
+
1750885599.6937668,40768,0.6411629915237427
|
| 158 |
+
1750885718.592056,40868,0.6390974521636963
|
| 159 |
+
1750885840.1120589,40968,0.638247549533844
|
| 160 |
+
1750885960.0050821,41068,0.6382505893707275
|
| 161 |
+
1750886085.665708,41168,0.6392971873283386
|
| 162 |
+
1750886206.945123,41268,0.6405600309371948
|
| 163 |
+
1750886327.482077,41368,0.6364178657531738
|
| 164 |
+
1750886451.088746,41468,0.6395796537399292
|
| 165 |
+
1750886572.309311,41568,0.6387757062911987
|
| 166 |
+
1750886691.2056682,41668,0.6397720575332642
|
| 167 |
+
1750886821.646378,41768,0.6410680413246155
|
| 168 |
+
1750886952.998621,41868,0.638858437538147
|
| 169 |
+
1750887076.256232,41968,0.6374497413635254
|
| 170 |
+
1750887200.8472629,42068,0.6399393081665039
|
| 171 |
+
1750887323.13034,42168,0.6400049328804016
|
| 172 |
+
1750887452.120953,42268,0.6396507620811462
|
| 173 |
+
1750887579.971466,42368,0.6391195058822632
|
| 174 |
+
1750887710.160838,42468,0.6401317119598389
|
| 175 |
+
1750887836.69287,42568,0.637844979763031
|
| 176 |
+
1750887961.934599,42668,0.6387500166893005
|
| 177 |
+
1750888085.3976698,42768,0.6408082246780396
|
| 178 |
+
1750888208.27091,42868,0.640357255935669
|
| 179 |
+
1750888330.497325,42968,0.6409405469894409
|
| 180 |
+
1750888458.281606,43068,0.6405863761901855
|
| 181 |
+
1750888585.1346078,43168,0.6402457356452942
|
| 182 |
+
1750888713.844177,43268,0.6408848166465759
|
| 183 |
+
1750888847.294386,43368,0.6382824778556824
|
| 184 |
+
1750888978.981641,43468,0.6391090750694275
|
| 185 |
+
1750889120.402831,43568,0.6393290162086487
|
| 186 |
+
1750889934.630212,43681,0.6431518197059631
|
| 187 |
+
1750890071.319562,43781,0.6426819562911987
|
| 188 |
+
1750890213.91127,43881,0.6394056081771851
|
| 189 |
+
1750890344.088576,43981,0.6426188945770264
|
| 190 |
+
1750890474.425335,44081,0.6417880058288574
|
| 191 |
+
1750890608.4396338,44181,0.6409375071525574
|
| 192 |
+
1750890749.367145,44281,0.642723023891449
|
| 193 |
+
1750890889.3314428,44381,0.64364093542099
|
| 194 |
+
1750891032.219209,44481,0.6411648392677307
|
| 195 |
+
1750891165.964857,44581,0.6419350504875183
|
| 196 |
+
1750891293.480545,44681,0.6405717134475708
|
| 197 |
+
1750891416.125021,44781,0.6419503688812256
|
| 198 |
+
1750891540.518293,44881,0.6417635083198547
|
| 199 |
+
1750891660.080145,44981,0.6410398483276367
|
| 200 |
+
1750891781.948166,45081,0.6419135928153992
|
| 201 |
+
1750891905.11839,45181,0.6410024762153625
|
| 202 |
+
1750892029.265239,45281,0.644433856010437
|
| 203 |
+
1750892153.8494442,45381,0.6413002610206604
|
| 204 |
+
1750892280.011641,45481,0.6401182413101196
|
| 205 |
+
1750892410.555498,45581,0.6407414078712463
|
| 206 |
+
1750892536.069839,45681,0.6414871215820312
|
| 207 |
+
1750892659.696752,45781,0.6405300498008728
|
| 208 |
+
1750892782.067382,45881,0.6402732729911804
|
| 209 |
+
1750892911.29154,45981,0.6424650549888611
|
| 210 |
+
1750893033.8896341,46081,0.6400704383850098
|
| 211 |
+
1750893154.7128499,46181,0.6428565979003906
|
| 212 |
+
1750893276.050278,46281,0.6393039226531982
|
| 213 |
+
1750893399.0719528,46381,0.6423039436340332
|
| 214 |
+
1750893521.6037629,46481,0.6420502662658691
|
| 215 |
+
1750893641.629602,46581,0.641614556312561
|
| 216 |
+
1750893779.075807,46681,0.6429068446159363
|
| 217 |
+
1750894583.6880262,46794,0.6428733468055725
|
| 218 |
+
1750894711.184323,46894,0.6453744173049927
|
| 219 |
+
1750894839.150917,46994,0.6419810056686401
|
| 220 |
+
1750894967.514038,47094,0.6449546813964844
|
| 221 |
+
1750895094.755141,47194,0.6430398225784302
|
| 222 |
+
1750895222.492712,47294,0.6443498730659485
|
| 223 |
+
1750895351.571402,47394,0.6427984237670898
|
| 224 |
+
1750895479.660096,47494,0.642681360244751
|
| 225 |
+
1750895606.71259,47594,0.6447800397872925
|
| 226 |
+
1750895739.731491,47694,0.6446108818054199
|
| 227 |
+
1750895868.058298,47794,0.6433719396591187
|
| 228 |
+
1750895997.39312,47894,0.6455802917480469
|
| 229 |
+
1750896124.759792,47994,0.6433308720588684
|
| 230 |
+
1750896250.305929,48094,0.6430606842041016
|
| 231 |
+
1750896374.635793,48194,0.6415778398513794
|
| 232 |
+
1750896498.919508,48294,0.6433780789375305
|
| 233 |
+
1750896623.4286542,48394,0.6438223123550415
|
| 234 |
+
1750896747.847158,48494,0.6433033347129822
|
| 235 |
+
1750896876.367622,48594,0.6434356570243835
|
| 236 |
+
1750897001.81382,48694,0.6426544189453125
|
| 237 |
+
1750897127.529459,48794,0.6441066265106201
|
| 238 |
+
1750897251.997642,48894,0.6419779658317566
|
| 239 |
+
1750897391.9284601,48994,0.641223669052124
|
| 240 |
+
1750897530.420704,49094,0.6415784358978271
|
| 241 |
+
1750897656.3747,49194,0.642179548740387
|
| 242 |
+
1750897781.185787,49294,0.6422781944274902
|
| 243 |
+
1750897905.838592,49394,0.6428572535514832
|
| 244 |
+
1750898030.6905658,49494,0.6434853076934814
|
| 245 |
+
1750898155.156597,49594,0.6452162861824036
|
| 246 |
+
1750898279.965455,49694,0.6438688635826111
|
| 247 |
+
1750898404.8010619,49794,0.6422671675682068
|
| 248 |
+
1750899212.633543,49907,0.6464402675628662
|
| 249 |
+
1750899343.2950358,50007,0.646129310131073
|
| 250 |
+
1750899472.941449,50107,0.6457077264785767
|
| 251 |
+
1750899606.5571651,50207,0.645046591758728
|
| 252 |
+
1750899737.428975,50307,0.643833339214325
|
| 253 |
+
1750899877.032158,50407,0.6462070941925049
|
| 254 |
+
1750900005.486089,50507,0.6452683806419373
|
| 255 |
+
1750900133.916393,50607,0.6452322006225586
|
| 256 |
+
1750900256.1888168,50707,0.6424969434738159
|
| 257 |
+
1750900380.848832,50807,0.6473706960678101
|
| 258 |
+
1750900504.3940322,50907,0.6434172987937927
|
| 259 |
+
1750900627.8239639,51007,0.6444602012634277
|
| 260 |
+
1750900753.072267,51107,0.643002450466156
|
| 261 |
+
1750900886.204142,51207,0.6453762054443359
|
| 262 |
+
1750901035.190274,51307,0.6462138295173645
|
| 263 |
+
1750901163.328012,51407,0.6459166407585144
|
| 264 |
+
1750901286.782429,51507,0.6438118815422058
|
| 265 |
+
1750901409.128047,51607,0.6438590884208679
|
| 266 |
+
1750901532.0578618,51707,0.6446274518966675
|
| 267 |
+
1750901660.4352782,51807,0.6424828171730042
|
| 268 |
+
1750901792.7007182,51907,0.6434687376022339
|
| 269 |
+
1750901923.531831,52007,0.6444613933563232
|
| 270 |
+
1750902052.132679,52107,0.6430226564407349
|
| 271 |
+
1750902179.658934,52207,0.644810676574707
|
| 272 |
+
1750902309.190856,52307,0.6444748640060425
|
| 273 |
+
1750902436.6699321,52407,0.6428700685501099
|
| 274 |
+
1750902564.216942,52507,0.6438847780227661
|
| 275 |
+
1750902690.0879822,52607,0.6460637450218201
|
| 276 |
+
1750902813.023041,52707,0.6452077031135559
|
| 277 |
+
1750902939.8120599,52807,0.6433609127998352
|
| 278 |
+
1750903070.9000618,52907,0.6433253884315491
|
| 279 |
+
1750903862.9156759,53020,0.6475154161453247
|
| 280 |
+
1750903986.861263,53120,0.646629273891449
|
| 281 |
+
1750904112.39692,53220,0.6469932794570923
|
| 282 |
+
1750904239.780994,53320,0.6450784206390381
|
| 283 |
+
1750904366.4874518,53420,0.6478762030601501
|
| 284 |
+
1750904496.969588,53520,0.6455214619636536
|
| 285 |
+
1750904635.007241,53620,0.6467383503913879
|
| 286 |
+
1750904757.660865,53720,0.6458204388618469
|
| 287 |
+
1750904880.1947489,53820,0.6460667848587036
|
| 288 |
+
1750905002.967725,53920,0.6446973085403442
|
| 289 |
+
1750905125.842842,54020,0.6452922821044922
|
| 290 |
+
1750905248.7482188,54120,0.6465686559677124
|
| 291 |
+
1750905371.10138,54220,0.6452928781509399
|
| 292 |
+
1750905493.773238,54320,0.6482236385345459
|
| 293 |
+
1750905616.159102,54420,0.6453559994697571
|
| 294 |
+
1750905738.843713,54520,0.6479975581169128
|
| 295 |
+
1750905861.588533,54620,0.6480312347412109
|
| 296 |
+
1750905985.703113,54720,0.6466445922851562
|
| 297 |
+
1750906116.521803,54820,0.6444534063339233
|
| 298 |
+
1750906245.937283,54920,0.6436856389045715
|
| 299 |
+
1750906372.03501,55020,0.6463027000427246
|
| 300 |
+
1750906500.091948,55120,0.6455140709877014
|
| 301 |
+
1750906631.044678,55220,0.6453578472137451
|
| 302 |
+
1750906762.9924512,55320,0.6443836092948914
|
| 303 |
+
1750906890.046156,55420,0.645324170589447
|
| 304 |
+
1750907025.419771,55520,0.6473572254180908
|
| 305 |
+
1750907151.996316,55620,0.6437892317771912
|
| 306 |
+
1750907280.478845,55720,0.6435680389404297
|
| 307 |
+
1750907410.0027452,55820,0.6442157030105591
|
| 308 |
+
1750907537.871084,55920,0.6427842974662781
|
| 309 |
+
1750907666.4693801,56020,0.6448125243186951
|
| 310 |
+
1750908471.8054981,56133,0.649034857749939
|
| 311 |
+
1750908598.565808,56233,0.6494669318199158
|
| 312 |
+
1750908727.921131,56333,0.6510263681411743
|
| 313 |
+
1750908855.573693,56433,0.6477836966514587
|
| 314 |
+
1750908991.2967548,56533,0.6450141072273254
|
| 315 |
+
1750909131.634036,56633,0.6461188793182373
|
| 316 |
+
1750909284.3952,56733,0.647019624710083
|
| 317 |
+
1750909411.5474439,56833,0.646393358707428
|
| 318 |
+
1750909536.3520918,56933,0.6487659215927124
|
| 319 |
+
1750909658.71984,57033,0.6472156643867493
|
| 320 |
+
1750909780.587345,57133,0.6461611390113831
|
| 321 |
+
1750909910.2807622,57233,0.646244466304779
|
| 322 |
+
1750910045.99917,57333,0.6470863819122314
|
| 323 |
+
1750910185.240757,57433,0.643696665763855
|
| 324 |
+
1750910323.0858161,57533,0.6463320851325989
|
| 325 |
+
1750910463.68661,57633,0.6454797983169556
|
| 326 |
+
1750910596.877676,57733,0.648062527179718
|
| 327 |
+
1750910732.9618618,57833,0.6459338068962097
|
| 328 |
+
1750910862.7883341,57933,0.643030047416687
|
| 329 |
+
1750910994.566977,58033,0.647784948348999
|
| 330 |
+
1750911116.712628,58133,0.6461188793182373
|
| 331 |
+
1750911242.518239,58233,0.6463027000427246
|
| 332 |
+
1750911373.846729,58333,0.6458002328872681
|
| 333 |
+
1750911503.935261,58433,0.647658109664917
|
| 334 |
+
1750911626.761462,58533,0.6480233073234558
|
| 335 |
+
1750911755.233547,58633,0.6457616686820984
|
| 336 |
+
1750911888.391101,58733,0.6468002200126648
|
| 337 |
+
1750912013.4667919,58833,0.6444056630134583
|
| 338 |
+
1750912133.361022,58933,0.646145224571228
|
| 339 |
+
1750912255.219611,59033,0.6447996497154236
|
| 340 |
+
1750912378.537761,59133,0.6448271870613098
|
| 341 |
+
1750913179.433624,59246,0.6501647233963013
|
| 342 |
+
1750913303.739379,59346,0.6485196352005005
|
| 343 |
+
1750913425.917527,59446,0.6497438549995422
|
| 344 |
+
1750913547.49926,59546,0.647741436958313
|
| 345 |
+
1750913670.900279,59646,0.6488510966300964
|
| 346 |
+
1750913793.327086,59746,0.64717036485672
|
| 347 |
+
1750913914.975698,59846,0.6483498811721802
|
| 348 |
+
1750914039.2794409,59946,0.6461820006370544
|
| 349 |
+
1750914162.671892,60046,0.6479123830795288
|
| 350 |
+
1750914283.225902,60146,0.6449393630027771
|
| 351 |
+
1750914411.7684639,60246,0.6477500200271606
|
| 352 |
+
1750914539.323285,60346,0.6502971649169922
|
| 353 |
+
1750914659.7170491,60446,0.6488609313964844
|
| 354 |
+
1750914781.342937,60546,0.6469595432281494
|
| 355 |
+
1750914904.180421,60646,0.6473774313926697
|
| 356 |
+
1750915027.264678,60746,0.6466066241264343
|
| 357 |
+
1750915150.558697,60846,0.6461660861968994
|
| 358 |
+
1750915273.552982,60946,0.6473535299301147
|
| 359 |
+
1750915398.620424,61046,0.6482916474342346
|
| 360 |
+
1750915524.7711182,61146,0.6480728983879089
|
| 361 |
+
1750915647.482632,61246,0.647158682346344
|
| 362 |
+
1750915770.944706,61346,0.647311270236969
|
| 363 |
+
1750915893.186027,61446,0.6472414135932922
|
| 364 |
+
1750916015.889415,61546,0.6462309956550598
|
| 365 |
+
1750916138.166996,61646,0.6468639969825745
|
| 366 |
+
1750916260.279406,61746,0.6477304100990295
|
| 367 |
+
1750916382.71929,61846,0.645532488822937
|
| 368 |
+
1750916505.05823,61946,0.6470796465873718
|
| 369 |
+
1750916627.2343628,62046,0.6467499732971191
|
| 370 |
+
1750916749.403017,62146,0.6446335911750793
|
| 371 |
+
1750916871.43501,62246,0.6444540619850159
|
| 372 |
+
1750917670.038614,62359,0.647418737411499
|
| 373 |
+
1750917792.02987,62459,0.6491881012916565
|
| 374 |
+
1750917917.474452,62559,0.650884211063385
|
| 375 |
+
1750918043.013752,62659,0.648897647857666
|
| 376 |
+
1750918174.5421581,62759,0.6486213207244873
|
| 377 |
+
1750918308.542767,62859,0.6478437781333923
|
| 378 |
+
1750918438.4512389,62959,0.6487138271331787
|
| 379 |
+
1750918567.373274,63059,0.6486942172050476
|
| 380 |
+
1750918692.689415,63159,0.6492941379547119
|
| 381 |
+
1750918818.542409,63259,0.6484093070030212
|
| 382 |
+
1750918940.721191,63359,0.6445998549461365
|
| 383 |
+
1750919071.046887,63459,0.6481139659881592
|
| 384 |
+
1750919195.254519,63559,0.6460208296775818
|
| 385 |
+
1750919317.589064,63659,0.6489405632019043
|
| 386 |
+
1750919439.178564,63759,0.6469509601593018
|
| 387 |
+
1750919561.135176,63859,0.6483192443847656
|
| 388 |
+
1750919682.895743,63959,0.6480097770690918
|
| 389 |
+
1750919804.48234,64059,0.6475710868835449
|
| 390 |
+
1750919926.20049,64159,0.6472879648208618
|
| 391 |
+
1750920047.718527,64259,0.6479785442352295
|
| 392 |
+
1750920169.184226,64359,0.6482456922531128
|
| 393 |
+
1750920290.848407,64459,0.6483646035194397
|
| 394 |
+
1750920412.478467,64559,0.6474166512489319
|
| 395 |
+
1750920534.211891,64659,0.6492592096328735
|
| 396 |
+
1750920655.893172,64759,0.6492279171943665
|
| 397 |
+
1750920777.669284,64859,0.6471158266067505
|
| 398 |
+
1750920899.403708,64959,0.6476764678955078
|
| 399 |
+
1750921025.0799801,65059,0.6467646956443787
|
| 400 |
+
1750921150.997535,65159,0.6486464738845825
|
| 401 |
+
1750921276.822866,65259,0.6472935080528259
|
| 402 |
+
1750921406.559783,65359,0.6456985473632812
|
| 403 |
+
1750922201.8808472,65472,0.6491064429283142
|
| 404 |
+
1750922335.5998678,65572,0.6507015824317932
|
| 405 |
+
1750922466.940992,65672,0.6512285470962524
|
| 406 |
+
1750922591.68689,65772,0.6479865312576294
|
| 407 |
+
1750922721.988064,65872,0.6486464738845825
|
| 408 |
+
1750922843.08352,65972,0.6498088240623474
|
| 409 |
+
1750922964.566847,66072,0.6509638428688049
|
| 410 |
+
1750923085.474137,66172,0.649839460849762
|
| 411 |
+
1750923206.383039,66272,0.6484135985374451
|
| 412 |
+
1750923327.453134,66372,0.6500716805458069
|
| 413 |
+
1750923448.123444,66472,0.6503284573554993
|
| 414 |
+
1750923568.853112,66572,0.648423433303833
|
| 415 |
+
1750923689.65459,66672,0.6478958129882812
|
| 416 |
+
1750923810.447805,66772,0.6492162942886353
|
| 417 |
+
1750923931.042791,66872,0.6473988890647888
|
| 418 |
+
1750924051.916737,66972,0.6482604146003723
|
| 419 |
+
1750924173.199275,67072,0.648994505405426
|
| 420 |
+
1750924293.858598,67172,0.6486293077468872
|
| 421 |
+
1750924415.077677,67272,0.6478174328804016
|
| 422 |
+
1750924536.2253509,67372,0.6496537923812866
|
| 423 |
+
1750924662.9054248,67472,0.6481562256813049
|
| 424 |
+
1750924792.057019,67572,0.6468345522880554
|
| 425 |
+
1750924916.8148491,67672,0.6478131413459778
|
| 426 |
+
1750925042.747761,67772,0.646910548210144
|
| 427 |
+
1750925170.406854,67872,0.6478045582771301
|
| 428 |
+
1750925293.492859,67972,0.6473388671875
|
| 429 |
+
1750925418.957403,68072,0.6464074850082397
|
| 430 |
+
1750925541.840107,68172,0.6478223204612732
|
| 431 |
+
1750925663.5091,68272,0.6482077240943909
|
| 432 |
+
1750925784.65945,68372,0.6487855315208435
|
| 433 |
+
1750925906.0801122,68472,0.6465165615081787
|
| 434 |
+
1750926700.457063,68585,0.6502459049224854
|
| 435 |
+
1750926826.9456398,68685,0.651401937007904
|
| 436 |
+
1750926953.6492538,68785,0.6517910361289978
|
| 437 |
+
1750927078.952777,68885,0.6490827202796936
|
| 438 |
+
1750927205.3010828,68985,0.650858461856842
|
| 439 |
+
1750927329.743047,69085,0.6488449573516846
|
| 440 |
+
1750927454.28623,69185,0.6479975581169128
|
| 441 |
+
1750927580.5161228,69285,0.6503829956054688
|
| 442 |
+
1750927705.630354,69385,0.6467738747596741
|
| 443 |
+
1750927831.181097,69485,0.6512561440467834
|
| 444 |
+
1750927957.4820192,69585,0.6498749852180481
|
| 445 |
+
1750928083.418227,69685,0.6479362845420837
|
| 446 |
+
1750928210.516265,69785,0.6487597823143005
|
| 447 |
+
1750928337.9859421,69885,0.6513578295707703
|
| 448 |
+
1750928466.57218,69985,0.6503247618675232
|
| 449 |
+
1750928591.0808609,70085,0.6471317410469055
|
| 450 |
+
1750928712.549025,70185,0.648591935634613
|
| 451 |
+
1750928834.0284002,70285,0.6484154462814331
|
| 452 |
+
1750928955.571093,70385,0.6498407125473022
|
| 453 |
+
1750929075.983147,70485,0.6484828591346741
|
| 454 |
+
1750929196.275585,70585,0.6494001150131226
|
| 455 |
+
1750929316.778184,70685,0.6480600237846375
|
| 456 |
+
1750929436.865588,70785,0.6480422616004944
|
| 457 |
+
1750929560.202616,70885,0.6478958129882812
|
| 458 |
+
1750929680.995494,70985,0.647394597530365
|
| 459 |
+
1750929801.422886,71085,0.6490048766136169
|
| 460 |
+
1750929943.29896,71185,0.650200366973877
|
| 461 |
+
1750930068.967339,71285,0.6482769846916199
|
| 462 |
+
1750930189.072788,71385,0.6460214257240295
|
| 463 |
+
1750930308.7765188,71485,0.6505600214004517
|
| 464 |
+
1750930429.611978,71585,0.6480796337127686
|
| 465 |
+
1750931205.2413719,71698,0.6527718305587769
|
| 466 |
+
1750931326.428332,71798,0.6477978229522705
|
| 467 |
+
1750931449.9233081,71898,0.6518376469612122
|
| 468 |
+
1750931573.760788,71998,0.6521115303039551
|
| 469 |
+
1750931694.6096342,72098,0.6505722999572754
|
| 470 |
+
1750931818.165248,72198,0.6516188979148865
|
| 471 |
+
1750931948.603304,72298,0.6470564007759094
|
| 472 |
+
1750932076.934408,72398,0.650993287563324
|
| 473 |
+
1750932203.6527512,72498,0.6495453715324402
|
| 474 |
+
1750932332.362875,72598,0.652105987071991
|
| 475 |
+
1750932459.72476,72698,0.6486384868621826
|
| 476 |
+
1750932586.006664,72798,0.6518786549568176
|
| 477 |
+
1750932712.433674,72898,0.6480814814567566
|
| 478 |
+
1750932833.490408,72998,0.6507444977760315
|
| 479 |
+
1750932953.271996,73098,0.6490061283111572
|
| 480 |
+
1750933073.4454489,73198,0.647949755191803
|
| 481 |
+
1750933193.6256452,73298,0.6485024690628052
|
| 482 |
+
1750933313.522526,73398,0.6500251293182373
|
| 483 |
+
1750933432.8896089,73498,0.6501495242118835
|
| 484 |
+
1750933559.187637,73598,0.6502885818481445
|
| 485 |
+
1750933691.372032,73698,0.6501495242118835
|
| 486 |
+
1750933810.739407,73798,0.6492481827735901
|
| 487 |
+
1750933930.987875,73898,0.6489564776420593
|
| 488 |
+
1750934050.3341088,73998,0.650886058807373
|
| 489 |
+
1750934169.8655858,74098,0.6477953195571899
|
| 490 |
+
1750934289.4910982,74198,0.6487432718276978
|
| 491 |
+
1750934409.0436878,74298,0.6494399309158325
|
| 492 |
+
1750934528.496396,74398,0.6505404114723206
|
| 493 |
+
1750934647.6416872,74498,0.6483222842216492
|
| 494 |
+
1750934767.3372312,74598,0.6466078162193298
|
| 495 |
+
1750934886.8239932,74698,0.6476887464523315
|
| 496 |
+
1750935667.094647,74811,0.6520823240280151
|
| 497 |
+
1750935791.266507,74911,0.6515815258026123
|
| 498 |
+
1750935914.606653,75011,0.6508811116218567
|
| 499 |
+
1750936039.06153,75111,0.6499025821685791
|
| 500 |
+
1750936165.694906,75211,0.6508517265319824
|
| 501 |
+
1750936291.294213,75311,0.6521464586257935
|
| 502 |
+
1750936416.024253,75411,0.6500281691551208
|
| 503 |
+
1750936541.313317,75511,0.651495099067688
|
| 504 |
+
1750936663.961,75611,0.6510618925094604
|
| 505 |
+
1750936787.1065311,75711,0.6508431434631348
|
| 506 |
+
1750936910.651989,75811,0.6501868963241577
|
| 507 |
+
1750937035.475455,75911,0.650493860244751
|
| 508 |
+
1750937174.3356822,76011,0.6468051671981812
|
| 509 |
+
1750937296.402282,76111,0.6489773392677307
|
| 510 |
+
1750937415.056671,76211,0.6508811116218567
|
| 511 |
+
1750937534.0117319,76311,0.6501476764678955
|
| 512 |
+
1750937653.302214,76411,0.6487193703651428
|
| 513 |
+
1750937772.312762,76511,0.6498878598213196
|
| 514 |
+
1750937891.033729,76611,0.6482493877410889
|
| 515 |
+
1750938009.852423,76711,0.6508100628852844
|
| 516 |
+
1750938128.461038,76811,0.6482242941856384
|
| 517 |
+
1750938247.067235,76911,0.6491262316703796
|
| 518 |
+
1750938365.590821,77011,0.652357816696167
|
| 519 |
+
1750938484.2761998,77111,0.6490649580955505
|
| 520 |
+
1750938602.906268,77211,0.6494031548500061
|
| 521 |
+
1750938721.6004522,77311,0.6518670320510864
|
| 522 |
+
1750938840.3788958,77411,0.6487389802932739
|
| 523 |
+
1750938959.428231,77511,0.6489448547363281
|
| 524 |
+
1750939078.0850508,77611,0.6487916707992554
|
| 525 |
+
1750939200.7135448,77711,0.6509914398193359
|
| 526 |
+
1750939321.166645,77811,0.6488952040672302
|
| 527 |
+
1750940094.809199,77924,0.6526339650154114
|
| 528 |
+
1750940217.234244,78024,0.6542683839797974
|
| 529 |
+
1750940346.059194,78124,0.6495704650878906
|
| 530 |
+
1750940470.212898,78224,0.6520876288414001
|
| 531 |
+
1750940589.032979,78324,0.6522684097290039
|
| 532 |
+
1750940708.133315,78424,0.6509938836097717
|
| 533 |
+
1750940837.9103842,78524,0.6519785523414612
|
| 534 |
+
1750940975.8179379,78624,0.650454044342041
|
| 535 |
+
1750941095.2669902,78724,0.647087037563324
|
| 536 |
+
1750941213.9696698,78824,0.6488590836524963
|
| 537 |
+
1750941332.621811,78924,0.6520287990570068
|
| 538 |
+
1750941451.281897,79024,0.6517101526260376
|
| 539 |
+
1750941569.900013,79124,0.6491991281509399
|
| 540 |
+
1750941688.514817,79224,0.6504749059677124
|
| 541 |
+
1750941807.542513,79324,0.6506611704826355
|
| 542 |
+
1750941926.6724448,79424,0.6498695015907288
|
| 543 |
+
1750942045.9601479,79524,0.6501017212867737
|
| 544 |
+
1750942168.355915,79624,0.6487457156181335
|
| 545 |
+
1750942296.663371,79724,0.6524160504341125
|
| 546 |
+
1750942419.767167,79824,0.6513786911964417
|
| 547 |
+
1750942545.070986,79924,0.6490833163261414
|
| 548 |
+
1750942668.438934,80024,0.6487867832183838
|
| 549 |
+
1750942796.3619041,80124,0.6486017107963562
|
| 550 |
+
1750942923.124234,80224,0.6483988761901855
|
| 551 |
+
1750943058.1432269,80324,0.6503474116325378
|
| 552 |
+
1750943203.790324,80424,0.6502794027328491
|
| 553 |
+
1750943341.517162,80524,0.6492022275924683
|
| 554 |
+
1750943466.439969,80624,0.6479111313819885
|
| 555 |
+
1750943589.441782,80724,0.6514332294464111
|
| 556 |
+
1750943711.3610811,80824,0.6519209742546082
|
| 557 |
+
1750943834.455572,80924,0.6504687666893005
|
| 558 |
+
1750944606.875496,81037,0.6522871851921082
|
| 559 |
+
1750944727.609366,81137,0.6515563726425171
|
| 560 |
+
1750944846.120693,81237,0.6497898101806641
|
| 561 |
+
1750944967.2339032,81337,0.652232825756073
|
| 562 |
+
1750945090.3662348,81437,0.6507910490036011
|
| 563 |
+
1750945211.797076,81537,0.651106595993042
|
| 564 |
+
1750945330.042583,81637,0.6506397128105164
|
| 565 |
+
1750945448.8972192,81737,0.6519307494163513
|
| 566 |
+
1750945567.0563169,81837,0.6516096591949463
|
| 567 |
+
1750945685.100916,81937,0.6511819958686829
|
| 568 |
+
1750945803.9379368,82037,0.649468719959259
|
| 569 |
+
1750945926.481904,82137,0.6496985554695129
|
| 570 |
+
1750946055.549682,82237,0.6508976817131042
|
| 571 |
+
1750946178.625365,82337,0.6501617431640625
|
| 572 |
+
1750946303.409825,82437,0.6516960859298706
|
| 573 |
+
1750946429.152622,82537,0.6523486375808716
|
| 574 |
+
1750946552.534206,82637,0.6493486762046814
|
| 575 |
+
1750946674.518133,82737,0.6518946290016174
|
| 576 |
+
1750946798.236918,82837,0.6495766043663025
|
| 577 |
+
1750946924.468488,82937,0.6500422954559326
|
| 578 |
+
1750947047.724276,83037,0.6488174200057983
|
| 579 |
+
1750947170.843002,83137,0.6509907841682434
|
| 580 |
+
1750947294.740781,83237,0.6502095460891724
|
| 581 |
+
1750947417.294125,83337,0.6490539312362671
|
| 582 |
+
1750947540.771071,83437,0.6508578658103943
|
| 583 |
+
1750947662.384672,83537,0.6505318880081177
|
| 584 |
+
1750947783.331347,83637,0.6509711742401123
|
| 585 |
+
1750947908.258089,83737,0.6505796313285828
|
| 586 |
+
1750948039.4229639,83837,0.653219997882843
|
| 587 |
+
1750948171.129049,83937,0.6481268405914307
|
| 588 |
+
1750948289.858269,84037,0.6493921279907227
|
| 589 |
+
1750949058.226542,84150,0.6519173979759216
|
| 590 |
+
1750949181.067277,84250,0.652815580368042
|
| 591 |
+
1750949307.2892659,84350,0.6513253450393677
|
| 592 |
+
1750949439.01839,84450,0.6528223156929016
|
| 593 |
+
1750949551.864723,84550,0.6522849202156067
|
| 594 |
+
1750949737.308183,84650,0.6512879729270935
|
| 595 |
+
1750949855.809212,84750,0.6491966843605042
|
| 596 |
+
1750949977.819556,84850,0.6497671604156494
|
| 597 |
+
1750950090.820287,84950,0.6525888442993164
|
| 598 |
+
1750950202.941718,85050,0.6501887440681458
|
| 599 |
+
1750950315.218237,85150,0.6522665619850159
|
| 600 |
+
1750950429.6683562,85250,0.6511936187744141
|
| 601 |
+
1750950546.027575,85350,0.6516770720481873
|
| 602 |
+
1750950662.1023219,85450,0.651203453540802
|
| 603 |
+
1750950785.824854,85550,0.6510520577430725
|
| 604 |
+
1750950904.5093288,85650,0.6501231789588928
|
| 605 |
+
1750951019.37567,85750,0.6506231427192688
|
| 606 |
+
1750951137.001701,85850,0.6502812504768372
|
| 607 |
+
1750951257.822999,85950,0.6499130129814148
|
| 608 |
+
1750951376.334996,86050,0.6525477766990662
|
| 609 |
+
1750951508.36153,86150,0.6494105458259583
|
| 610 |
+
1750951653.929481,86250,0.6496948599815369
|
| 611 |
+
1750951780.7706292,86350,0.649940550327301
|
| 612 |
+
1750951900.429235,86450,0.6511250138282776
|
| 613 |
+
1750952020.460129,86550,0.6500226855278015
|
| 614 |
+
1750952140.587803,86650,0.6509822010993958
|
| 615 |
+
1750952258.030296,86750,0.6532450914382935
|
| 616 |
+
1750952376.401353,86850,0.6505300402641296
|
| 617 |
+
1750952494.550299,86950,0.6496709585189819
|
| 618 |
+
1750952611.356643,87050,0.650303304195404
|
| 619 |
+
1750952728.486352,87150,0.6507144570350647
|
| 620 |
+
1750953607.870789,87263,0.6540953516960144
|
| 621 |
+
1750953741.1156712,87363,0.6534571051597595
|
| 622 |
+
1750953876.673899,87463,0.6530048847198486
|
| 623 |
+
1750954008.2263958,87563,0.6522052884101868
|
| 624 |
+
1750954134.876569,87663,0.6529846787452698
|
| 625 |
+
1750954275.110658,87763,0.6527052521705627
|
| 626 |
+
1750954410.617163,87863,0.6522138714790344
|
| 627 |
+
1750954546.785998,87963,0.6516979336738586
|
| 628 |
+
1750954681.6060128,88063,0.654875636100769
|
| 629 |
+
1750954809.4235651,88163,0.6518474221229553
|
| 630 |
+
1750954935.581568,88263,0.6498578190803528
|
| 631 |
+
1750955063.266297,88363,0.648994505405426
|
| 632 |
+
1750955195.611402,88463,0.6514338254928589
|
| 633 |
+
1750955349.305851,88563,0.6500398516654968
|
| 634 |
+
1750955491.144202,88663,0.6512365341186523
|
| 635 |
+
1750955661.751243,88763,0.6501795053482056
|
| 636 |
+
1750955789.472915,88863,0.6513388752937317
|
| 637 |
+
1750955916.455606,88963,0.6507641077041626
|
| 638 |
+
1750956043.57365,89063,0.6520398259162903
|
| 639 |
+
1750956172.116692,89163,0.6523762345314026
|
| 640 |
+
1750956300.8387232,89263,0.6488468050956726
|
| 641 |
+
1750956426.0187678,89363,0.6504718065261841
|
| 642 |
+
1750956550.824841,89463,0.6528094410896301
|
| 643 |
+
1750956676.1422899,89563,0.6500986814498901
|
| 644 |
+
1750956795.7607691,89663,0.6482028365135193
|
| 645 |
+
1750956912.415222,89763,0.650150716304779
|
| 646 |
+
1750957031.373131,89863,0.6517420411109924
|
| 647 |
+
1750957156.32019,89963,0.6502113938331604
|
| 648 |
+
1750957304.596373,90063,0.6499865055084229
|
| 649 |
+
1750957440.024402,90163,0.6511102914810181
|
| 650 |
+
1750957571.496244,90263,0.6512358784675598
|
| 651 |
+
1750958396.879992,90376,0.6540916562080383
|
| 652 |
+
1750958523.848976,90476,0.6543688774108887
|
| 653 |
+
1750958650.727561,90576,0.6532585620880127
|
| 654 |
+
1750958781.975317,90676,0.6527677774429321
|
| 655 |
+
1750958926.18809,90776,0.6502892374992371
|
| 656 |
+
1750959057.247673,90876,0.6526997685432434
|
| 657 |
+
1750959183.969264,90976,0.6530202031135559
|
| 658 |
+
1750959309.7882721,91076,0.6501832008361816
|
| 659 |
+
1750959440.487211,91176,0.6517861485481262
|
| 660 |
+
1750959567.645947,91276,0.6520073413848877
|
| 661 |
+
1750959694.23929,91376,0.6519497632980347
|
| 662 |
+
1750959821.394371,91476,0.6525055170059204
|
| 663 |
+
1750959955.037148,91576,0.6517549157142639
|
| 664 |
+
1750960083.06543,91676,0.6512113809585571
|
| 665 |
+
1750960206.475574,91776,0.6483603119850159
|
| 666 |
+
1750960339.1817,91876,0.6516813635826111
|
| 667 |
+
1750960477.6669922,91976,0.6513210535049438
|
| 668 |
+
1750960610.298056,92076,0.6508394479751587
|
| 669 |
+
1750960738.587141,92176,0.6515735387802124
|
| 670 |
+
1750960856.138477,92276,0.6536997556686401
|
| 671 |
+
1750960975.38271,92376,0.6503039002418518
|
| 672 |
+
1750961094.6123688,92476,0.6526568531990051
|
| 673 |
+
1750961212.9602098,92576,0.65289705991745
|
| 674 |
+
1750961330.572914,92676,0.6479264497756958
|
| 675 |
+
1750961450.333251,92776,0.6491641998291016
|
| 676 |
+
1750961569.7984881,92876,0.6493884921073914
|
| 677 |
+
1750961687.424532,92976,0.6523302793502808
|
| 678 |
+
1750961805.406246,93076,0.6503909230232239
|
| 679 |
+
1750961937.18595,93176,0.6519724130630493
|
| 680 |
+
1750962080.5853848,93276,0.6517199873924255
|
| 681 |
+
1750962219.942724,93376,0.6518253684043884
|
archive-misc/runs_jsons/acc_trainstep/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750742837.6197991,99,0.36904042959213257
|
| 3 |
+
1750742959.546508,199,0.44737011194229126
|
| 4 |
+
1750743082.732747,299,0.47564032673835754
|
| 5 |
+
1750743206.693352,399,0.49883824586868286
|
| 6 |
+
1750743330.856798,499,0.5141243934631348
|
| 7 |
+
1750743454.929158,599,0.5264865159988403
|
| 8 |
+
1750744239.287515,722,0.541310727596283
|
| 9 |
+
1750744363.888371,822,0.5509797930717468
|
| 10 |
+
1750744488.6690261,922,0.5590435266494751
|
| 11 |
+
1750744612.728082,1022,0.5634166598320007
|
| 12 |
+
1750744736.573202,1122,0.5711072087287903
|
| 13 |
+
1750744860.3516622,1222,0.5781397223472595
|
| 14 |
+
1750745639.087075,1345,0.5573936700820923
|
| 15 |
+
1750745761.870153,1445,0.5579822063446045
|
| 16 |
+
1750745884.924454,1545,0.5657849311828613
|
| 17 |
+
1750746008.142229,1645,0.5697389841079712
|
| 18 |
+
1750746130.9888499,1745,0.5756507515907288
|
| 19 |
+
1750746254.2531168,1845,0.5814270973205566
|
| 20 |
+
1750747034.411872,1968,0.5898361206054688
|
| 21 |
+
1750747161.104775,2068,0.5952500104904175
|
| 22 |
+
1750747284.3177412,2168,0.5954675078392029
|
| 23 |
+
1750747407.53179,2268,0.5999111533164978
|
| 24 |
+
1750747530.13609,2368,0.601463258266449
|
| 25 |
+
1750747652.508849,2468,0.6034111380577087
|
| 26 |
+
1750748426.4023712,2591,0.5962967872619629
|
| 27 |
+
1750748548.152691,2691,0.5989730358123779
|
| 28 |
+
1750748670.332829,2791,0.6034019589424133
|
| 29 |
+
1750748792.033957,2891,0.6055275797843933
|
| 30 |
+
1750748913.654305,2991,0.6092022061347961
|
| 31 |
+
1750749035.48637,3091,0.611865222454071
|
| 32 |
+
1750749804.5840662,3214,0.617388904094696
|
| 33 |
+
1750749926.68753,3314,0.6186991333961487
|
| 34 |
+
1750750048.534391,3414,0.6210447549819946
|
| 35 |
+
1750750170.009557,3514,0.622041642665863
|
| 36 |
+
1750750292.006481,3614,0.6248449683189392
|
| 37 |
+
1750750414.405195,3714,0.6247016191482544
|
| 38 |
+
1750751193.8930078,3837,0.6037773489952087
|
| 39 |
+
1750751315.359325,3937,0.6039528250694275
|
| 40 |
+
1750751437.273057,4037,0.6069276928901672
|
| 41 |
+
1750751559.02651,4137,0.6077849268913269
|
| 42 |
+
1750751681.462891,4237,0.6098229289054871
|
| 43 |
+
1750751803.531113,4337,0.6127052903175354
|
| 44 |
+
1750752568.891242,4460,0.6153458952903748
|
| 45 |
+
1750752689.6196811,4560,0.6187671422958374
|
| 46 |
+
1750752810.5254478,4660,0.6224969625473022
|
| 47 |
+
1750752931.439138,4760,0.6224191188812256
|
| 48 |
+
1750753052.3244538,4860,0.6226696968078613
|
| 49 |
+
1750753173.179165,4960,0.6215306520462036
|
| 50 |
+
1750753937.2430022,5083,0.539279043674469
|
| 51 |
+
1750754057.59066,5183,0.528941810131073
|
| 52 |
+
1750754178.295239,5283,0.5329399704933167
|
| 53 |
+
1750754299.033118,5383,0.5406439900398254
|
| 54 |
+
1750754422.568199,5483,0.5422530770301819
|
| 55 |
+
1750754548.9622169,5583,0.5443247556686401
|
| 56 |
+
1750755328.347894,5706,0.5503765344619751
|
| 57 |
+
1750755447.7443478,5806,0.5573063492774963
|
| 58 |
+
1750755568.281234,5906,0.5552720427513123
|
| 59 |
+
1750755688.8679402,6006,0.5544227957725525
|
| 60 |
+
1750755809.374611,6106,0.5525067448616028
|
| 61 |
+
1750755929.7489762,6206,0.5546917915344238
|
| 62 |
+
1750756705.562717,31229,0.6134282946586609
|
| 63 |
+
1750756829.0259671,31329,0.6172665357589722
|
| 64 |
+
1750756949.343882,31429,0.6197683811187744
|
| 65 |
+
1750757078.677712,31529,0.6175140738487244
|
| 66 |
+
1750757198.826624,31629,0.6200233101844788
|
| 67 |
+
1750757318.9408782,31729,0.6193560361862183
|
| 68 |
+
1750757439.0837288,31829,0.6225073337554932
|
| 69 |
+
1750757559.06675,31929,0.620078444480896
|
| 70 |
+
1750757678.7195451,32029,0.6205655932426453
|
| 71 |
+
1750757799.1143239,32129,0.6206102967262268
|
| 72 |
+
1750757919.020712,32229,0.6213443875312805
|
| 73 |
+
1750758042.009888,32329,0.6223890781402588
|
| 74 |
+
1750758166.2468889,32429,0.622481644153595
|
| 75 |
+
1750758299.388862,32529,0.6226991415023804
|
| 76 |
+
1750758424.9395342,32629,0.625359058380127
|
| 77 |
+
1750758545.61158,32729,0.6247769594192505
|
| 78 |
+
1750758665.587701,32829,0.6244283318519592
|
| 79 |
+
1750758785.512001,32929,0.6239809989929199
|
| 80 |
+
1750758908.149125,33029,0.6258223056793213
|
| 81 |
+
1750759030.2707908,33129,0.6224032044410706
|
| 82 |
+
1750759151.089634,33229,0.6246237754821777
|
| 83 |
+
1750759270.41044,33329,0.6262695789337158
|
| 84 |
+
1750759389.422902,33429,0.6265717148780823
|
| 85 |
+
1750759508.650507,33529,0.6267462968826294
|
| 86 |
+
1750759627.92784,33629,0.6261611580848694
|
| 87 |
+
1750759747.075024,33729,0.6264215707778931
|
| 88 |
+
1750759866.299139,33829,0.6259289383888245
|
| 89 |
+
1750759985.405504,33929,0.6276574730873108
|
| 90 |
+
1750760104.837285,34029,0.6260238885879517
|
| 91 |
+
1750760224.402069,34129,0.6263854503631592
|
| 92 |
+
1750760345.750891,34229,0.626671552658081
|
| 93 |
+
1750761103.4888792,34342,0.6312656998634338
|
| 94 |
+
1750761229.4441931,34442,0.6303835511207581
|
| 95 |
+
1750761355.2628942,34542,0.6316807866096497
|
| 96 |
+
1750761480.874649,34642,0.6313860416412354
|
| 97 |
+
1750761607.7712588,34742,0.6320606470108032
|
| 98 |
+
1750761736.7761228,34842,0.6289032101631165
|
| 99 |
+
1750761863.39511,34942,0.6316948533058167
|
| 100 |
+
1750761990.0101068,35042,0.6324865221977234
|
| 101 |
+
1750762116.3877618,35142,0.6316875219345093
|
| 102 |
+
1750762249.288865,35242,0.6300404667854309
|
| 103 |
+
1750762376.090362,35342,0.632953405380249
|
| 104 |
+
1750762502.080409,35442,0.6323081851005554
|
| 105 |
+
1750762627.82608,35542,0.6321666836738586
|
| 106 |
+
1750762753.5969422,35642,0.6300557851791382
|
| 107 |
+
1750762879.876796,35742,0.6326311230659485
|
| 108 |
+
1750763006.5001202,35842,0.6319313645362854
|
| 109 |
+
1750763132.9935622,35942,0.631047785282135
|
| 110 |
+
1750763258.156081,36042,0.6321427822113037
|
| 111 |
+
1750763383.903738,36142,0.63047856092453
|
| 112 |
+
1750763510.011431,36242,0.6317977905273438
|
| 113 |
+
1750763635.5001469,36342,0.6309497356414795
|
| 114 |
+
1750763760.9809768,36442,0.6323658227920532
|
| 115 |
+
1750763886.428213,36542,0.6336813569068909
|
| 116 |
+
1750764012.081731,36642,0.630462646484375
|
| 117 |
+
1750764137.8974771,36742,0.6334981918334961
|
| 118 |
+
1750764263.873915,36842,0.6330251097679138
|
| 119 |
+
1750764390.009352,36942,0.6332083344459534
|
| 120 |
+
1750764515.930391,37042,0.6311777234077454
|
| 121 |
+
1750764641.606347,37142,0.6333014965057373
|
| 122 |
+
1750764767.127562,37242,0.6339362859725952
|
| 123 |
+
1750764893.1785972,37342,0.632925271987915
|
| 124 |
+
1750765665.5694988,37455,0.6345545649528503
|
| 125 |
+
1750765790.936673,37555,0.635745108127594
|
| 126 |
+
1750765916.488437,37655,0.6353376507759094
|
| 127 |
+
1750766043.204027,37755,0.6342653036117554
|
| 128 |
+
1750766176.155006,37855,0.633561909198761
|
| 129 |
+
1750766309.9887629,37955,0.6367040276527405
|
| 130 |
+
1750766435.9808052,38055,0.6353290677070618
|
| 131 |
+
1750766561.507215,38155,0.6362383365631104
|
| 132 |
+
1750766687.761547,38255,0.6351893544197083
|
| 133 |
+
1750766813.374468,38355,0.6356801390647888
|
| 134 |
+
1750766938.476672,38455,0.6352922916412354
|
| 135 |
+
1750767063.94742,38555,0.6348260045051575
|
| 136 |
+
1750767188.9039412,38655,0.6365422606468201
|
| 137 |
+
1750767314.098763,38755,0.6388063430786133
|
| 138 |
+
1750767439.509332,38855,0.6352959275245667
|
| 139 |
+
1750767565.280206,38955,0.6363357901573181
|
| 140 |
+
1750767690.333876,39055,0.6391231417655945
|
| 141 |
+
1750767816.262549,39155,0.636873185634613
|
| 142 |
+
1750767942.017864,39255,0.6374209523200989
|
| 143 |
+
1750768067.717936,39355,0.6357077360153198
|
| 144 |
+
1750768194.353793,39455,0.6367469429969788
|
| 145 |
+
1750768320.1663208,39555,0.6365545392036438
|
| 146 |
+
1750768445.402051,39655,0.6376004815101624
|
| 147 |
+
1750768571.1441479,39755,0.6360937356948853
|
| 148 |
+
1750768696.381954,39855,0.6375337243080139
|
| 149 |
+
1750768822.0582972,39955,0.6336041688919067
|
| 150 |
+
1750768949.197896,40055,0.6351929903030396
|
| 151 |
+
1750769075.347171,40155,0.6376004815101624
|
| 152 |
+
1750769200.98032,40255,0.6377028226852417
|
| 153 |
+
1750769325.995973,40355,0.6360937356948853
|
| 154 |
+
1750769450.1222012,40455,0.6372702121734619
|
| 155 |
+
1750770212.22892,40568,0.6391403675079346
|
| 156 |
+
1750770337.235692,40668,0.6396825909614563
|
| 157 |
+
1750770461.035936,40768,0.6387885808944702
|
| 158 |
+
1750770586.025459,40868,0.6397027969360352
|
| 159 |
+
1750770710.65363,40968,0.6380771994590759
|
| 160 |
+
1750770836.624546,41068,0.6397095322608948
|
| 161 |
+
1750770960.969816,41168,0.6382555365562439
|
| 162 |
+
1750771085.822515,41268,0.6392702460289001
|
| 163 |
+
1750771210.295008,41368,0.6390502452850342
|
| 164 |
+
1750771335.099481,41468,0.6391182541847229
|
| 165 |
+
1750771460.042669,41568,0.6393253803253174
|
| 166 |
+
1750771584.8335161,41668,0.6398566365242004
|
| 167 |
+
1750771709.965019,41768,0.6393627524375916
|
| 168 |
+
1750771834.376404,41868,0.6393468379974365
|
| 169 |
+
1750771959.458994,41968,0.6397126317024231
|
| 170 |
+
1750772084.903645,42068,0.6398835778236389
|
| 171 |
+
1750772209.9900868,42168,0.6368670463562012
|
| 172 |
+
1750772334.583624,42268,0.638695478439331
|
| 173 |
+
1750772459.2145548,42368,0.6395208239555359
|
| 174 |
+
1750772583.934293,42468,0.6379546523094177
|
| 175 |
+
1750772710.0044641,42568,0.6402867436408997
|
| 176 |
+
1750772834.619936,42668,0.6405624747276306
|
| 177 |
+
1750772959.3818722,42768,0.6397273540496826
|
| 178 |
+
1750773082.836871,42868,0.6404123902320862
|
| 179 |
+
1750773206.818681,42968,0.638261616230011
|
| 180 |
+
1750773330.99267,43068,0.6365735530853271
|
| 181 |
+
1750773454.452687,43168,0.6383835673332214
|
| 182 |
+
1750773577.893802,43268,0.6385894417762756
|
| 183 |
+
1750773702.293399,43368,0.6393002271652222
|
| 184 |
+
1750773826.6592598,43468,0.636163592338562
|
| 185 |
+
1750773956.231112,43568,0.641834557056427
|
| 186 |
+
1750774701.058917,43681,0.6414767503738403
|
| 187 |
+
1750774824.324727,43781,0.6403449773788452
|
| 188 |
+
1750774947.902482,43881,0.6420478224754333
|
| 189 |
+
1750775072.5444129,43981,0.6443719267845154
|
| 190 |
+
1750775197.573143,44081,0.6438602805137634
|
| 191 |
+
1750775321.648554,44181,0.6409393548965454
|
| 192 |
+
1750775445.6248288,44281,0.6413835883140564
|
| 193 |
+
1750775569.206202,44381,0.6412414312362671
|
| 194 |
+
1750775693.360138,44481,0.6426268219947815
|
| 195 |
+
1750775818.041559,44581,0.6406102776527405
|
| 196 |
+
1750775942.3875961,44681,0.6418321132659912
|
| 197 |
+
1750776067.079108,44781,0.6407567262649536
|
| 198 |
+
1750776190.216455,44881,0.642294704914093
|
| 199 |
+
1750776316.605943,44981,0.6407910585403442
|
| 200 |
+
1750776441.708267,45081,0.6397812366485596
|
| 201 |
+
1750776566.3821728,45181,0.6401249766349792
|
| 202 |
+
1750776691.835101,45281,0.6421715617179871
|
| 203 |
+
1750776815.85951,45381,0.6388425230979919
|
| 204 |
+
1750776940.483356,45481,0.6395741105079651
|
| 205 |
+
1750777064.732235,45581,0.6405282020568848
|
| 206 |
+
1750777189.073138,45681,0.6414981484413147
|
| 207 |
+
1750777314.5960279,45781,0.6402794122695923
|
| 208 |
+
1750777438.241591,45881,0.6405006051063538
|
| 209 |
+
1750777561.9531991,45981,0.6422346830368042
|
| 210 |
+
1750777692.820798,46081,0.6408694982528687
|
| 211 |
+
1750777820.8835342,46181,0.640204668045044
|
| 212 |
+
1750777945.470925,46281,0.6404834389686584
|
| 213 |
+
1750778069.65347,46381,0.6402432322502136
|
| 214 |
+
1750778194.368608,46481,0.6402347087860107
|
| 215 |
+
1750778322.778739,46581,0.6419828534126282
|
| 216 |
+
1750778447.16852,46681,0.6393155455589294
|
| 217 |
+
1750779212.0583649,46794,0.6435402035713196
|
| 218 |
+
1750779336.033227,46894,0.6429583430290222
|
| 219 |
+
1750779460.355482,46994,0.6421666741371155
|
| 220 |
+
1750779582.112957,47094,0.6412267088890076
|
| 221 |
+
1750779715.6459038,47194,0.6452603936195374
|
| 222 |
+
1750779839.06378,47294,0.6431770920753479
|
| 223 |
+
1750779969.444735,47394,0.644919753074646
|
| 224 |
+
1750780100.559294,47494,0.6444944739341736
|
| 225 |
+
1750780228.8061728,47594,0.6433780789375305
|
| 226 |
+
1750780355.893033,47694,0.6408945918083191
|
| 227 |
+
1750780489.041636,47794,0.6430104374885559
|
| 228 |
+
1750780621.223639,47894,0.6415698528289795
|
| 229 |
+
1750780752.219553,47994,0.6443069577217102
|
| 230 |
+
1750780888.478504,48094,0.6418939828872681
|
| 231 |
+
1750781028.7073689,48194,0.6430557370185852
|
| 232 |
+
1750781175.704441,48294,0.6399730443954468
|
| 233 |
+
1750781324.396688,48394,0.6406427621841431
|
| 234 |
+
1750781473.980017,48494,0.6425551772117615
|
| 235 |
+
1750781609.7190971,48594,0.6419417858123779
|
| 236 |
+
1750781740.119979,48694,0.6428192257881165
|
| 237 |
+
1750781871.363491,48794,0.6444454789161682
|
| 238 |
+
1750782004.174865,48894,0.6416041851043701
|
| 239 |
+
1750782137.862446,48994,0.6425073742866516
|
| 240 |
+
1750782265.936386,49094,0.6432487964630127
|
| 241 |
+
1750782403.7187278,49194,0.6440649628639221
|
| 242 |
+
1750782538.301892,49294,0.6432787775993347
|
| 243 |
+
1750782689.3264818,49394,0.6424552798271179
|
| 244 |
+
1750782843.259275,49494,0.6423174142837524
|
| 245 |
+
1750782986.946542,49594,0.6410165429115295
|
| 246 |
+
1750783123.844516,49694,0.642348051071167
|
| 247 |
+
1750783252.6179578,49794,0.6417592167854309
|
| 248 |
+
1750784134.094883,49907,0.6442288160324097
|
| 249 |
+
1750784267.055677,50007,0.6450937390327454
|
| 250 |
+
1750784396.809908,50107,0.6455336809158325
|
| 251 |
+
1750784540.68293,50207,0.6459117531776428
|
| 252 |
+
1750784685.3494022,50307,0.6453884840011597
|
| 253 |
+
1750784825.603266,50407,0.6443143486976624
|
| 254 |
+
1750784970.852562,50507,0.6460220813751221
|
| 255 |
+
1750785124.676715,50607,0.6426783204078674
|
| 256 |
+
1750785261.507145,50707,0.6435110569000244
|
| 257 |
+
1750785403.2566102,50807,0.6428578495979309
|
| 258 |
+
1750785549.3775492,50907,0.6444724202156067
|
| 259 |
+
1750785694.352969,51007,0.6436213254928589
|
| 260 |
+
1750785842.9487228,51107,0.6439338326454163
|
| 261 |
+
1750785995.988441,51207,0.6430392265319824
|
| 262 |
+
1750786138.111999,51307,0.6441458463668823
|
| 263 |
+
1750786279.883648,51407,0.6437824964523315
|
| 264 |
+
1750786424.398026,51507,0.6429178714752197
|
| 265 |
+
1750786562.827903,51607,0.6433768272399902
|
| 266 |
+
1750786703.119499,51707,0.644769012928009
|
| 267 |
+
1750786846.0823522,51807,0.6429730653762817
|
| 268 |
+
1750786988.558388,51907,0.6424209475517273
|
| 269 |
+
1750787130.541497,52007,0.6440122723579407
|
| 270 |
+
1750787272.7065651,52107,0.6456740498542786
|
| 271 |
+
1750787418.573973,52207,0.6420202255249023
|
| 272 |
+
1750787555.390604,52307,0.643044114112854
|
| 273 |
+
1750787695.138582,52407,0.6435778141021729
|
| 274 |
+
1750787835.8369222,52507,0.6435808539390564
|
| 275 |
+
1750787974.315232,52607,0.6420220732688904
|
| 276 |
+
1750788107.1543639,52707,0.6427579522132874
|
| 277 |
+
1750788233.893576,52807,0.6421329379081726
|
| 278 |
+
1750788361.041194,52907,0.6442506313323975
|
| 279 |
+
1750789258.583812,53020,0.646504819393158
|
| 280 |
+
1750789414.647167,53120,0.646045982837677
|
| 281 |
+
1750789569.265596,53220,0.6455931067466736
|
| 282 |
+
1750789725.542769,53320,0.6448137164115906
|
| 283 |
+
1750789886.1022232,53420,0.6451066136360168
|
| 284 |
+
1750790043.21908,53520,0.6452879905700684
|
| 285 |
+
1750790197.523573,53620,0.645102322101593
|
| 286 |
+
1750790350.642303,53720,0.6442456841468811
|
| 287 |
+
1750790506.690637,53820,0.6454032063484192
|
| 288 |
+
1750790665.733622,53920,0.6463805437088013
|
| 289 |
+
1750790828.387039,54020,0.6445643305778503
|
| 290 |
+
1750790989.766167,54120,0.645153820514679
|
| 291 |
+
1750791156.6413922,54220,0.6454338431358337
|
| 292 |
+
1750791315.356648,54320,0.6458118557929993
|
| 293 |
+
1750791468.996352,54420,0.6443431377410889
|
| 294 |
+
1750791607.7428281,54520,0.6441152095794678
|
| 295 |
+
1750791737.503336,54620,0.6445784568786621
|
| 296 |
+
1750791870.312643,54720,0.6458590626716614
|
| 297 |
+
1750792012.619204,54820,0.644293487071991
|
| 298 |
+
1750792173.602272,54920,0.6431084275245667
|
| 299 |
+
1750792345.805465,55020,0.6445538997650146
|
| 300 |
+
1750792504.482763,55120,0.6442646980285645
|
| 301 |
+
1750792662.77343,55220,0.6441115140914917
|
| 302 |
+
1750792826.4392908,55320,0.6436017155647278
|
| 303 |
+
1750792989.490883,55420,0.6445349454879761
|
| 304 |
+
1750793150.201789,55520,0.6444632411003113
|
| 305 |
+
1750793310.96963,55620,0.64418625831604
|
| 306 |
+
1750793471.689198,55720,0.64411461353302
|
| 307 |
+
1750793633.997622,55820,0.6446427702903748
|
| 308 |
+
1750793771.361454,55920,0.6421868801116943
|
| 309 |
+
1750793898.339762,56020,0.6449227929115295
|
| 310 |
+
1750794715.279263,56133,0.6482804417610168
|
| 311 |
+
1750794844.989433,56233,0.6481776833534241
|
| 312 |
+
1750794975.590302,56333,0.6468339562416077
|
| 313 |
+
1750795106.289749,56433,0.6473713517189026
|
| 314 |
+
1750795236.418029,56533,0.6481985449790955
|
| 315 |
+
1750795367.739757,56633,0.6463897228240967
|
| 316 |
+
1750795496.9391842,56733,0.6464086771011353
|
| 317 |
+
1750795625.8119879,56833,0.64665687084198
|
| 318 |
+
1750795755.705375,56933,0.6464675068855286
|
| 319 |
+
1750795899.6942139,57033,0.6448878645896912
|
| 320 |
+
1750796035.104954,57133,0.6464515924453735
|
| 321 |
+
1750796170.040396,57233,0.6453817486763
|
| 322 |
+
1750796298.5071092,57333,0.6441231369972229
|
| 323 |
+
1750796429.311371,57433,0.6473431587219238
|
| 324 |
+
1750796554.3222442,57533,0.6448198556900024
|
| 325 |
+
1750796680.498879,57633,0.6428216695785522
|
| 326 |
+
1750796808.865582,57733,0.6424534320831299
|
| 327 |
+
1750796936.229256,57833,0.6466004848480225
|
| 328 |
+
1750797064.150819,57933,0.6458192467689514
|
| 329 |
+
1750797191.786251,58033,0.6452126502990723
|
| 330 |
+
1750797319.7743769,58133,0.6464528441429138
|
| 331 |
+
1750797447.7426932,58233,0.6451654434204102
|
| 332 |
+
1750797575.880682,58333,0.6441439986228943
|
| 333 |
+
1750797703.1639428,58433,0.6445943713188171
|
| 334 |
+
1750797831.238049,58533,0.6445538997650146
|
| 335 |
+
1750797960.4362,58633,0.6459847092628479
|
| 336 |
+
1750798087.6576068,58733,0.645104169845581
|
| 337 |
+
1750798215.118301,58833,0.6446556448936462
|
| 338 |
+
1750798343.66004,58933,0.6447536945343018
|
| 339 |
+
1750798472.9229262,59033,0.6429099440574646
|
| 340 |
+
1750798601.3384461,59133,0.644894003868103
|
| 341 |
+
1750799411.726611,59246,0.648718535900116
|
| 342 |
+
1750799562.9718988,59346,0.6476292610168457
|
| 343 |
+
1750799693.71536,59446,0.6495410799980164
|
| 344 |
+
1750799825.5060081,59546,0.6477291584014893
|
| 345 |
+
1750799961.1823318,59646,0.6461580991744995
|
| 346 |
+
1750800093.838211,59746,0.6455496549606323
|
| 347 |
+
1750800222.3160698,59846,0.6458125114440918
|
| 348 |
+
1750800349.5177312,59946,0.6471948623657227
|
| 349 |
+
1750800476.517913,60046,0.6473553776741028
|
| 350 |
+
1750800603.51851,60146,0.646906852722168
|
| 351 |
+
1750800732.724886,60246,0.6469632387161255
|
| 352 |
+
1750800860.773531,60346,0.6476641893386841
|
| 353 |
+
1750800994.139314,60446,0.6453768610954285
|
| 354 |
+
1750801128.625023,60546,0.6463921666145325
|
| 355 |
+
1750801255.306974,60646,0.6476666927337646
|
| 356 |
+
1750801381.878898,60746,0.6465967893600464
|
| 357 |
+
1750801511.829727,60846,0.6465104222297668
|
| 358 |
+
1750801637.147579,60946,0.6430771946907043
|
| 359 |
+
1750801761.5167391,61046,0.6442641019821167
|
| 360 |
+
1750801888.139904,61146,0.6429436206817627
|
| 361 |
+
1750802014.436662,61246,0.6462824940681458
|
| 362 |
+
1750802155.183458,61346,0.6463149189949036
|
| 363 |
+
1750802280.9436212,61446,0.6453002691268921
|
| 364 |
+
1750802406.023594,61546,0.6450214385986328
|
| 365 |
+
1750802535.9519181,61646,0.644806981086731
|
| 366 |
+
1750802664.659689,61746,0.6448878645896912
|
| 367 |
+
1750802791.178323,61846,0.645933210849762
|
| 368 |
+
1750802914.792695,61946,0.6460140943527222
|
| 369 |
+
1750803039.588114,62046,0.6473057866096497
|
| 370 |
+
1750803177.726551,62146,0.646781861782074
|
| 371 |
+
1750803304.093698,62246,0.6445061564445496
|
| 372 |
+
1750804067.461365,62359,0.6465290784835815
|
| 373 |
+
1750804190.3091588,62459,0.6480244994163513
|
| 374 |
+
1750804314.915205,62559,0.6477248668670654
|
| 375 |
+
1750804440.797796,62659,0.6470097899436951
|
| 376 |
+
1750804569.121289,62759,0.6475049257278442
|
| 377 |
+
1750804694.160801,62859,0.6483259797096252
|
| 378 |
+
1750804823.114181,62959,0.6467800140380859
|
| 379 |
+
1750804950.241332,63059,0.6467885971069336
|
| 380 |
+
1750805079.370759,63159,0.6475520730018616
|
| 381 |
+
1750805208.732679,63259,0.6473645567893982
|
| 382 |
+
1750805338.920463,63359,0.6480992436408997
|
| 383 |
+
1750805469.50536,63459,0.6476678848266602
|
| 384 |
+
1750805608.147779,63559,0.647253692150116
|
| 385 |
+
1750805734.972286,63659,0.6449258327484131
|
| 386 |
+
1750805862.4968588,63759,0.6461672782897949
|
| 387 |
+
1750805988.45022,63859,0.6458964347839355
|
| 388 |
+
1750806123.169887,63959,0.647939920425415
|
| 389 |
+
1750806256.202873,64059,0.6484871506690979
|
| 390 |
+
1750806393.472111,64159,0.6470263600349426
|
| 391 |
+
1750806547.472808,64259,0.6475827097892761
|
| 392 |
+
1750806691.373438,64359,0.6466715931892395
|
| 393 |
+
1750806849.185892,64459,0.6458413004875183
|
| 394 |
+
1750806990.452616,64559,0.6463345885276794
|
| 395 |
+
1750807125.020457,64659,0.6483137011528015
|
| 396 |
+
1750807258.217799,64759,0.6457217931747437
|
| 397 |
+
1750807391.006315,64859,0.6449068784713745
|
| 398 |
+
1750807526.301808,64959,0.6463021039962769
|
| 399 |
+
1750807666.378596,65059,0.6463823318481445
|
| 400 |
+
1750807809.388958,65159,0.6463333368301392
|
| 401 |
+
1750807951.5288792,65259,0.6469528079032898
|
| 402 |
+
1750808092.63092,65359,0.6464956998825073
|
| 403 |
+
1750808938.0724359,65472,0.6471639275550842
|
| 404 |
+
1750809074.119413,65572,0.6487616300582886
|
| 405 |
+
1750809209.820045,65672,0.6496335864067078
|
| 406 |
+
1750809342.745279,65772,0.6473774313926697
|
| 407 |
+
1750809476.897032,65872,0.6502751111984253
|
| 408 |
+
1750809613.8903549,65972,0.6470043063163757
|
| 409 |
+
1750809747.467062,66072,0.6485587954521179
|
| 410 |
+
1750809872.350077,66172,0.6457512378692627
|
| 411 |
+
1750809997.746201,66272,0.6469516158103943
|
| 412 |
+
1750810121.1808681,66372,0.6476795077323914
|
| 413 |
+
1750810245.578121,66472,0.6476421356201172
|
| 414 |
+
1750810380.9506109,66572,0.6473364233970642
|
| 415 |
+
1750810512.713094,66672,0.6464914083480835
|
| 416 |
+
1750810633.451982,66772,0.6471274495124817
|
| 417 |
+
1750810755.61098,66872,0.6466550230979919
|
| 418 |
+
1750810881.8008718,66972,0.6478750109672546
|
| 419 |
+
1750811009.929226,67072,0.647144615650177
|
| 420 |
+
1750811138.430103,67172,0.6447530388832092
|
| 421 |
+
1750811266.86188,67272,0.6493449807167053
|
| 422 |
+
1750811395.091655,67372,0.6474999785423279
|
| 423 |
+
1750811523.0771708,67472,0.646438717842102
|
| 424 |
+
1750811651.687756,67572,0.6476942300796509
|
| 425 |
+
1750811783.037171,67672,0.6473756432533264
|
| 426 |
+
1750811917.467489,67772,0.6467695832252502
|
| 427 |
+
1750812047.789182,67872,0.6464332342147827
|
| 428 |
+
1750812173.486615,67972,0.645909309387207
|
| 429 |
+
1750812300.31341,68072,0.6483431458473206
|
| 430 |
+
1750812423.2969952,68172,0.6461353898048401
|
| 431 |
+
1750812550.8289511,68272,0.6480061411857605
|
| 432 |
+
1750812678.884728,68372,0.6471066474914551
|
| 433 |
+
1750812805.8926039,68472,0.6482904553413391
|
| 434 |
+
1750813604.1502929,68585,0.650193989276886
|
| 435 |
+
1750813731.223047,68685,0.6522193551063538
|
| 436 |
+
1750813857.732806,68785,0.6497787833213806
|
| 437 |
+
1750813988.8907259,68885,0.6475673913955688
|
| 438 |
+
1750814119.260182,68985,0.6473192572593689
|
| 439 |
+
1750814248.132718,69085,0.6485839486122131
|
| 440 |
+
1750814377.243381,69185,0.6481679081916809
|
| 441 |
+
1750814505.812932,69285,0.6453449726104736
|
| 442 |
+
1750814634.419463,69385,0.6485612988471985
|
| 443 |
+
1750814763.763408,69485,0.6482040286064148
|
| 444 |
+
1750814893.179103,69585,0.6466060280799866
|
| 445 |
+
1750815021.7957919,69685,0.6475943326950073
|
| 446 |
+
1750815150.5378401,69785,0.6472702026367188
|
| 447 |
+
1750815279.177554,69885,0.6487181186676025
|
| 448 |
+
1750815407.75913,69985,0.647549033164978
|
| 449 |
+
1750815535.9784162,70085,0.6479987502098083
|
| 450 |
+
1750815664.166744,70185,0.6472996473312378
|
| 451 |
+
1750815792.8404732,70285,0.6481243968009949
|
| 452 |
+
1750815921.5008771,70385,0.6492622494697571
|
| 453 |
+
1750816050.331789,70485,0.6485159397125244
|
| 454 |
+
1750816178.767342,70585,0.648588240146637
|
| 455 |
+
1750816306.1265862,70685,0.6481764912605286
|
| 456 |
+
1750816433.7588422,70785,0.6460980176925659
|
| 457 |
+
1750816561.7959769,70885,0.6478241682052612
|
| 458 |
+
1750816688.949652,70985,0.6460447311401367
|
| 459 |
+
1750816816.109664,71085,0.6486065983772278
|
| 460 |
+
1750816943.5686212,71185,0.6461054086685181
|
| 461 |
+
1750817070.7129998,71285,0.6465563774108887
|
| 462 |
+
1750817197.603124,71385,0.6458885073661804
|
| 463 |
+
1750817325.305474,71485,0.6494895815849304
|
| 464 |
+
1750817453.850782,71585,0.6482126116752625
|
| 465 |
+
1750818235.341888,71698,0.6487380266189575
|
| 466 |
+
1750818357.7280252,71798,0.6504595875740051
|
| 467 |
+
1750818481.7980459,71898,0.6481531858444214
|
| 468 |
+
1750818604.542128,71998,0.6499889492988586
|
| 469 |
+
1750818730.118634,72098,0.6505637168884277
|
| 470 |
+
1750818853.118078,72198,0.652050256729126
|
| 471 |
+
1750818974.4946308,72298,0.6500778198242188
|
| 472 |
+
1750819101.224452,72398,0.6472787857055664
|
| 473 |
+
1750819223.747354,72498,0.6491146087646484
|
| 474 |
+
1750819345.070886,72598,0.6484160423278809
|
| 475 |
+
1750819471.8304262,72698,0.6470293998718262
|
| 476 |
+
1750819611.2639308,72798,0.6482524275779724
|
| 477 |
+
1750819747.3201559,72898,0.649369478225708
|
| 478 |
+
1750819875.1713212,72998,0.6482445001602173
|
| 479 |
+
1750820012.2468321,73098,0.6460986733436584
|
| 480 |
+
1750820141.524881,73198,0.6498842239379883
|
| 481 |
+
1750820266.279186,73298,0.6474179029464722
|
| 482 |
+
1750820389.3882308,73398,0.6481782793998718
|
| 483 |
+
1750820513.378579,73498,0.6480472087860107
|
| 484 |
+
1750820635.609552,73598,0.6474031805992126
|
| 485 |
+
1750820758.281336,73698,0.6487126350402832
|
| 486 |
+
1750820880.701854,73798,0.6488277912139893
|
| 487 |
+
1750821000.5747242,73898,0.6469320058822632
|
| 488 |
+
1750821121.916939,73998,0.6461231708526611
|
| 489 |
+
1750821249.6419818,74098,0.6483603119850159
|
| 490 |
+
1750821376.5807638,74198,0.648493230342865
|
| 491 |
+
1750821501.672632,74298,0.648620069026947
|
| 492 |
+
1750821628.2809339,74398,0.6486850380897522
|
| 493 |
+
1750821753.825325,74498,0.6474534273147583
|
| 494 |
+
1750821880.516624,74598,0.6453946232795715
|
| 495 |
+
1750822006.337743,74698,0.6465251445770264
|
| 496 |
+
1750822768.416353,74811,0.6495864391326904
|
| 497 |
+
1750822893.587872,74911,0.648423433303833
|
| 498 |
+
1750823021.0332532,75011,0.6498854160308838
|
| 499 |
+
1750823146.548241,75111,0.6494516134262085
|
| 500 |
+
1750823273.2604852,75211,0.6486611366271973
|
| 501 |
+
1750823400.978105,75311,0.6499724388122559
|
| 502 |
+
1750823528.704538,75411,0.6516096591949463
|
| 503 |
+
1750823656.129306,75511,0.6495631337165833
|
| 504 |
+
1750823784.509665,75611,0.6481942534446716
|
| 505 |
+
1750823911.075191,75711,0.6502769589424133
|
| 506 |
+
1750824037.748201,75811,0.6512530446052551
|
| 507 |
+
1750824164.504883,75911,0.6486047506332397
|
| 508 |
+
1750824291.308073,76011,0.648506760597229
|
| 509 |
+
1750824418.683721,76111,0.6514491438865662
|
| 510 |
+
1750824546.1074848,76211,0.6475459337234497
|
| 511 |
+
1750824672.645638,76311,0.6464601755142212
|
| 512 |
+
1750824801.644362,76411,0.6480851769447327
|
| 513 |
+
1750824932.714845,76511,0.6480717062950134
|
| 514 |
+
1750825062.607043,76611,0.6477800011634827
|
| 515 |
+
1750825190.624785,76711,0.6490055322647095
|
| 516 |
+
1750825318.172802,76811,0.6477978229522705
|
| 517 |
+
1750825445.538239,76911,0.6483823657035828
|
| 518 |
+
1750825572.384473,77011,0.6485171318054199
|
| 519 |
+
1750825700.182718,77111,0.6484546661376953
|
| 520 |
+
1750825827.936522,77211,0.6489546298980713
|
| 521 |
+
1750825955.436144,77311,0.6471041440963745
|
| 522 |
+
1750826083.645848,77411,0.647773265838623
|
| 523 |
+
1750826210.9560719,77511,0.6483694911003113
|
| 524 |
+
1750826338.876941,77611,0.6459589600563049
|
| 525 |
+
1750826466.961814,77711,0.6473443508148193
|
| 526 |
+
1750826594.736065,77811,0.6475484371185303
|
| 527 |
+
1750827371.309746,77924,0.650373637676239
|
| 528 |
+
1750827498.822388,78024,0.6503903269767761
|
| 529 |
+
1750827628.078668,78124,0.6522181630134583
|
| 530 |
+
1750827755.8670568,78224,0.6497867703437805
|
| 531 |
+
1750827886.349826,78324,0.6486838459968567
|
| 532 |
+
1750828014.371655,78424,0.6503210663795471
|
| 533 |
+
1750828143.997387,78524,0.6503744125366211
|
| 534 |
+
1750828270.322001,78624,0.6480919122695923
|
| 535 |
+
1750828398.756683,78724,0.6498051285743713
|
| 536 |
+
1750828537.040342,78824,0.6486409306526184
|
| 537 |
+
1750828664.805336,78924,0.6491219401359558
|
| 538 |
+
1750828792.283156,79024,0.6501562595367432
|
| 539 |
+
1750828920.453904,79124,0.6485809087753296
|
| 540 |
+
1750829048.98982,79224,0.651231586933136
|
| 541 |
+
1750829176.8632069,79324,0.6481494903564453
|
| 542 |
+
1750829305.751989,79424,0.649010419845581
|
| 543 |
+
1750829433.509136,79524,0.6484068632125854
|
| 544 |
+
1750829562.596454,79624,0.6482904553413391
|
| 545 |
+
1750829690.386684,79724,0.6503511071205139
|
| 546 |
+
1750829817.7913349,79824,0.6483615040779114
|
| 547 |
+
1750829946.319406,79924,0.6486973166465759
|
| 548 |
+
1750830073.0913548,80024,0.6493173837661743
|
| 549 |
+
1750830201.1831071,80124,0.6492328643798828
|
| 550 |
+
1750830330.114635,80224,0.6475808620452881
|
| 551 |
+
1750830461.9016042,80324,0.6484013199806213
|
| 552 |
+
1750830591.590712,80424,0.647438108921051
|
| 553 |
+
1750830718.6005561,80524,0.6450526714324951
|
| 554 |
+
1750830850.0460238,80624,0.6485833525657654
|
| 555 |
+
1750830977.883887,80724,0.6485416889190674
|
| 556 |
+
1750831105.3197818,80824,0.6496464610099792
|
| 557 |
+
1750831232.8340611,80924,0.6479148268699646
|
| 558 |
+
1750832007.374463,81037,0.6519679427146912
|
| 559 |
+
1750832142.4253492,81137,0.6512359380722046
|
| 560 |
+
1750832271.85084,81237,0.6501054167747498
|
| 561 |
+
1750832398.0974,81337,0.650800883769989
|
| 562 |
+
1750832523.7339048,81437,0.6511005163192749
|
| 563 |
+
1750832649.128074,81537,0.6487438678741455
|
| 564 |
+
1750832774.8890471,81637,0.650283694267273
|
| 565 |
+
1750832899.332615,81737,0.6482524275779724
|
| 566 |
+
1750833024.3810802,81837,0.6513112783432007
|
| 567 |
+
1750833149.348097,81937,0.6498198509216309
|
| 568 |
+
1750833275.481499,82037,0.6512113809585571
|
| 569 |
+
1750833400.406284,82137,0.6496102809906006
|
| 570 |
+
1750833524.9375072,82237,0.6483327150344849
|
| 571 |
+
1750833650.5324612,82337,0.6503376364707947
|
| 572 |
+
1750833774.197678,82437,0.6503565907478333
|
| 573 |
+
1750833898.7189372,82537,0.6475784182548523
|
| 574 |
+
1750834023.5484319,82637,0.6491605639457703
|
| 575 |
+
1750834150.958664,82737,0.6483026742935181
|
| 576 |
+
1750834275.947628,82837,0.6492665410041809
|
| 577 |
+
1750834402.872994,82937,0.6511304974555969
|
| 578 |
+
1750834531.4198,83037,0.6469154357910156
|
| 579 |
+
1750834657.1064892,83137,0.647325336933136
|
| 580 |
+
1750834781.427836,83237,0.6476887464523315
|
| 581 |
+
1750834907.936564,83337,0.64775550365448
|
| 582 |
+
1750835031.289141,83437,0.6493425369262695
|
| 583 |
+
1750835154.701272,83537,0.6464117765426636
|
| 584 |
+
1750835279.25683,83637,0.649427056312561
|
| 585 |
+
1750835403.225004,83737,0.6485557556152344
|
| 586 |
+
1750835527.815967,83837,0.649300217628479
|
| 587 |
+
1750835651.535708,83937,0.647672176361084
|
| 588 |
+
1750835785.81686,84037,0.6468572020530701
|
| 589 |
+
1750836534.634183,84150,0.6491373181343079
|
| 590 |
+
1750836656.4746299,84250,0.6519834399223328
|
| 591 |
+
1750836779.568351,84350,0.6510815024375916
|
| 592 |
+
1750836903.123183,84450,0.649770200252533
|
| 593 |
+
1750837027.663439,84550,0.649480402469635
|
| 594 |
+
1750837150.8689868,84650,0.6516231894493103
|
| 595 |
+
1750837273.5473018,84750,0.6521262526512146
|
| 596 |
+
1750837396.218327,84850,0.6507377624511719
|
| 597 |
+
1750837518.651513,84950,0.6494099497795105
|
| 598 |
+
1750837642.729192,85050,0.6519491672515869
|
| 599 |
+
1750837767.173884,85150,0.64809250831604
|
| 600 |
+
1750837890.435542,85250,0.6499093174934387
|
| 601 |
+
1750838015.162685,85350,0.6489932537078857
|
| 602 |
+
1750838141.841892,85450,0.6509203314781189
|
| 603 |
+
1750838264.395839,85550,0.6487199664115906
|
| 604 |
+
1750838386.4672089,85650,0.6507787704467773
|
| 605 |
+
1750838508.305929,85750,0.6505226492881775
|
| 606 |
+
1750838630.9132092,85850,0.6481819748878479
|
| 607 |
+
1750838752.80761,85950,0.6477187275886536
|
| 608 |
+
1750838876.729792,86050,0.6482365131378174
|
| 609 |
+
1750839000.481405,86150,0.6497224569320679
|
| 610 |
+
1750839124.189415,86250,0.6488774418830872
|
| 611 |
+
1750839249.659301,86350,0.6483216881752014
|
| 612 |
+
1750839380.5320761,86450,0.6500600576400757
|
| 613 |
+
1750839511.1560152,86550,0.6495729088783264
|
| 614 |
+
1750839632.920226,86650,0.6476078629493713
|
| 615 |
+
1750839754.502992,86750,0.6494852900505066
|
| 616 |
+
1750839876.169039,86850,0.647813081741333
|
| 617 |
+
1750839997.6025548,86950,0.6475661993026733
|
| 618 |
+
1750840119.2894459,87050,0.6494755148887634
|
| 619 |
+
1750840241.411623,87150,0.6488339304924011
|
| 620 |
+
1750840989.9151518,87263,0.6530276536941528
|
| 621 |
+
1750841110.055352,87363,0.6512236595153809
|
| 622 |
+
1750841231.6609762,87463,0.6514938473701477
|
| 623 |
+
1750841356.174644,87563,0.6533076167106628
|
| 624 |
+
1750841478.984334,87663,0.6507328152656555
|
| 625 |
+
1750841601.728671,87763,0.6493033170700073
|
| 626 |
+
1750841729.127606,87863,0.6501617431640625
|
| 627 |
+
1750841854.381567,87963,0.6488339304924011
|
| 628 |
+
1750841978.459552,88063,0.6512187719345093
|
| 629 |
+
1750842099.504819,88163,0.6482947468757629
|
| 630 |
+
1750842226.03354,88263,0.6501396894454956
|
| 631 |
+
1750842348.654759,88363,0.6503596901893616
|
| 632 |
+
1750842472.3478332,88463,0.6484742760658264
|
| 633 |
+
1750842593.27832,88563,0.6495208144187927
|
| 634 |
+
1750842713.752249,88663,0.6514754891395569
|
| 635 |
+
1750842834.968949,88763,0.6502481698989868
|
| 636 |
+
1750842960.018307,88863,0.6500110030174255
|
| 637 |
+
1750843092.0033002,88963,0.6480465531349182
|
| 638 |
+
1750843212.1028411,89063,0.64847731590271
|
| 639 |
+
1750843331.764721,89163,0.6518651843070984
|
| 640 |
+
1750843456.1175401,89263,0.6491274237632751
|
| 641 |
+
1750843577.4408052,89363,0.6497352719306946
|
| 642 |
+
1750843698.726388,89463,0.6468449831008911
|
| 643 |
+
1750843820.6274261,89563,0.6505067348480225
|
| 644 |
+
1750843942.463534,89663,0.65004962682724
|
| 645 |
+
1750844064.361599,89763,0.6500349044799805
|
| 646 |
+
1750844185.531637,89863,0.6491274237632751
|
| 647 |
+
1750844307.055935,89963,0.6490735411643982
|
| 648 |
+
1750844428.23875,90063,0.64860600233078
|
| 649 |
+
1750844548.884526,90163,0.6485992670059204
|
| 650 |
+
1750844669.689771,90263,0.6477720737457275
|
| 651 |
+
1750845419.776615,90376,0.6516579985618591
|
| 652 |
+
1750845539.936069,90476,0.6498026847839355
|
| 653 |
+
1750845660.686485,90576,0.650747537612915
|
| 654 |
+
1750845781.7870138,90676,0.6518860459327698
|
| 655 |
+
1750845903.486742,90776,0.6498921513557434
|
| 656 |
+
1750846024.118847,90876,0.6507340669631958
|
| 657 |
+
1750846145.2169468,90976,0.6494007110595703
|
| 658 |
+
1750846266.158889,91076,0.6519228219985962
|
| 659 |
+
1750846388.121265,91176,0.6499393582344055
|
| 660 |
+
1750846511.9636748,91276,0.6514620184898376
|
| 661 |
+
1750846645.2285812,91376,0.6507598161697388
|
| 662 |
+
1750846776.012331,91476,0.6510759592056274
|
| 663 |
+
1750846901.397064,91576,0.6497622728347778
|
| 664 |
+
1750847023.2603798,91676,0.6497886180877686
|
| 665 |
+
1750847143.491034,91776,0.6475152969360352
|
| 666 |
+
1750847264.5538418,91876,0.649131715297699
|
| 667 |
+
1750847386.1762078,91976,0.6515067219734192
|
| 668 |
+
1750847508.7911491,92076,0.6488627195358276
|
| 669 |
+
1750847630.0856042,92176,0.6505784392356873
|
| 670 |
+
1750847751.669802,92276,0.6482217907905579
|
| 671 |
+
1750847871.691395,92376,0.6489706039428711
|
| 672 |
+
1750847992.1358988,92476,0.6497169137001038
|
| 673 |
+
1750848112.580328,92576,0.6478866338729858
|
| 674 |
+
1750848234.56429,92676,0.649066150188446
|
| 675 |
+
1750848355.870071,92776,0.6504510045051575
|
| 676 |
+
1750848476.582013,92876,0.6467065215110779
|
| 677 |
+
1750848607.900353,92976,0.6504276990890503
|
| 678 |
+
1750848738.0173771,93076,0.6495931148529053
|
| 679 |
+
1750848857.766874,93176,0.650150716304779
|
| 680 |
+
1750848979.737535,93276,0.6482169032096863
|
| 681 |
+
1750849099.877127,93376,0.6505275964736938
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750393244.648032,0,4.497038841247559
|
| 3 |
+
1750395455.19265,1,3.903747320175171
|
| 4 |
+
1750398368.11519,2,3.5344879627227783
|
| 5 |
+
1750401224.5931342,3,3.3312270641326904
|
| 6 |
+
1750404823.3122802,4,3.210529088973999
|
| 7 |
+
1750408456.2591732,5,3.1416213512420654
|
| 8 |
+
1750412752.2084851,6,3.0853068828582764
|
| 9 |
+
1750416996.0416489,7,3.050424098968506
|
| 10 |
+
1750421987.898771,8,3.011716842651367
|
| 11 |
+
1750426988.913014,9,2.989940643310547
|
| 12 |
+
1750431261.3392692,10,2.989818572998047
|
| 13 |
+
1750435597.5351439,11,2.9662227630615234
|
| 14 |
+
1750440150.407784,12,2.954162359237671
|
| 15 |
+
1750444543.849094,13,2.9454574584960938
|
| 16 |
+
1750448894.259244,14,2.938507318496704
|
| 17 |
+
1750453403.6882539,15,2.9323973655700684
|
| 18 |
+
1750457802.088578,16,2.928183078765869
|
| 19 |
+
1750462587.858046,17,2.9238781929016113
|
| 20 |
+
1750466837.808796,18,2.919602870941162
|
| 21 |
+
1750471047.37008,19,2.916750431060791
|
| 22 |
+
1750475286.553396,20,2.9133033752441406
|
| 23 |
+
1750479461.077869,21,2.910808801651001
|
| 24 |
+
1750483616.5842361,22,2.9080207347869873
|
| 25 |
+
1750487760.517968,23,2.905743360519409
|
| 26 |
+
1750491885.537745,24,2.903864860534668
|
| 27 |
+
1750496002.133448,25,2.9018428325653076
|
| 28 |
+
1750500111.205085,26,2.900128126144409
|
| 29 |
+
1750504211.5523431,27,2.8988354206085205
|
| 30 |
+
1750508312.8637319,28,2.8973073959350586
|
| 31 |
+
1750512402.2931461,29,2.89581298828125
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750242880.7459168,0,4.869692325592041
|
| 3 |
+
1750244191.138468,1,4.110507488250732
|
| 4 |
+
1750246227.2990952,2,3.639007806777954
|
| 5 |
+
1750248253.918893,3,3.4058213233947754
|
| 6 |
+
1750251015.6637871,4,3.2411768436431885
|
| 7 |
+
1750253768.7380931,5,3.1608352661132812
|
| 8 |
+
1750257214.117406,6,3.0938303470611572
|
| 9 |
+
1750260713.1403809,7,3.0553507804870605
|
| 10 |
+
1750265084.791737,8,3.0033442974090576
|
| 11 |
+
1750269591.634613,9,2.980733871459961
|
| 12 |
+
1750273913.096587,10,2.9817233085632324
|
| 13 |
+
1750278210.551297,11,2.9585886001586914
|
| 14 |
+
1750282506.163321,12,2.9470789432525635
|
| 15 |
+
1750286970.443168,13,2.9389889240264893
|
| 16 |
+
1750291458.6969159,14,2.9322216510772705
|
| 17 |
+
1750295931.3745222,15,2.9269731044769287
|
| 18 |
+
1750300503.985491,16,2.9222328662872314
|
| 19 |
+
1750304961.6577501,17,2.9180169105529785
|
| 20 |
+
1750309383.141315,18,2.9146981239318848
|
| 21 |
+
1750313785.064812,19,2.9115850925445557
|
| 22 |
+
1750318127.0641851,20,2.908963441848755
|
| 23 |
+
1750322430.64604,21,2.906283378601074
|
| 24 |
+
1750326698.7906518,22,2.904263734817505
|
| 25 |
+
1750330951.355423,23,2.9020612239837646
|
| 26 |
+
1750335187.655484,24,2.90006422996521
|
| 27 |
+
1750339399.5665002,25,2.8983960151672363
|
| 28 |
+
1750343602.1534889,26,2.8967831134796143
|
| 29 |
+
1750381999.790324,27,2.910587787628174
|
| 30 |
+
1750386648.843059,28,2.9034934043884277
|
| 31 |
+
1750391063.2994308,29,2.896620035171509
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749978550.906435,0,4.503419399261475
|
| 3 |
+
1749982198.575948,1,3.7155017852783203
|
| 4 |
+
1749986570.5654368,2,3.421501636505127
|
| 5 |
+
1749990918.852331,3,3.2091517448425293
|
| 6 |
+
1749995961.55019,4,3.133439540863037
|
| 7 |
+
1750001056.034247,5,3.056046724319458
|
| 8 |
+
1750006899.604149,6,3.0407228469848633
|
| 9 |
+
1750012834.238941,7,3.001800298690796
|
| 10 |
+
1750019498.535541,8,3.0090649127960205
|
| 11 |
+
1750026135.227586,9,2.9857029914855957
|
| 12 |
+
1750030424.814303,10,2.9864180088043213
|
| 13 |
+
1750034741.041949,11,2.9633243083953857
|
| 14 |
+
1750039109.9737952,12,2.951890468597412
|
| 15 |
+
1750043848.344384,13,2.943676710128784
|
| 16 |
+
1750048701.546577,14,2.9368529319763184
|
| 17 |
+
1750053246.211833,15,2.9313669204711914
|
| 18 |
+
1750057533.257866,16,2.9266626834869385
|
| 19 |
+
1750061807.049831,17,2.9223711490631104
|
| 20 |
+
1750066096.786038,18,2.91867995262146
|
| 21 |
+
1750070333.225527,19,2.9154393672943115
|
| 22 |
+
1750074521.0456681,20,2.912705659866333
|
| 23 |
+
1750078718.076233,21,2.9102835655212402
|
| 24 |
+
1750082899.140814,22,2.9077396392822266
|
| 25 |
+
1750087512.151513,23,2.905846357345581
|
| 26 |
+
1750091828.878402,24,2.903412103652954
|
| 27 |
+
1750096276.2888389,25,2.9015419483184814
|
| 28 |
+
1750100481.644725,26,2.9000253677368164
|
| 29 |
+
1750104739.717453,27,2.8983654975891113
|
| 30 |
+
1750109186.5023448,28,2.8976855278015137
|
| 31 |
+
1750113609.3978848,29,2.895890712738037
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749863283.3324292,0,4.141076564788818
|
| 3 |
+
1749864592.20433,1,3.486952543258667
|
| 4 |
+
1749866617.404851,2,3.261892080307007
|
| 5 |
+
1749868684.412531,3,3.062371253967285
|
| 6 |
+
1749871473.75741,4,3.018030881881714
|
| 7 |
+
1749875060.477828,5,2.9788944721221924
|
| 8 |
+
1749878531.724277,6,2.972249746322632
|
| 9 |
+
1749881971.612274,7,2.918388843536377
|
| 10 |
+
1749886106.593578,8,3.02687931060791
|
| 11 |
+
1749890253.398643,9,2.9919655323028564
|
| 12 |
+
1749894408.7118459,10,2.9894230365753174
|
| 13 |
+
1749898526.6077719,11,2.964907169342041
|
| 14 |
+
1749902645.408054,12,2.952655792236328
|
| 15 |
+
1749906756.7643032,13,2.943563938140869
|
| 16 |
+
1749910871.86694,14,2.9363276958465576
|
| 17 |
+
1749915142.938997,15,2.9653398990631104
|
| 18 |
+
1749919236.457516,16,2.950559616088867
|
| 19 |
+
1749923334.0531998,17,2.9297029972076416
|
| 20 |
+
1749927446.811945,18,2.9225661754608154
|
| 21 |
+
1749931565.0115268,19,2.9181556701660156
|
| 22 |
+
1749935700.2720928,20,2.914483070373535
|
| 23 |
+
1749940017.87676,21,2.9113500118255615
|
| 24 |
+
1749944700.7074828,22,2.9086496829986572
|
| 25 |
+
1749949409.596221,23,2.907172203063965
|
| 26 |
+
1749953801.5466728,24,2.9044301509857178
|
| 27 |
+
1749958169.537259,25,2.9020256996154785
|
| 28 |
+
1749962421.037086,26,2.900437355041504
|
| 29 |
+
1749966597.282641,27,2.8991353511810303
|
| 30 |
+
1749970740.188172,28,2.8973348140716553
|
| 31 |
+
1749974864.4389849,29,2.8956847190856934
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750632371.14848,0,4.48432731628418
|
| 3 |
+
1750634606.382995,1,3.7629923820495605
|
| 4 |
+
1750636890.23707,2,3.517442226409912
|
| 5 |
+
1750639149.8933601,3,3.3807172775268555
|
| 6 |
+
1750641343.348047,4,3.3065621852874756
|
| 7 |
+
1750643529.9231098,5,3.2629871368408203
|
| 8 |
+
1750645794.686304,6,3.239704132080078
|
| 9 |
+
1750648101.2390442,7,3.229910373687744
|
| 10 |
+
1750650363.3526568,8,3.241615056991577
|
| 11 |
+
1750652619.7843251,9,3.190913200378418
|
| 12 |
+
1750656967.762392,10,3.1016592979431152
|
| 13 |
+
1750661271.727958,11,3.0410385131835938
|
| 14 |
+
1750665520.585057,12,3.009328603744507
|
| 15 |
+
1750669774.234454,13,2.9884204864501953
|
| 16 |
+
1750673984.9646752,14,2.9735829830169678
|
| 17 |
+
1750678184.292531,15,2.961824417114258
|
| 18 |
+
1750682381.31706,16,2.95274019241333
|
| 19 |
+
1750686553.589227,17,2.9454822540283203
|
| 20 |
+
1750690784.6460009,18,2.9388856887817383
|
| 21 |
+
1750695602.3815339,19,2.9336090087890625
|
| 22 |
+
1750701233.219311,20,2.9288313388824463
|
| 23 |
+
1750705842.984583,21,2.925161361694336
|
| 24 |
+
1750710360.7942688,22,2.921215057373047
|
| 25 |
+
1750716082.206906,23,2.9179205894470215
|
| 26 |
+
1750720587.083864,24,2.91546368598938
|
| 27 |
+
1750725037.9899788,25,2.9129858016967773
|
| 28 |
+
1750729396.893383,26,2.9098708629608154
|
| 29 |
+
1750733776.534288,27,2.9085488319396973
|
| 30 |
+
1750738221.1560059,28,2.905959129333496
|
| 31 |
+
1750742708.6763039,29,2.9041709899902344
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750513695.726909,0,4.168796062469482
|
| 3 |
+
1750514985.9507,1,3.5498626232147217
|
| 4 |
+
1750516298.708396,2,3.385937213897705
|
| 5 |
+
1750517591.810358,3,3.2866876125335693
|
| 6 |
+
1750518885.955939,4,3.189992904663086
|
| 7 |
+
1750520180.033519,5,3.143275499343872
|
| 8 |
+
1750521474.176068,6,3.1730546951293945
|
| 9 |
+
1750522767.9193249,7,3.1737563610076904
|
| 10 |
+
1750542035.493311,8,3.481318235397339
|
| 11 |
+
1750543428.10198,9,3.4353036880493164
|
| 12 |
+
1750547793.590507,10,3.1064982414245605
|
| 13 |
+
1750552335.730495,11,3.03554105758667
|
| 14 |
+
1750556730.614615,12,3.002964973449707
|
| 15 |
+
1750561135.750268,13,2.982447385787964
|
| 16 |
+
1750565414.917825,14,2.9676835536956787
|
| 17 |
+
1750569689.9089541,15,2.956284284591675
|
| 18 |
+
1750573916.3536901,16,2.9475157260894775
|
| 19 |
+
1750578123.218335,17,2.9403586387634277
|
| 20 |
+
1750582314.4729521,18,2.934619426727295
|
| 21 |
+
1750586526.732241,19,2.929375171661377
|
| 22 |
+
1750590694.093346,20,2.9248242378234863
|
| 23 |
+
1750594847.352438,21,2.9210140705108643
|
| 24 |
+
1750599012.9660952,22,2.9179041385650635
|
| 25 |
+
1750603153.176775,23,2.914592981338501
|
| 26 |
+
1750607416.40974,24,2.9117836952209473
|
| 27 |
+
1750611744.08181,25,2.9093730449676514
|
| 28 |
+
1750616158.759497,26,2.906916856765747
|
| 29 |
+
1750620626.200702,27,2.9057676792144775
|
| 30 |
+
1750625418.3782148,28,2.9032325744628906
|
| 31 |
+
1750630090.167115,29,2.9016776084899902
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750117867.7551942,0,3.7190332412719727
|
| 3 |
+
1750122118.772188,1,3.18149995803833
|
| 4 |
+
1750126451.403994,2,3.078232765197754
|
| 5 |
+
1750130680.443696,3,3.0302648544311523
|
| 6 |
+
1750134907.800519,4,3.001476764678955
|
| 7 |
+
1750139213.399209,5,2.9822638034820557
|
| 8 |
+
1750143404.626784,6,2.9680933952331543
|
| 9 |
+
1750147564.933099,7,2.9573044776916504
|
| 10 |
+
1750151715.1421719,8,2.948808431625366
|
| 11 |
+
1750155848.0789561,9,2.941603422164917
|
| 12 |
+
1750159971.2616692,10,2.952218532562256
|
| 13 |
+
1750164079.575281,11,2.935922145843506
|
| 14 |
+
1750168191.7420218,12,2.928865671157837
|
| 15 |
+
1750172297.5916102,13,2.924445629119873
|
| 16 |
+
1750176453.961837,14,2.920473337173462
|
| 17 |
+
1750180850.016053,15,2.916942834854126
|
| 18 |
+
1750185154.177082,16,2.914234161376953
|
| 19 |
+
1750189391.2774699,17,2.912083387374878
|
| 20 |
+
1750193791.2887151,18,2.9088315963745117
|
| 21 |
+
1750198091.669888,19,2.906681776046753
|
| 22 |
+
1750202651.749294,20,2.904893159866333
|
| 23 |
+
1750207163.865692,21,2.9029510021209717
|
| 24 |
+
1750211713.375293,22,2.9017179012298584
|
| 25 |
+
1750216052.8991191,23,2.8993325233459473
|
| 26 |
+
1750220428.1077309,24,2.898499011993408
|
| 27 |
+
1750224709.561266,25,2.8970789909362793
|
| 28 |
+
1750228955.137394,26,2.8957550525665283
|
| 29 |
+
1750233176.7532659,27,2.8942177295684814
|
| 30 |
+
1750237381.096981,28,2.893112897872925
|
| 31 |
+
1750241562.5282292,29,2.891904592514038
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750851784.794701,0,4.511007785797119
|
| 3 |
+
1750853835.8761,1,3.705850601196289
|
| 4 |
+
1750855908.694172,2,3.5212864875793457
|
| 5 |
+
1750857984.476949,3,3.338186264038086
|
| 6 |
+
1750860045.676554,4,3.3243212699890137
|
| 7 |
+
1750862121.035478,5,3.2101285457611084
|
| 8 |
+
1750864200.552823,6,3.256943941116333
|
| 9 |
+
1750866328.966167,7,3.169743299484253
|
| 10 |
+
1750868555.664671,8,3.2560107707977295
|
| 11 |
+
1750870840.9499478,9,3.179680109024048
|
| 12 |
+
1750876091.3780549,10,3.0882112979888916
|
| 13 |
+
1750880660.639144,11,3.0279011726379395
|
| 14 |
+
1750885227.6555479,12,2.9967029094696045
|
| 15 |
+
1750889799.821455,13,2.9763381481170654
|
| 16 |
+
1750894456.987148,14,2.9617059230804443
|
| 17 |
+
1750899082.675252,15,2.9503867626190186
|
| 18 |
+
1750903736.3783329,16,2.941533327102661
|
| 19 |
+
1750908342.7633312,17,2.9340784549713135
|
| 20 |
+
1750913044.888563,18,2.927919387817383
|
| 21 |
+
1750917542.481531,19,2.9227824211120605
|
| 22 |
+
1750922072.556569,20,2.918488025665283
|
| 23 |
+
1750926571.310741,21,2.9141738414764404
|
| 24 |
+
1750931079.8687649,22,2.911165237426758
|
| 25 |
+
1750935536.838163,23,2.9075887203216553
|
| 26 |
+
1750939971.365792,24,2.904879093170166
|
| 27 |
+
1750944482.602001,25,2.902339458465576
|
| 28 |
+
1750948934.391331,26,2.90018367767334
|
| 29 |
+
1750953472.56209,27,2.8982222080230713
|
| 30 |
+
1750958261.6061661,28,2.8960745334625244
|
| 31 |
+
1750962989.888033,29,2.8941383361816406
|
archive-misc/runs_jsons/loss_epoch/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750744114.997982,0,4.144684791564941
|
| 3 |
+
1750745515.7019641,1,3.491670608520508
|
| 4 |
+
1750746911.6984901,2,3.4095680713653564
|
| 5 |
+
1750748303.8670888,3,3.2133724689483643
|
| 6 |
+
1750749682.481517,4,3.227614641189575
|
| 7 |
+
1750751071.4764748,5,3.0969951152801514
|
| 8 |
+
1750752447.519634,6,3.219228982925415
|
| 9 |
+
1750753816.043037,7,3.104700803756714
|
| 10 |
+
1750755208.793823,8,3.5905277729034424
|
| 11 |
+
1750756580.028522,9,3.432602882385254
|
| 12 |
+
1750760976.771947,10,3.0987296104431152
|
| 13 |
+
1750765539.2077901,11,3.03275203704834
|
| 14 |
+
1750770075.641336,12,3.001636028289795
|
| 15 |
+
1750774577.088722,13,2.9812886714935303
|
| 16 |
+
1750779086.689203,14,2.966902732849121
|
| 17 |
+
1750783996.870292,15,2.956063985824585
|
| 18 |
+
1750789102.207841,16,2.947303056716919
|
| 19 |
+
1750794584.888244,17,2.940584659576416
|
| 20 |
+
1750799274.569878,18,2.93465256690979
|
| 21 |
+
1750803944.550782,19,2.9295992851257324
|
| 22 |
+
1750808795.973741,20,2.925201892852783
|
| 23 |
+
1750813476.9015622,21,2.921534299850464
|
| 24 |
+
1750818113.2744079,22,2.9179158210754395
|
| 25 |
+
1750822645.292929,23,2.914977550506592
|
| 26 |
+
1750827242.8207428,24,2.9125137329101562
|
| 27 |
+
1750831878.2705438,25,2.9098050594329834
|
| 28 |
+
1750836412.47137,26,2.9085917472839355
|
| 29 |
+
1750840869.555746,27,2.906043291091919
|
| 30 |
+
1750845298.118175,28,2.904139518737793
|
| 31 |
+
1750849718.6242921,29,2.903306722640991
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750393244.648103,0,48.165138244628906
|
| 3 |
+
1750395455.192726,1,33.44196701049805
|
| 4 |
+
1750398368.115252,2,25.141826629638672
|
| 5 |
+
1750401224.593175,3,22.759912490844727
|
| 6 |
+
1750404823.31231,4,20.737628936767578
|
| 7 |
+
1750408456.259201,5,19.79881477355957
|
| 8 |
+
1750412752.2085462,6,19.20827865600586
|
| 9 |
+
1750416996.041706,7,18.66091537475586
|
| 10 |
+
1750421987.898804,8,18.275800704956055
|
| 11 |
+
1750426988.9130409,9,18.060943603515625
|
| 12 |
+
1750431261.3393211,10,17.835844039916992
|
| 13 |
+
1750435597.5352042,11,17.654146194458008
|
| 14 |
+
1750440150.407841,12,17.513704299926758
|
| 15 |
+
1750444543.849122,13,17.49494171142578
|
| 16 |
+
1750448894.259268,14,17.388378143310547
|
| 17 |
+
1750453403.688282,15,17.3646240234375
|
| 18 |
+
1750457802.088604,16,17.25867462158203
|
| 19 |
+
1750462587.8580768,17,17.154783248901367
|
| 20 |
+
1750466837.808824,18,17.124469757080078
|
| 21 |
+
1750471047.37011,19,17.113733291625977
|
| 22 |
+
1750475286.553422,20,17.124876022338867
|
| 23 |
+
1750479461.0779,21,17.068208694458008
|
| 24 |
+
1750483616.584269,22,17.040496826171875
|
| 25 |
+
1750487760.517992,23,17.030763626098633
|
| 26 |
+
1750491885.537775,24,16.933334350585938
|
| 27 |
+
1750496002.133483,25,16.948078155517578
|
| 28 |
+
1750500111.2051148,26,16.96770668029785
|
| 29 |
+
1750504211.552373,27,16.927907943725586
|
| 30 |
+
1750508312.8637671,28,16.89035987854004
|
| 31 |
+
1750512402.293206,29,16.9533748626709
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750242880.745945,0,53.867008209228516
|
| 3 |
+
1750244191.138497,1,37.6105842590332
|
| 4 |
+
1750246227.299125,2,26.738567352294922
|
| 5 |
+
1750248253.918934,3,23.37044906616211
|
| 6 |
+
1750251015.6638331,4,20.932710647583008
|
| 7 |
+
1750253768.7381241,5,20.084152221679688
|
| 8 |
+
1750257214.1177301,6,19.152809143066406
|
| 9 |
+
1750260713.140447,7,18.58722686767578
|
| 10 |
+
1750265084.791805,8,18.09245491027832
|
| 11 |
+
1750269591.634642,9,17.810033798217773
|
| 12 |
+
1750273913.0966349,10,17.79850959777832
|
| 13 |
+
1750278210.55135,11,17.607419967651367
|
| 14 |
+
1750282506.1633558,12,17.50511932373047
|
| 15 |
+
1750286970.443201,13,17.351274490356445
|
| 16 |
+
1750291458.696942,14,17.263975143432617
|
| 17 |
+
1750295931.374552,15,17.2078857421875
|
| 18 |
+
1750300503.9855201,16,17.132694244384766
|
| 19 |
+
1750304961.65779,17,17.121416091918945
|
| 20 |
+
1750309383.141357,18,17.135910034179688
|
| 21 |
+
1750313785.064841,19,17.052095413208008
|
| 22 |
+
1750318127.064212,20,17.08173370361328
|
| 23 |
+
1750322430.6460679,21,17.006471633911133
|
| 24 |
+
1750326698.7906811,22,17.02798843383789
|
| 25 |
+
1750330951.355455,23,16.987016677856445
|
| 26 |
+
1750335187.655519,24,16.947071075439453
|
| 27 |
+
1750339399.566528,25,17.024276733398438
|
| 28 |
+
1750343602.1535301,26,16.952640533447266
|
| 29 |
+
1750381999.7903502,27,17.084932327270508
|
| 30 |
+
1750386648.8430898,28,16.945514678955078
|
| 31 |
+
1750391063.299486,29,16.956756591796875
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_curriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749978550.906464,0,48.71048355102539
|
| 3 |
+
1749982198.575983,1,34.35873794555664
|
| 4 |
+
1749986570.565465,2,25.615686416625977
|
| 5 |
+
1749990918.8523571,3,22.831928253173828
|
| 6 |
+
1749995961.5502348,4,20.793249130249023
|
| 7 |
+
1750001056.034306,5,19.962661743164062
|
| 8 |
+
1750006899.604214,6,19.140541076660156
|
| 9 |
+
1750012834.2390242,7,18.668323516845703
|
| 10 |
+
1750019498.535569,8,18.27408790588379
|
| 11 |
+
1750026135.227616,9,17.97447967529297
|
| 12 |
+
1750030424.814328,10,17.869421005249023
|
| 13 |
+
1750034741.0419781,11,17.722238540649414
|
| 14 |
+
1750039109.973821,12,17.575319290161133
|
| 15 |
+
1750043848.344415,13,17.574447631835938
|
| 16 |
+
1750048701.5466409,14,17.354393005371094
|
| 17 |
+
1750053246.211863,15,17.31399154663086
|
| 18 |
+
1750057533.257897,16,17.21194839477539
|
| 19 |
+
1750061807.049879,17,17.228086471557617
|
| 20 |
+
1750066096.786062,18,17.149364471435547
|
| 21 |
+
1750070333.22556,19,17.173809051513672
|
| 22 |
+
1750074521.045703,20,17.086381912231445
|
| 23 |
+
1750078718.0762591,21,17.187509536743164
|
| 24 |
+
1750082899.14084,22,17.079730987548828
|
| 25 |
+
1750087512.1517591,23,17.033306121826172
|
| 26 |
+
1750091828.8784692,24,16.973848342895508
|
| 27 |
+
1750096276.288867,25,17.029022216796875
|
| 28 |
+
1750100481.644755,26,16.918643951416016
|
| 29 |
+
1750104739.71748,27,16.91028594970703
|
| 30 |
+
1750109186.502416,28,16.98614501953125
|
| 31 |
+
1750113609.3979561,29,16.994081497192383
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_curriculum-noloss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1749863283.332475,0,55.554115295410156
|
| 3 |
+
1749864592.20436,1,38.6469612121582
|
| 4 |
+
1749866617.4049091,2,28.14055824279785
|
| 5 |
+
1749868684.4125981,3,25.03399085998535
|
| 6 |
+
1749871473.757462,4,22.157758712768555
|
| 7 |
+
1749875060.4778938,5,21.59893035888672
|
| 8 |
+
1749878531.7243142,6,20.037084579467773
|
| 9 |
+
1749881971.612328,7,19.365510940551758
|
| 10 |
+
1749886106.593612,8,18.370973587036133
|
| 11 |
+
1749890253.398698,9,18.02777862548828
|
| 12 |
+
1749894408.711875,10,17.842618942260742
|
| 13 |
+
1749898526.6078122,11,17.59517478942871
|
| 14 |
+
1749902645.4080808,12,17.457857131958008
|
| 15 |
+
1749906756.764334,13,17.35848617553711
|
| 16 |
+
1749910871.8669991,14,17.400739669799805
|
| 17 |
+
1749915142.939046,15,17.950729370117188
|
| 18 |
+
1749919236.457552,16,17.27395248413086
|
| 19 |
+
1749923334.053243,17,17.22505760192871
|
| 20 |
+
1749927446.811975,18,17.116395950317383
|
| 21 |
+
1749931565.0115561,19,17.109756469726562
|
| 22 |
+
1749935700.272127,20,17.01395034790039
|
| 23 |
+
1749940017.8768482,21,16.97368812561035
|
| 24 |
+
1749944700.7075331,22,16.974573135375977
|
| 25 |
+
1749949409.596299,23,16.941125869750977
|
| 26 |
+
1749953801.546734,24,17.035322189331055
|
| 27 |
+
1749958169.537314,25,16.969554901123047
|
| 28 |
+
1749962421.037111,26,16.842958450317383
|
| 29 |
+
1749966597.282666,27,16.842815399169922
|
| 30 |
+
1749970740.1882,28,16.859209060668945
|
| 31 |
+
1749974864.439013,29,16.862390518188477
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_hybrid-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750632371.148538,0,49.487388610839844
|
| 3 |
+
1750634606.383071,1,34.278865814208984
|
| 4 |
+
1750636890.237139,2,27.99624252319336
|
| 5 |
+
1750639149.893426,3,25.216676712036133
|
| 6 |
+
1750641343.3481212,4,23.53975486755371
|
| 7 |
+
1750643529.923166,5,22.532695770263672
|
| 8 |
+
1750645794.686364,6,21.782365798950195
|
| 9 |
+
1750648101.239102,7,21.24361228942871
|
| 10 |
+
1750650363.352725,8,20.716150283813477
|
| 11 |
+
1750652619.7844238,9,20.394941329956055
|
| 12 |
+
1750656967.762419,10,19.212770462036133
|
| 13 |
+
1750661271.727992,11,18.654909133911133
|
| 14 |
+
1750665520.5851111,12,18.28499984741211
|
| 15 |
+
1750669774.2345128,13,17.99468421936035
|
| 16 |
+
1750673984.9647071,14,17.756744384765625
|
| 17 |
+
1750678184.292585,15,17.66889762878418
|
| 18 |
+
1750682381.317089,16,17.62046241760254
|
| 19 |
+
1750686553.589252,17,17.473724365234375
|
| 20 |
+
1750690784.6460302,18,17.390039443969727
|
| 21 |
+
1750695602.3816,19,17.322021484375
|
| 22 |
+
1750701233.21937,20,17.29917335510254
|
| 23 |
+
1750705842.984617,21,17.225284576416016
|
| 24 |
+
1750710360.794543,22,17.184450149536133
|
| 25 |
+
1750716082.20699,23,17.170568466186523
|
| 26 |
+
1750720587.083894,24,17.155235290527344
|
| 27 |
+
1750725037.990058,25,17.128530502319336
|
| 28 |
+
1750729396.893443,26,17.081998825073242
|
| 29 |
+
1750733776.534314,27,17.105083465576172
|
| 30 |
+
1750738221.15604,28,16.993898391723633
|
| 31 |
+
1750742708.676333,29,17.012083053588867
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_hybrid_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750513695.72694,0,55.51445007324219
|
| 3 |
+
1750514985.950729,1,39.2695198059082
|
| 4 |
+
1750516298.708423,2,31.212324142456055
|
| 5 |
+
1750517591.810386,3,27.47551155090332
|
| 6 |
+
1750518885.95597,4,25.029054641723633
|
| 7 |
+
1750520180.033547,5,23.71649932861328
|
| 8 |
+
1750521474.176096,6,22.651994705200195
|
| 9 |
+
1750522767.919355,7,22.043315887451172
|
| 10 |
+
1750542035.493428,8,22.119585037231445
|
| 11 |
+
1750543428.102027,9,21.723588943481445
|
| 12 |
+
1750547793.590572,10,19.168630599975586
|
| 13 |
+
1750552335.730549,11,18.56935691833496
|
| 14 |
+
1750556730.614645,12,18.131643295288086
|
| 15 |
+
1750561135.750296,13,17.852975845336914
|
| 16 |
+
1750565414.917851,14,17.68834114074707
|
| 17 |
+
1750569689.908984,15,17.585899353027344
|
| 18 |
+
1750573916.35372,16,17.48543357849121
|
| 19 |
+
1750578123.218365,17,17.39935874938965
|
| 20 |
+
1750582314.472981,18,17.333566665649414
|
| 21 |
+
1750586526.732269,19,17.398630142211914
|
| 22 |
+
1750590694.093396,20,17.14397430419922
|
| 23 |
+
1750594847.352469,21,17.201200485229492
|
| 24 |
+
1750599012.966125,22,17.124305725097656
|
| 25 |
+
1750603153.176806,23,17.060588836669922
|
| 26 |
+
1750607416.4097679,24,17.077545166015625
|
| 27 |
+
1750611744.081841,25,16.95123291015625
|
| 28 |
+
1750616158.7595599,26,17.02434730529785
|
| 29 |
+
1750620626.200762,27,16.964458465576172
|
| 30 |
+
1750625418.378272,28,16.929859161376953
|
| 31 |
+
1750630090.167186,29,16.960468292236328
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_noop_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750117867.755234,0,23.239450454711914
|
| 3 |
+
1750122118.772226,1,20.12681770324707
|
| 4 |
+
1750126451.404056,2,18.946428298950195
|
| 5 |
+
1750130680.443724,3,18.481264114379883
|
| 6 |
+
1750134907.800551,4,18.190732955932617
|
| 7 |
+
1750139213.39924,5,17.848926544189453
|
| 8 |
+
1750143404.62681,6,17.755733489990234
|
| 9 |
+
1750147564.933135,7,17.527202606201172
|
| 10 |
+
1750151715.142207,8,17.498178482055664
|
| 11 |
+
1750155848.0789828,9,17.42560386657715
|
| 12 |
+
1750159971.261696,10,17.40009117126465
|
| 13 |
+
1750164079.57537,11,17.289079666137695
|
| 14 |
+
1750168191.742054,12,17.23641014099121
|
| 15 |
+
1750172297.59164,13,17.132404327392578
|
| 16 |
+
1750176453.9619021,14,17.157405853271484
|
| 17 |
+
1750180850.0160792,15,17.11432456970215
|
| 18 |
+
1750185154.1771278,16,17.09980583190918
|
| 19 |
+
1750189391.2775,17,17.059040069580078
|
| 20 |
+
1750193791.288744,18,16.979639053344727
|
| 21 |
+
1750198091.669916,19,16.972339630126953
|
| 22 |
+
1750202651.749393,20,16.979339599609375
|
| 23 |
+
1750207163.865746,21,16.89175796508789
|
| 24 |
+
1750211713.375345,22,16.97710609436035
|
| 25 |
+
1750216052.899157,23,16.890480041503906
|
| 26 |
+
1750220428.1077561,24,16.858570098876953
|
| 27 |
+
1750224709.5613,25,16.90614891052246
|
| 28 |
+
1750228955.137419,26,16.897302627563477
|
| 29 |
+
1750233176.753309,27,16.809844970703125
|
| 30 |
+
1750237381.097017,28,16.8604679107666
|
| 31 |
+
1750241562.528254,29,16.772478103637695
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_sequential-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750851784.794734,0,47.883819580078125
|
| 3 |
+
1750853835.876152,1,34.170345306396484
|
| 4 |
+
1750855908.6942,2,27.97521209716797
|
| 5 |
+
1750857984.4770079,3,25.124433517456055
|
| 6 |
+
1750860045.676613,4,23.297279357910156
|
| 7 |
+
1750862121.035541,5,22.294965744018555
|
| 8 |
+
1750864200.552898,6,21.455820083618164
|
| 9 |
+
1750866328.966235,7,21.07961082458496
|
| 10 |
+
1750868555.6648161,8,20.36427116394043
|
| 11 |
+
1750870840.952002,9,20.15283966064453
|
| 12 |
+
1750876091.378129,10,19.164051055908203
|
| 13 |
+
1750880660.639204,11,18.36109733581543
|
| 14 |
+
1750885227.655615,12,18.093502044677734
|
| 15 |
+
1750889799.8215308,13,17.82415199279785
|
| 16 |
+
1750894456.987202,14,17.60459327697754
|
| 17 |
+
1750899082.675283,15,17.409502029418945
|
| 18 |
+
1750903736.3783958,16,17.403959274291992
|
| 19 |
+
1750908342.7635908,17,17.337602615356445
|
| 20 |
+
1750913044.888625,18,17.170236587524414
|
| 21 |
+
1750917542.481596,19,17.201303482055664
|
| 22 |
+
1750922072.5566359,20,17.051054000854492
|
| 23 |
+
1750926571.3109682,21,17.04094886779785
|
| 24 |
+
1750931079.868824,22,17.091703414916992
|
| 25 |
+
1750935536.840482,23,16.971879959106445
|
| 26 |
+
1750939971.36584,24,16.901966094970703
|
| 27 |
+
1750944482.602058,25,17.002233505249023
|
| 28 |
+
1750948934.391398,26,16.905183792114258
|
| 29 |
+
1750953472.564333,27,16.886463165283203
|
| 30 |
+
1750958261.6062322,28,16.828359603881836
|
| 31 |
+
1750962989.888096,29,16.814006805419922
|
archive-misc/runs_jsons/perplexityval_epoch/!code-decoder-v31-mega-licensed-1_sequential_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750744114.998019,0,55.897640228271484
|
| 3 |
+
1750745515.701992,1,39.123687744140625
|
| 4 |
+
1750746911.698519,2,31.01366424560547
|
| 5 |
+
1750748303.867115,3,27.91205596923828
|
| 6 |
+
1750749682.481547,4,25.122596740722656
|
| 7 |
+
1750751071.476505,5,24.014915466308594
|
| 8 |
+
1750752447.519661,6,22.796173095703125
|
| 9 |
+
1750753816.043069,7,22.192251205444336
|
| 10 |
+
1750755208.793853,8,21.739532470703125
|
| 11 |
+
1750756580.028548,9,21.410900115966797
|
| 12 |
+
1750760976.772001,10,19.162677764892578
|
| 13 |
+
1750765539.2078211,11,18.53352928161621
|
| 14 |
+
1750770075.641408,12,18.13855743408203
|
| 15 |
+
1750774577.088748,13,17.958200454711914
|
| 16 |
+
1750779086.6892831,14,17.704954147338867
|
| 17 |
+
1750783996.8703659,15,17.57247543334961
|
| 18 |
+
1750789102.2079082,16,17.522357940673828
|
| 19 |
+
1750794584.888319,17,17.44750213623047
|
| 20 |
+
1750799274.570146,18,17.346946716308594
|
| 21 |
+
1750803944.5508559,19,17.277511596679688
|
| 22 |
+
1750808795.973829,20,17.262670516967773
|
| 23 |
+
1750813476.90166,21,17.25432586669922
|
| 24 |
+
1750818113.274487,22,17.158058166503906
|
| 25 |
+
1750822645.295224,23,17.09587860107422
|
| 26 |
+
1750827242.820804,24,17.148212432861328
|
| 27 |
+
1750831878.270625,25,17.039690017700195
|
| 28 |
+
1750836412.471421,26,16.9765625
|
| 29 |
+
1750840869.5558138,27,16.93719482421875
|
| 30 |
+
1750845298.1182668,28,16.99267578125
|
| 31 |
+
1750849718.624556,29,16.954301834106445
|
archive-misc/runs_jsons/topkacc_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum-loss_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750393244.648108,0,0.6343427300453186
|
| 3 |
+
1750395455.192731,1,0.7130727171897888
|
| 4 |
+
1750398368.115256,2,0.7620786428451538
|
| 5 |
+
1750401224.593179,3,0.7874769568443298
|
| 6 |
+
1750404823.312315,4,0.8027870059013367
|
| 7 |
+
1750408456.2592058,5,0.8109784722328186
|
| 8 |
+
1750412752.20855,6,0.8182910084724426
|
| 9 |
+
1750416996.041711,7,0.822501540184021
|
| 10 |
+
1750421987.8988092,8,0.8276702761650085
|
| 11 |
+
1750426988.913045,9,0.83023601770401
|
| 12 |
+
1750431261.339326,10,0.8304120898246765
|
| 13 |
+
1750435597.535208,11,0.8330893516540527
|
| 14 |
+
1750440150.407846,12,0.8346453905105591
|
| 15 |
+
1750444543.8491259,13,0.8356961011886597
|
| 16 |
+
1750448894.2592719,14,0.8365774750709534
|
| 17 |
+
1750453403.688287,15,0.8373432755470276
|
| 18 |
+
1750457802.088609,16,0.8378559350967407
|
| 19 |
+
1750462587.858082,17,0.8383844494819641
|
| 20 |
+
1750466837.8088279,18,0.8389201164245605
|
| 21 |
+
1750471047.370114,19,0.8392801880836487
|
| 22 |
+
1750475286.553427,20,0.8397110104560852
|
| 23 |
+
1750479461.077905,21,0.8400309085845947
|
| 24 |
+
1750483616.5842738,22,0.8404346108436584
|
| 25 |
+
1750487760.517996,23,0.8406738638877869
|
| 26 |
+
1750491885.5377789,24,0.8409435749053955
|
| 27 |
+
1750496002.1334882,25,0.8411745429039001
|
| 28 |
+
1750500111.20512,26,0.8413821458816528
|
| 29 |
+
1750504211.5523782,27,0.8415029644966125
|
| 30 |
+
1750508312.863772,28,0.8417081236839294
|
| 31 |
+
1750512402.29321,29,0.8419042229652405
|
archive-misc/runs_jsons/topkacc_epoch/!code-decoder-v31-mega-licensed-1_anticurriculum_tensorboard.csv
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wall time,Step,Value
|
| 2 |
+
1750242880.745949,0,0.5624285340309143
|
| 3 |
+
1750244191.138502,1,0.6705055832862854
|
| 4 |
+
1750246227.2991302,2,0.7446311116218567
|
| 5 |
+
1750248253.9189382,3,0.7735044956207275
|
| 6 |
+
1750251015.663837,4,0.7975758910179138
|
| 7 |
+
1750253768.738128,5,0.807144045829773
|
| 8 |
+
1750257214.117735,6,0.81730055809021
|
| 9 |
+
1750260713.140451,7,0.8217986822128296
|
| 10 |
+
1750265084.791811,8,0.8293147683143616
|
| 11 |
+
1750269591.6346471,9,0.8318940997123718
|
| 12 |
+
1750273913.096639,10,0.831876814365387
|
| 13 |
+
1750278210.5513558,11,0.8344914317131042
|
| 14 |
+
1750282506.16336,12,0.8358903527259827
|
| 15 |
+
1750286970.4432058,13,0.8368845582008362
|
| 16 |
+
1750291458.6969469,14,0.8377507328987122
|
| 17 |
+
1750295931.374558,15,0.8383060097694397
|
| 18 |
+
1750300503.985525,16,0.8389614224433899
|
| 19 |
+
1750304961.6577952,17,0.8394600749015808
|
| 20 |
+
1750309383.141361,18,0.8399106860160828
|
| 21 |
+
1750313785.064845,19,0.8403005003929138
|
| 22 |
+
1750318127.0642161,20,0.8405484557151794
|
| 23 |
+
1750322430.646074,21,0.84091717004776
|
| 24 |
+
1750326698.790686,22,0.8411679267883301
|
| 25 |
+
1750330951.355459,23,0.8414445519447327
|
| 26 |
+
1750335187.6555252,24,0.8416553139686584
|
| 27 |
+
1750339399.5665321,25,0.8419271111488342
|
| 28 |
+
1750343602.153534,26,0.8420628309249878
|
| 29 |
+
1750381999.790354,27,0.8403863310813904
|
| 30 |
+
1750386648.843095,28,0.8411089181900024
|
| 31 |
+
1750391063.299506,29,0.8420304656028748
|