Instructions to use kevinpro/MetaMathOctopus-7B with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use kevinpro/MetaMathOctopus-7B with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("text-generation", model="kevinpro/MetaMathOctopus-7B")

# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained("kevinpro/MetaMathOctopus-7B")
model = AutoModelForCausalLM.from_pretrained("kevinpro/MetaMathOctopus-7B")
- Notebooks
- Google Colab
- Kaggle
- Local Apps
- vLLM
How to use kevinpro/MetaMathOctopus-7B with vLLM:
Install from pip and serve model
# Install vLLM from pip:
pip install vllm

# Start the vLLM server:
vllm serve "kevinpro/MetaMathOctopus-7B"

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:8000/v1/completions" \
    -H "Content-Type: application/json" \
    --data '{
        "model": "kevinpro/MetaMathOctopus-7B",
        "prompt": "Once upon a time,",
        "max_tokens": 512,
        "temperature": 0.5
    }'
Use Docker
docker model run hf.co/kevinpro/MetaMathOctopus-7B
- SGLang
How to use kevinpro/MetaMathOctopus-7B with SGLang:
Install from pip and serve model
# Install SGLang from pip:
pip install sglang

# Start the SGLang server:
python3 -m sglang.launch_server \
    --model-path "kevinpro/MetaMathOctopus-7B" \
    --host 0.0.0.0 \
    --port 30000

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \
    -H "Content-Type: application/json" \
    --data '{
        "model": "kevinpro/MetaMathOctopus-7B",
        "prompt": "Once upon a time,",
        "max_tokens": 512,
        "temperature": 0.5
    }'
Use Docker images
docker run --gpus all \
    --shm-size 32g \
    -p 30000:30000 \
    -v ~/.cache/huggingface:/root/.cache/huggingface \
    --env "HF_TOKEN=<secret>" \
    --ipc=host \
    lmsysorg/sglang:latest \
    python3 -m sglang.launch_server \
    --model-path "kevinpro/MetaMathOctopus-7B" \
    --host 0.0.0.0 \
    --port 30000

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \
    -H "Content-Type: application/json" \
    --data '{
        "model": "kevinpro/MetaMathOctopus-7B",
        "prompt": "Once upon a time,",
        "max_tokens": 512,
        "temperature": 0.5
    }'
- Docker Model Runner
How to use kevinpro/MetaMathOctopus-7B with Docker Model Runner:
docker model run hf.co/kevinpro/MetaMathOctopus-7B
| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 0.9997951239500102, | |
| "eval_steps": 500, | |
| "global_step": 3660, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.545454545454546e-08, | |
| "loss": 0.2252, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 9.090909090909091e-08, | |
| "loss": 0.1907, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.3636363636363637e-07, | |
| "loss": 0.1524, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 1.8181818181818183e-07, | |
| "loss": 0.153, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 2.2727272727272729e-07, | |
| "loss": 0.1352, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 2.7272727272727274e-07, | |
| "loss": 0.1773, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 3.181818181818182e-07, | |
| "loss": 0.13, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 3.6363636363636366e-07, | |
| "loss": 0.1289, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.090909090909091e-07, | |
| "loss": 0.1203, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 4.5454545454545457e-07, | |
| "loss": 0.1379, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 5.000000000000001e-07, | |
| "loss": 0.1351, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 5.454545454545455e-07, | |
| "loss": 0.1433, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 5.90909090909091e-07, | |
| "loss": 0.1517, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 6.363636363636364e-07, | |
| "loss": 0.1494, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 6.818181818181818e-07, | |
| "loss": 0.131, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 7.272727272727273e-07, | |
| "loss": 0.1199, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 7.727272727272727e-07, | |
| "loss": 0.1618, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 8.181818181818182e-07, | |
| "loss": 0.1546, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 8.636363636363637e-07, | |
| "loss": 0.177, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.090909090909091e-07, | |
| "loss": 0.1109, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 9.545454545454548e-07, | |
| "loss": 0.121, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.0000000000000002e-06, | |
| "loss": 0.1073, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.0454545454545456e-06, | |
| "loss": 0.1164, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.090909090909091e-06, | |
| "loss": 0.1049, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.1363636363636364e-06, | |
| "loss": 0.1206, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.181818181818182e-06, | |
| "loss": 0.1182, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.2272727272727274e-06, | |
| "loss": 0.1643, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.2727272727272728e-06, | |
| "loss": 0.1036, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.3181818181818182e-06, | |
| "loss": 0.1423, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.3636363636363636e-06, | |
| "loss": 0.1191, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.409090909090909e-06, | |
| "loss": 0.1062, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.4545454545454546e-06, | |
| "loss": 0.1245, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.5e-06, | |
| "loss": 0.1187, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.5454545454545454e-06, | |
| "loss": 0.1194, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.590909090909091e-06, | |
| "loss": 0.1112, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.6363636363636365e-06, | |
| "loss": 0.1227, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.6818181818181819e-06, | |
| "loss": 0.126, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.7272727272727275e-06, | |
| "loss": 0.118, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.7727272727272729e-06, | |
| "loss": 0.1086, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.8181818181818183e-06, | |
| "loss": 0.1217, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.863636363636364e-06, | |
| "loss": 0.1152, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.9090909090909095e-06, | |
| "loss": 0.1321, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 1.954545454545455e-06, | |
| "loss": 0.1188, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.0000000000000003e-06, | |
| "loss": 0.1198, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.0454545454545457e-06, | |
| "loss": 0.129, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.090909090909091e-06, | |
| "loss": 0.1448, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.1363636363636365e-06, | |
| "loss": 0.1137, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.181818181818182e-06, | |
| "loss": 0.1178, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.2272727272727274e-06, | |
| "loss": 0.1101, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.2727272727272728e-06, | |
| "loss": 0.1099, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.318181818181818e-06, | |
| "loss": 0.1629, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.363636363636364e-06, | |
| "loss": 0.0881, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.4090909090909094e-06, | |
| "loss": 0.1063, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 2.454545454545455e-06, | |
| "loss": 0.1441, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.1068, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.5454545454545456e-06, | |
| "loss": 0.1715, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.590909090909091e-06, | |
| "loss": 0.1086, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.6363636363636364e-06, | |
| "loss": 0.1168, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.6818181818181822e-06, | |
| "loss": 0.0977, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.7272727272727272e-06, | |
| "loss": 0.1334, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.772727272727273e-06, | |
| "loss": 0.1236, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.818181818181818e-06, | |
| "loss": 0.1362, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.863636363636364e-06, | |
| "loss": 0.1068, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.9090909090909093e-06, | |
| "loss": 0.0958, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 2.954545454545455e-06, | |
| "loss": 0.1218, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3e-06, | |
| "loss": 0.1427, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.045454545454546e-06, | |
| "loss": 0.1211, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.090909090909091e-06, | |
| "loss": 0.1288, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.1363636363636367e-06, | |
| "loss": 0.127, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.181818181818182e-06, | |
| "loss": 0.1224, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.227272727272728e-06, | |
| "loss": 0.122, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.272727272727273e-06, | |
| "loss": 0.1165, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.3181818181818188e-06, | |
| "loss": 0.1229, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.3636363636363637e-06, | |
| "loss": 0.0932, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.409090909090909e-06, | |
| "loss": 0.1259, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.454545454545455e-06, | |
| "loss": 0.1216, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.5e-06, | |
| "loss": 0.1035, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.5454545454545458e-06, | |
| "loss": 0.1242, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.590909090909091e-06, | |
| "loss": 0.1384, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.6363636363636366e-06, | |
| "loss": 0.1474, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.681818181818182e-06, | |
| "loss": 0.0985, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.727272727272728e-06, | |
| "loss": 0.1536, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.772727272727273e-06, | |
| "loss": 0.1166, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.818181818181819e-06, | |
| "loss": 0.1153, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.863636363636364e-06, | |
| "loss": 0.1065, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.90909090909091e-06, | |
| "loss": 0.1319, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 3.954545454545454e-06, | |
| "loss": 0.1217, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.000000000000001e-06, | |
| "loss": 0.1, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.045454545454546e-06, | |
| "loss": 0.0876, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.0909090909090915e-06, | |
| "loss": 0.1251, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 4.136363636363637e-06, | |
| "loss": 0.1044, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.181818181818182e-06, | |
| "loss": 0.0912, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.227272727272728e-06, | |
| "loss": 0.1118, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.272727272727273e-06, | |
| "loss": 0.0992, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.3181818181818185e-06, | |
| "loss": 0.1134, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.363636363636364e-06, | |
| "loss": 0.1105, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.409090909090909e-06, | |
| "loss": 0.1113, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.454545454545455e-06, | |
| "loss": 0.1341, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.5e-06, | |
| "loss": 0.1159, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.5454545454545455e-06, | |
| "loss": 0.1014, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.590909090909092e-06, | |
| "loss": 0.1018, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.636363636363636e-06, | |
| "loss": 0.1135, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.681818181818183e-06, | |
| "loss": 0.1102, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.727272727272728e-06, | |
| "loss": 0.0976, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.772727272727273e-06, | |
| "loss": 0.1276, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.818181818181819e-06, | |
| "loss": 0.1053, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.863636363636364e-06, | |
| "loss": 0.1151, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.90909090909091e-06, | |
| "loss": 0.1026, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.954545454545455e-06, | |
| "loss": 0.1383, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 5e-06, | |
| "loss": 0.1121, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999999021066877e-06, | |
| "loss": 0.1399, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999996084268271e-06, | |
| "loss": 0.1007, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999991189606484e-06, | |
| "loss": 0.097, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.9999843370853485e-06, | |
| "loss": 0.1173, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.99997552671023e-06, | |
| "loss": 0.1018, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999964758488031e-06, | |
| "loss": 0.1211, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999952032427183e-06, | |
| "loss": 0.0957, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999937348537652e-06, | |
| "loss": 0.0992, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999920706830939e-06, | |
| "loss": 0.1016, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.9999021073200754e-06, | |
| "loss": 0.1238, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999881550019628e-06, | |
| "loss": 0.1168, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999859034945696e-06, | |
| "loss": 0.1057, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999834562115912e-06, | |
| "loss": 0.1131, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999808131549443e-06, | |
| "loss": 0.1323, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.9997797432669855e-06, | |
| "loss": 0.1079, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999749397290774e-06, | |
| "loss": 0.0935, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.999717093644572e-06, | |
| "loss": 0.0909, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 4.9996828323536805e-06, | |
| "loss": 0.1146, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9996466134449286e-06, | |
| "loss": 0.124, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999608436946682e-06, | |
| "loss": 0.113, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999568302888839e-06, | |
| "loss": 0.136, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999526211302828e-06, | |
| "loss": 0.1092, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999482162221617e-06, | |
| "loss": 0.1103, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9994361556797e-06, | |
| "loss": 0.0976, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9993881917131086e-06, | |
| "loss": 0.1015, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999338270359403e-06, | |
| "loss": 0.1112, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9992863916576804e-06, | |
| "loss": 0.1057, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9992325556485705e-06, | |
| "loss": 0.1073, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999176762374234e-06, | |
| "loss": 0.1582, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999119011878364e-06, | |
| "loss": 0.0982, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.999059304206189e-06, | |
| "loss": 0.1067, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998997639404468e-06, | |
| "loss": 0.12, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998934017521495e-06, | |
| "loss": 0.1189, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998868438607094e-06, | |
| "loss": 0.1048, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998800902712623e-06, | |
| "loss": 0.1011, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998731409890972e-06, | |
| "loss": 0.1225, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9986599601965656e-06, | |
| "loss": 0.1147, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998586553685359e-06, | |
| "loss": 0.1104, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9985111904148384e-06, | |
| "loss": 0.098, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998433870444026e-06, | |
| "loss": 0.1045, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9983545938334756e-06, | |
| "loss": 0.1044, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.99827336064527e-06, | |
| "loss": 0.1271, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998190170943028e-06, | |
| "loss": 0.0903, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998105024791898e-06, | |
| "loss": 0.1312, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.998017922258564e-06, | |
| "loss": 0.1015, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.99792886341124e-06, | |
| "loss": 0.1003, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.99783784831967e-06, | |
| "loss": 0.1076, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.997744877055134e-06, | |
| "loss": 0.107, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.997649949690442e-06, | |
| "loss": 0.0996, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.997553066299934e-06, | |
| "loss": 0.1107, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9974542269594865e-06, | |
| "loss": 0.1183, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.997353431746504e-06, | |
| "loss": 0.0917, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.997250680739923e-06, | |
| "loss": 0.1049, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 4.9971459740202145e-06, | |
| "loss": 0.1156, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9970393116693785e-06, | |
| "loss": 0.1187, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996930693770947e-06, | |
| "loss": 0.1258, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996820120409983e-06, | |
| "loss": 0.1242, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996707591673084e-06, | |
| "loss": 0.1233, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9965931076483735e-06, | |
| "loss": 0.122, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.99647666842551e-06, | |
| "loss": 0.1191, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996358274095684e-06, | |
| "loss": 0.1057, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996237924751616e-06, | |
| "loss": 0.1347, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.996115620487554e-06, | |
| "loss": 0.1271, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9959913613992824e-06, | |
| "loss": 0.1199, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9958651475841145e-06, | |
| "loss": 0.1241, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.995736979140894e-06, | |
| "loss": 0.1105, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.995606856169995e-06, | |
| "loss": 0.0962, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.995474778773322e-06, | |
| "loss": 0.1343, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9953407470543126e-06, | |
| "loss": 0.1089, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.995204761117933e-06, | |
| "loss": 0.1176, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9950668210706795e-06, | |
| "loss": 0.1005, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.994926927020579e-06, | |
| "loss": 0.1115, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.994785079077192e-06, | |
| "loss": 0.0989, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.994641277351601e-06, | |
| "loss": 0.0739, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9944955219564285e-06, | |
| "loss": 0.1092, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9943478130058194e-06, | |
| "loss": 0.1279, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.994198150615454e-06, | |
| "loss": 0.1004, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.994046534902537e-06, | |
| "loss": 0.0962, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.993892965985808e-06, | |
| "loss": 0.1083, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9937374439855336e-06, | |
| "loss": 0.094, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.993579969023509e-06, | |
| "loss": 0.1045, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.99342054122306e-06, | |
| "loss": 0.136, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.9932591607090456e-06, | |
| "loss": 0.1048, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.993095827607846e-06, | |
| "loss": 0.1106, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.992930542047377e-06, | |
| "loss": 0.1058, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.992763304157081e-06, | |
| "loss": 0.0895, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.99259411406793e-06, | |
| "loss": 0.1029, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.992422971912425e-06, | |
| "loss": 0.1081, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.992249877824594e-06, | |
| "loss": 0.1217, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.992074831939997e-06, | |
| "loss": 0.1402, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 4.991897834395718e-06, | |
| "loss": 0.1022, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.991718885330375e-06, | |
| "loss": 0.1274, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9915379848841086e-06, | |
| "loss": 0.1354, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.991355133198592e-06, | |
| "loss": 0.1096, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.991170330417024e-06, | |
| "loss": 0.1334, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990983576684133e-06, | |
| "loss": 0.1234, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990794872146175e-06, | |
| "loss": 0.105, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990604216950932e-06, | |
| "loss": 0.1268, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990411611247714e-06, | |
| "loss": 0.1184, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990217055187363e-06, | |
| "loss": 0.0951, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.990020548922241e-06, | |
| "loss": 0.1081, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9898220926062445e-06, | |
| "loss": 0.1154, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.989621686394792e-06, | |
| "loss": 0.1125, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.989419330444831e-06, | |
| "loss": 0.093, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.989215024914836e-06, | |
| "loss": 0.1146, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.989008769964809e-06, | |
| "loss": 0.1185, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9888005657562766e-06, | |
| "loss": 0.114, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.988590412452293e-06, | |
| "loss": 0.0982, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.988378310217441e-06, | |
| "loss": 0.0856, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.988164259217827e-06, | |
| "loss": 0.1101, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.987948259621083e-06, | |
| "loss": 0.1235, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.987730311596369e-06, | |
| "loss": 0.1175, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.987510415314371e-06, | |
| "loss": 0.0984, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.987288570947298e-06, | |
| "loss": 0.1035, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.987064778668888e-06, | |
| "loss": 0.128, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.986839038654406e-06, | |
| "loss": 0.1237, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.986611351080633e-06, | |
| "loss": 0.0991, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.986381716125887e-06, | |
| "loss": 0.1062, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.986150133970004e-06, | |
| "loss": 0.0934, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.985916604794348e-06, | |
| "loss": 0.1157, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.985681128781804e-06, | |
| "loss": 0.092, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.985443706116787e-06, | |
| "loss": 0.1161, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.985204336985232e-06, | |
| "loss": 0.1143, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.9849630215746e-06, | |
| "loss": 0.0983, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.984719760073877e-06, | |
| "loss": 0.1122, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.984474552673573e-06, | |
| "loss": 0.1035, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 4.984227399565719e-06, | |
| "loss": 0.1136, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.983978300943876e-06, | |
| "loss": 0.1252, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.98372725700312e-06, | |
| "loss": 0.1366, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.983474267940059e-06, | |
| "loss": 0.1075, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.983219333952819e-06, | |
| "loss": 0.1007, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.98296245524105e-06, | |
| "loss": 0.1037, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.982703632005926e-06, | |
| "loss": 0.1255, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.982442864450145e-06, | |
| "loss": 0.1031, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.982180152777925e-06, | |
| "loss": 0.1414, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.981915497195007e-06, | |
| "loss": 0.1166, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.981648897908656e-06, | |
| "loss": 0.0959, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.981380355127658e-06, | |
| "loss": 0.1237, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.981109869062323e-06, | |
| "loss": 0.0893, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.980837439924479e-06, | |
| "loss": 0.0962, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.980563067927478e-06, | |
| "loss": 0.1064, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.980286753286196e-06, | |
| "loss": 0.1223, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9800084962170235e-06, | |
| "loss": 0.1097, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.97972829693788e-06, | |
| "loss": 0.124, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.979446155668202e-06, | |
| "loss": 0.134, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.979162072628947e-06, | |
| "loss": 0.1425, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.978876048042593e-06, | |
| "loss": 0.1046, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9785880821331415e-06, | |
| "loss": 0.1074, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.97829817512611e-06, | |
| "loss": 0.143, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.978006327248537e-06, | |
| "loss": 0.1269, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.977712538728985e-06, | |
| "loss": 0.0954, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.977416809797531e-06, | |
| "loss": 0.0877, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.977119140685775e-06, | |
| "loss": 0.1366, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.976819531626838e-06, | |
| "loss": 0.0985, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.976517982855353e-06, | |
| "loss": 0.1079, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.976214494607481e-06, | |
| "loss": 0.1226, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.975909067120895e-06, | |
| "loss": 0.1224, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.975601700634791e-06, | |
| "loss": 0.1035, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9752923953898826e-06, | |
| "loss": 0.1174, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.974981151628398e-06, | |
| "loss": 0.1187, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.9746679695940904e-06, | |
| "loss": 0.0918, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.974352849532226e-06, | |
| "loss": 0.1126, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.974035791689588e-06, | |
| "loss": 0.1013, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 4.973716796314482e-06, | |
| "loss": 0.1011, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.973395863656726e-06, | |
| "loss": 0.1181, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.973072993967658e-06, | |
| "loss": 0.1219, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.9727481875001326e-06, | |
| "loss": 0.1058, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.972421444508521e-06, | |
| "loss": 0.0898, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.972092765248709e-06, | |
| "loss": 0.135, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.971762149978103e-06, | |
| "loss": 0.118, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.971429598955623e-06, | |
| "loss": 0.106, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.971095112441703e-06, | |
| "loss": 0.1127, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.970758690698297e-06, | |
| "loss": 0.1077, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.970420333988873e-06, | |
| "loss": 0.1083, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.97008004257841e-06, | |
| "loss": 0.1172, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.969737816733411e-06, | |
| "loss": 0.0997, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.969393656721886e-06, | |
| "loss": 0.1076, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.969047562813363e-06, | |
| "loss": 0.1303, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.968699535278886e-06, | |
| "loss": 0.0969, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.968349574391009e-06, | |
| "loss": 0.1082, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.967997680423804e-06, | |
| "loss": 0.097, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.967643853652856e-06, | |
| "loss": 0.1287, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.967288094355262e-06, | |
| "loss": 0.1023, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.9669304028096345e-06, | |
| "loss": 0.1069, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.966570779296098e-06, | |
| "loss": 0.1031, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.96620922409629e-06, | |
| "loss": 0.1261, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.965845737493363e-06, | |
| "loss": 0.1326, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.965480319771978e-06, | |
| "loss": 0.0959, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.9651129712183116e-06, | |
| "loss": 0.0944, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.9647436921200514e-06, | |
| "loss": 0.1297, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.964372482766397e-06, | |
| "loss": 0.1335, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.96399934344806e-06, | |
| "loss": 0.1158, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.963624274457264e-06, | |
| "loss": 0.114, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.963247276087742e-06, | |
| "loss": 0.1129, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.962868348634737e-06, | |
| "loss": 0.0995, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.962487492395008e-06, | |
| "loss": 0.0995, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.96210470766682e-06, | |
| "loss": 0.096, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.961719994749948e-06, | |
| "loss": 0.0992, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.9613333539456805e-06, | |
| "loss": 0.1059, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.960944785556814e-06, | |
| "loss": 0.1192, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 4.960554289887653e-06, | |
| "loss": 0.126, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.960161867244015e-06, | |
| "loss": 0.1059, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.959767517933222e-06, | |
| "loss": 0.09, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.959371242264109e-06, | |
| "loss": 0.101, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.958973040547016e-06, | |
| "loss": 0.1057, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.958572913093795e-06, | |
| "loss": 0.1063, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.958170860217804e-06, | |
| "loss": 0.1024, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.95776688223391e-06, | |
| "loss": 0.0998, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9573609794584856e-06, | |
| "loss": 0.0864, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.956953152209412e-06, | |
| "loss": 0.1063, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9565434008060795e-06, | |
| "loss": 0.1108, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.956131725569382e-06, | |
| "loss": 0.1233, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9557181268217225e-06, | |
| "loss": 0.0997, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.955302604887008e-06, | |
| "loss": 0.1027, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.954885160090653e-06, | |
| "loss": 0.1098, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.95446579275958e-06, | |
| "loss": 0.0987, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.954044503222214e-06, | |
| "loss": 0.1041, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.953621291808486e-06, | |
| "loss": 0.0998, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9531961588498325e-06, | |
| "loss": 0.0975, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.952769104679195e-06, | |
| "loss": 0.1058, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.95234012963102e-06, | |
| "loss": 0.093, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9519092340412575e-06, | |
| "loss": 0.106, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.951476418247362e-06, | |
| "loss": 0.1187, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.951041682588291e-06, | |
| "loss": 0.1201, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9506050274045076e-06, | |
| "loss": 0.1153, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.950166453037976e-06, | |
| "loss": 0.0906, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9497259598321625e-06, | |
| "loss": 0.1184, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.949283548132041e-06, | |
| "loss": 0.1113, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.948839218284082e-06, | |
| "loss": 0.1007, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.948392970636262e-06, | |
| "loss": 0.1071, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.947944805538059e-06, | |
| "loss": 0.1054, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.947494723340451e-06, | |
| "loss": 0.0957, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.947042724395918e-06, | |
| "loss": 0.1145, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.9465888090584425e-06, | |
| "loss": 0.0977, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.946132977683505e-06, | |
| "loss": 0.1105, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.945675230628089e-06, | |
| "loss": 0.114, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 4.94521556825068e-06, | |
| "loss": 0.1132, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.944753990911257e-06, | |
| "loss": 0.1051, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.944290498971305e-06, | |
| "loss": 0.1078, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.943825092793806e-06, | |
| "loss": 0.1112, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.94335777274324e-06, | |
| "loss": 0.1276, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.942888539185587e-06, | |
| "loss": 0.0904, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.942417392488329e-06, | |
| "loss": 0.1039, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.941944333020438e-06, | |
| "loss": 0.0937, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.941469361152392e-06, | |
| "loss": 0.1137, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.940992477256163e-06, | |
| "loss": 0.1002, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9405136817052205e-06, | |
| "loss": 0.1082, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9400329748745316e-06, | |
| "loss": 0.1089, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.93955035714056e-06, | |
| "loss": 0.0993, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9390658288812675e-06, | |
| "loss": 0.1185, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.938579390476109e-06, | |
| "loss": 0.0882, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.938091042306038e-06, | |
| "loss": 0.1108, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.937600784753502e-06, | |
| "loss": 0.103, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9371086182024445e-06, | |
| "loss": 0.1005, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.936614543038305e-06, | |
| "loss": 0.0999, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.936118559648015e-06, | |
| "loss": 0.1115, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9356206684200045e-06, | |
| "loss": 0.1151, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.935120869744194e-06, | |
| "loss": 0.1133, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.934619164011998e-06, | |
| "loss": 0.124, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9341155516163275e-06, | |
| "loss": 0.0915, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.9336100329515835e-06, | |
| "loss": 0.0833, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.933102608413662e-06, | |
| "loss": 0.1092, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.93259327839995e-06, | |
| "loss": 0.1012, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.932082043309328e-06, | |
| "loss": 0.0995, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.931568903542168e-06, | |
| "loss": 0.1223, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.931053859500334e-06, | |
| "loss": 0.118, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.930536911587179e-06, | |
| "loss": 0.1107, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.930018060207551e-06, | |
| "loss": 0.101, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.929497305767786e-06, | |
| "loss": 0.1065, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.928974648675711e-06, | |
| "loss": 0.1058, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.928450089340644e-06, | |
| "loss": 0.0815, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.92792362817339e-06, | |
| "loss": 0.0988, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.927395265586246e-06, | |
| "loss": 0.099, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 4.926865001992998e-06, | |
| "loss": 0.1202, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.926332837808918e-06, | |
| "loss": 0.1242, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9257987734507715e-06, | |
| "loss": 0.115, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.925262809336808e-06, | |
| "loss": 0.108, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9247249458867645e-06, | |
| "loss": 0.1055, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.924185183521868e-06, | |
| "loss": 0.1109, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.923643522664833e-06, | |
| "loss": 0.1153, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.923099963739856e-06, | |
| "loss": 0.1139, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.922554507172626e-06, | |
| "loss": 0.0926, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.922007153390313e-06, | |
| "loss": 0.1121, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.921457902821578e-06, | |
| "loss": 0.1063, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.920906755896563e-06, | |
| "loss": 0.0984, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.920353713046897e-06, | |
| "loss": 0.1025, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.919798774705694e-06, | |
| "loss": 0.1039, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.919241941307551e-06, | |
| "loss": 0.0857, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.918683213288551e-06, | |
| "loss": 0.0891, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.91812259108626e-06, | |
| "loss": 0.0983, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.917560075139727e-06, | |
| "loss": 0.1118, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.916995665889485e-06, | |
| "loss": 0.1183, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.916429363777547e-06, | |
| "loss": 0.0932, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.915861169247413e-06, | |
| "loss": 0.111, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9152910827440615e-06, | |
| "loss": 0.1157, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9147191047139535e-06, | |
| "loss": 0.1432, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.914145235605032e-06, | |
| "loss": 0.0997, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.913569475866721e-06, | |
| "loss": 0.097, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.912991825949923e-06, | |
| "loss": 0.1244, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.912412286307025e-06, | |
| "loss": 0.1182, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9118308573918905e-06, | |
| "loss": 0.0961, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.911247539659863e-06, | |
| "loss": 0.1003, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.910662333567765e-06, | |
| "loss": 0.1082, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.9100752395739e-06, | |
| "loss": 0.1156, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.909486258138048e-06, | |
| "loss": 0.1217, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.908895389721469e-06, | |
| "loss": 0.1192, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.908302634786897e-06, | |
| "loss": 0.1064, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.907707993798548e-06, | |
| "loss": 0.0993, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.907111467222112e-06, | |
| "loss": 0.0987, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 4.906513055524757e-06, | |
| "loss": 0.1191, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.9059127591751284e-06, | |
| "loss": 0.0994, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.905310578643344e-06, | |
| "loss": 0.1019, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.904706514401001e-06, | |
| "loss": 0.0972, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.9041005669211686e-06, | |
| "loss": 0.1035, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.903492736678393e-06, | |
| "loss": 0.1121, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.902883024148696e-06, | |
| "loss": 0.0996, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.90227142980957e-06, | |
| "loss": 0.1062, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.901657954139983e-06, | |
| "loss": 0.0939, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.9010425976203775e-06, | |
| "loss": 0.1002, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.900425360732667e-06, | |
| "loss": 0.1203, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.899806243960238e-06, | |
| "loss": 0.1098, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.899185247787951e-06, | |
| "loss": 0.0928, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.898562372702135e-06, | |
| "loss": 0.0956, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.897937619190594e-06, | |
| "loss": 0.096, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8973109877426e-06, | |
| "loss": 0.1427, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8966824788489e-06, | |
| "loss": 0.1119, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.896052093001706e-06, | |
| "loss": 0.1054, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.895419830694704e-06, | |
| "loss": 0.1093, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.894785692423047e-06, | |
| "loss": 0.0936, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8941496786833585e-06, | |
| "loss": 0.0987, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.89351178997373e-06, | |
| "loss": 0.0949, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8928720267937225e-06, | |
| "loss": 0.0962, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.892230389644364e-06, | |
| "loss": 0.1345, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.89158687902815e-06, | |
| "loss": 0.1156, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.890941495449045e-06, | |
| "loss": 0.1013, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.890294239412477e-06, | |
| "loss": 0.102, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.889645111425343e-06, | |
| "loss": 0.0979, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.888994111996007e-06, | |
| "loss": 0.0989, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.888341241634294e-06, | |
| "loss": 0.0992, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.887686500851499e-06, | |
| "loss": 0.1097, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8870298901603795e-06, | |
| "loss": 0.1085, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.886371410075158e-06, | |
| "loss": 0.1007, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.885711061111521e-06, | |
| "loss": 0.114, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.8850488437866175e-06, | |
| "loss": 0.1085, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.884384758619062e-06, | |
| "loss": 0.1027, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.88371880612893e-06, | |
| "loss": 0.0964, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 4.883050986837759e-06, | |
| "loss": 0.0899, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.882381301268551e-06, | |
| "loss": 0.129, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.881709749945766e-06, | |
| "loss": 0.105, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.881036333395329e-06, | |
| "loss": 0.1003, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.880361052144622e-06, | |
| "loss": 0.1131, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.879683906722491e-06, | |
| "loss": 0.1071, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.879004897659239e-06, | |
| "loss": 0.0948, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.878324025486629e-06, | |
| "loss": 0.1148, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8776412907378845e-06, | |
| "loss": 0.1119, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.876956693947686e-06, | |
| "loss": 0.1211, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8762702356521755e-06, | |
| "loss": 0.0954, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.875581916388947e-06, | |
| "loss": 0.1008, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8748917366970575e-06, | |
| "loss": 0.1076, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.874199697117019e-06, | |
| "loss": 0.0911, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8735057981907985e-06, | |
| "loss": 0.0998, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.87281004046182e-06, | |
| "loss": 0.1144, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8721124244749675e-06, | |
| "loss": 0.1002, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.871412950776572e-06, | |
| "loss": 0.1144, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.870711619914427e-06, | |
| "loss": 0.0868, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.870008432437775e-06, | |
| "loss": 0.1093, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.869303388897316e-06, | |
| "loss": 0.1188, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.868596489845202e-06, | |
| "loss": 0.1044, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8678877358350395e-06, | |
| "loss": 0.1112, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.867177127421886e-06, | |
| "loss": 0.1086, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8664646651622525e-06, | |
| "loss": 0.0904, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.8657503496141e-06, | |
| "loss": 0.1047, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.865034181336843e-06, | |
| "loss": 0.1131, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.864316160891347e-06, | |
| "loss": 0.1407, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.863596288839926e-06, | |
| "loss": 0.1088, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.862874565746346e-06, | |
| "loss": 0.1049, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.862150992175821e-06, | |
| "loss": 0.1306, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.861425568695016e-06, | |
| "loss": 0.0958, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.860698295872044e-06, | |
| "loss": 0.0982, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.859969174276465e-06, | |
| "loss": 0.1214, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.85923820447929e-06, | |
| "loss": 0.105, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.858505387052974e-06, | |
| "loss": 0.0892, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.85777072257142e-06, | |
| "loss": 0.1155, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 4.857034211609979e-06, | |
| "loss": 0.1055, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.856295854745446e-06, | |
| "loss": 0.1002, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.855555652556063e-06, | |
| "loss": 0.0895, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.854813605621518e-06, | |
| "loss": 0.1209, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.85406971452294e-06, | |
| "loss": 0.1033, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.853323979842907e-06, | |
| "loss": 0.0869, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.852576402165436e-06, | |
| "loss": 0.1033, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.851826982075993e-06, | |
| "loss": 0.0974, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.851075720161482e-06, | |
| "loss": 0.1133, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.850322617010251e-06, | |
| "loss": 0.113, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.84956767321209e-06, | |
| "loss": 0.1071, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.848810889358232e-06, | |
| "loss": 0.1184, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.8480522660413476e-06, | |
| "loss": 0.1257, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.847291803855551e-06, | |
| "loss": 0.1171, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.846529503396395e-06, | |
| "loss": 0.1099, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.845765365260874e-06, | |
| "loss": 0.1146, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.844999390047419e-06, | |
| "loss": 0.0924, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.8442315783559005e-06, | |
| "loss": 0.1241, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.843461930787629e-06, | |
| "loss": 0.13, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.842690447945349e-06, | |
| "loss": 0.1058, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.841917130433246e-06, | |
| "loss": 0.1259, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.84114197885694e-06, | |
| "loss": 0.1271, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.84036499382349e-06, | |
| "loss": 0.1155, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.839586175941389e-06, | |
| "loss": 0.113, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.838805525820563e-06, | |
| "loss": 0.097, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.8380230440723775e-06, | |
| "loss": 0.0981, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.83723873130963e-06, | |
| "loss": 0.0944, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.836452588146552e-06, | |
| "loss": 0.0992, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.835664615198808e-06, | |
| "loss": 0.1074, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.834874813083499e-06, | |
| "loss": 0.0886, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.834083182419153e-06, | |
| "loss": 0.1144, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.8332897238257335e-06, | |
| "loss": 0.1138, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.8324944379246355e-06, | |
| "loss": 0.1607, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.831697325338684e-06, | |
| "loss": 0.1096, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.830898386692133e-06, | |
| "loss": 0.0851, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.830097622610672e-06, | |
| "loss": 0.1061, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 4.829295033721415e-06, | |
| "loss": 0.1118, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.8284906206529056e-06, | |
| "loss": 0.1245, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.827684384035119e-06, | |
| "loss": 0.1491, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.826876324499455e-06, | |
| "loss": 0.0939, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.826066442678744e-06, | |
| "loss": 0.1199, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.8252547392072415e-06, | |
| "loss": 0.0923, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.824441214720629e-06, | |
| "loss": 0.1148, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.823625869856017e-06, | |
| "loss": 0.0993, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.822808705251939e-06, | |
| "loss": 0.111, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.821989721548356e-06, | |
| "loss": 0.0954, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.82116891938665e-06, | |
| "loss": 0.1124, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.820346299409632e-06, | |
| "loss": 0.1079, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.819521862261532e-06, | |
| "loss": 0.0902, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.818695608588005e-06, | |
| "loss": 0.1062, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.8178675390361295e-06, | |
| "loss": 0.0997, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.817037654254405e-06, | |
| "loss": 0.1085, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.816205954892753e-06, | |
| "loss": 0.1055, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.815372441602516e-06, | |
| "loss": 0.1153, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.814537115036457e-06, | |
| "loss": 0.0964, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.813699975848758e-06, | |
| "loss": 0.128, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.812861024695024e-06, | |
| "loss": 0.1044, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.812020262232274e-06, | |
| "loss": 0.1158, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.81117768911895e-06, | |
| "loss": 0.0991, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.81033330601491e-06, | |
| "loss": 0.1084, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.809487113581428e-06, | |
| "loss": 0.1193, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.808639112481199e-06, | |
| "loss": 0.1135, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.807789303378331e-06, | |
| "loss": 0.0994, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.806937686938349e-06, | |
| "loss": 0.1081, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.806084263828193e-06, | |
| "loss": 0.0883, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.805229034716219e-06, | |
| "loss": 0.0899, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.804372000272196e-06, | |
| "loss": 0.091, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.803513161167309e-06, | |
| "loss": 0.1132, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.802652518074153e-06, | |
| "loss": 0.1029, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.801790071666739e-06, | |
| "loss": 0.1328, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.800925822620487e-06, | |
| "loss": 0.1239, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.800059771612234e-06, | |
| "loss": 0.1124, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.799191919320221e-06, | |
| "loss": 0.1071, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 4.7983222664241055e-06, | |
| "loss": 0.0923, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.797450813604954e-06, | |
| "loss": 0.0989, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7965775615452395e-06, | |
| "loss": 0.0887, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.795702510928848e-06, | |
| "loss": 0.1056, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.794825662441072e-06, | |
| "loss": 0.1272, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7939470167686115e-06, | |
| "loss": 0.1219, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.793066574599576e-06, | |
| "loss": 0.1067, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7921843366234785e-06, | |
| "loss": 0.126, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.791300303531243e-06, | |
| "loss": 0.0923, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.790414476015196e-06, | |
| "loss": 0.0966, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.78952685476907e-06, | |
| "loss": 0.0984, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.788637440488003e-06, | |
| "loss": 0.1025, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.787746233868537e-06, | |
| "loss": 0.1022, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.786853235608616e-06, | |
| "loss": 0.1218, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7859584464075895e-06, | |
| "loss": 0.1093, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.785061866966208e-06, | |
| "loss": 0.1071, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.784163497986625e-06, | |
| "loss": 0.1036, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.783263340172394e-06, | |
| "loss": 0.099, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7823613942284716e-06, | |
| "loss": 0.1023, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.781457660861214e-06, | |
| "loss": 0.1333, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.780552140778376e-06, | |
| "loss": 0.0937, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.779644834689112e-06, | |
| "loss": 0.1041, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.778735743303976e-06, | |
| "loss": 0.0834, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.77782486733492e-06, | |
| "loss": 0.086, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.776912207495294e-06, | |
| "loss": 0.1234, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.775997764499843e-06, | |
| "loss": 0.0966, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.77508153906471e-06, | |
| "loss": 0.1166, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.774163531907436e-06, | |
| "loss": 0.0931, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.77324374374695e-06, | |
| "loss": 0.1138, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7723221753035865e-06, | |
| "loss": 0.1219, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.771398827299065e-06, | |
| "loss": 0.1186, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.770473700456505e-06, | |
| "loss": 0.1139, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.769546795500414e-06, | |
| "loss": 0.0975, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.768618113156695e-06, | |
| "loss": 0.1028, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.767687654152642e-06, | |
| "loss": 0.1232, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.766755419216942e-06, | |
| "loss": 0.114, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.7658214090796705e-06, | |
| "loss": 0.1064, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 4.764885624472295e-06, | |
| "loss": 0.091, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.763948066127671e-06, | |
| "loss": 0.1214, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.763008734780044e-06, | |
| "loss": 0.0991, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.762067631165049e-06, | |
| "loss": 0.1017, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.761124756019708e-06, | |
| "loss": 0.1182, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.76018011008243e-06, | |
| "loss": 0.0944, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.75923369409301e-06, | |
| "loss": 0.1083, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7582855087926325e-06, | |
| "loss": 0.0913, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.757335554923863e-06, | |
| "loss": 0.1163, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.756383833230658e-06, | |
| "loss": 0.0839, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.755430344458352e-06, | |
| "loss": 0.1231, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.754475089353667e-06, | |
| "loss": 0.1115, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7535180686647085e-06, | |
| "loss": 0.0934, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.752559283140964e-06, | |
| "loss": 0.1176, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.751598733533302e-06, | |
| "loss": 0.108, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7506364205939745e-06, | |
| "loss": 0.1061, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.749672345076613e-06, | |
| "loss": 0.0987, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.748706507736229e-06, | |
| "loss": 0.1191, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.747738909329216e-06, | |
| "loss": 0.104, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.746769550613346e-06, | |
| "loss": 0.124, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.745798432347767e-06, | |
| "loss": 0.0981, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.744825555293007e-06, | |
| "loss": 0.1002, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7438509202109725e-06, | |
| "loss": 0.0952, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7428745278649455e-06, | |
| "loss": 0.124, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7418963790195825e-06, | |
| "loss": 0.1173, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.74091647444092e-06, | |
| "loss": 0.103, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7399348148963646e-06, | |
| "loss": 0.1117, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7389514011547e-06, | |
| "loss": 0.1214, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7379662339860844e-06, | |
| "loss": 0.1067, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7369793141620466e-06, | |
| "loss": 0.1079, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.73599064245549e-06, | |
| "loss": 0.1227, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.735000219640689e-06, | |
| "loss": 0.108, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.734008046493291e-06, | |
| "loss": 0.1056, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.733014123790312e-06, | |
| "loss": 0.114, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.732018452310139e-06, | |
| "loss": 0.0962, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.7310210328325285e-06, | |
| "loss": 0.0965, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 4.730021866138607e-06, | |
| "loss": 0.1199, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.729020953010867e-06, | |
| "loss": 0.1046, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.728018294233171e-06, | |
| "loss": 0.0932, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.727013890590748e-06, | |
| "loss": 0.1022, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.726007742870192e-06, | |
| "loss": 0.0971, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.724999851859465e-06, | |
| "loss": 0.0865, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.723990218347893e-06, | |
| "loss": 0.0993, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.722978843126168e-06, | |
| "loss": 0.1153, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.721965726986344e-06, | |
| "loss": 0.1288, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.720950870721839e-06, | |
| "loss": 0.0949, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.719934275127435e-06, | |
| "loss": 0.1005, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.718915940999274e-06, | |
| "loss": 0.0932, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.717895869134862e-06, | |
| "loss": 0.097, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.716874060333064e-06, | |
| "loss": 0.1127, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.715850515394108e-06, | |
| "loss": 0.1008, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.714825235119576e-06, | |
| "loss": 0.0854, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.713798220312416e-06, | |
| "loss": 0.0941, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.712769471776929e-06, | |
| "loss": 0.1251, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.711738990318776e-06, | |
| "loss": 0.1158, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.710706776744976e-06, | |
| "loss": 0.1057, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.709672831863903e-06, | |
| "loss": 0.0988, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.708637156485287e-06, | |
| "loss": 0.0999, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.707599751420214e-06, | |
| "loss": 0.1087, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.706560617481124e-06, | |
| "loss": 0.1072, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.705519755481811e-06, | |
| "loss": 0.102, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.7044771662374225e-06, | |
| "loss": 0.0946, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.703432850564458e-06, | |
| "loss": 0.1118, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.70238680928077e-06, | |
| "loss": 0.0916, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.701339043205563e-06, | |
| "loss": 0.1172, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.700289553159391e-06, | |
| "loss": 0.1055, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.6992383399641565e-06, | |
| "loss": 0.0965, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.698185404443116e-06, | |
| "loss": 0.1122, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.697130747420871e-06, | |
| "loss": 0.1226, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.696074369723373e-06, | |
| "loss": 0.1103, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.695016272177921e-06, | |
| "loss": 0.1008, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.693956455613159e-06, | |
| "loss": 0.1086, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.692894920859079e-06, | |
| "loss": 0.1036, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 4.691831668747018e-06, | |
| "loss": 0.1158, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.690766700109659e-06, | |
| "loss": 0.0978, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.689700015781029e-06, | |
| "loss": 0.117, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.6886316165964965e-06, | |
| "loss": 0.1127, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.6875615033927756e-06, | |
| "loss": 0.1045, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.68648967700792e-06, | |
| "loss": 0.0919, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.685416138281329e-06, | |
| "loss": 0.1059, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.684340888053741e-06, | |
| "loss": 0.1065, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.683263927167232e-06, | |
| "loss": 0.1096, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.682185256465222e-06, | |
| "loss": 0.111, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.681104876792467e-06, | |
| "loss": 0.1261, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.680022788995065e-06, | |
| "loss": 0.1067, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.678938993920446e-06, | |
| "loss": 0.0861, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.677853492417382e-06, | |
| "loss": 0.1064, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.676766285335979e-06, | |
| "loss": 0.1167, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.67567737352768e-06, | |
| "loss": 0.1042, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.674586757845264e-06, | |
| "loss": 0.1014, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.67349443914284e-06, | |
| "loss": 0.1069, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.672400418275855e-06, | |
| "loss": 0.1142, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.671304696101087e-06, | |
| "loss": 0.0736, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.670207273476648e-06, | |
| "loss": 0.113, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.669108151261979e-06, | |
| "loss": 0.1174, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.668007330317856e-06, | |
| "loss": 0.1124, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.666904811506382e-06, | |
| "loss": 0.1099, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.66580059569099e-06, | |
| "loss": 0.1145, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.664694683736443e-06, | |
| "loss": 0.1475, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.6635870765088345e-06, | |
| "loss": 0.0848, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.662477774875579e-06, | |
| "loss": 0.1034, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.661366779705425e-06, | |
| "loss": 0.1078, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.660254091868444e-06, | |
| "loss": 0.0925, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.659139712236033e-06, | |
| "loss": 0.1161, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.658023641680916e-06, | |
| "loss": 0.0876, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.6569058810771375e-06, | |
| "loss": 0.0867, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.65578643130007e-06, | |
| "loss": 0.1099, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.654665293226405e-06, | |
| "loss": 0.0874, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.653542467734159e-06, | |
| "loss": 0.1071, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 4.652417955702668e-06, | |
| "loss": 0.1303, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.65129175801259e-06, | |
| "loss": 0.1209, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.650163875545903e-06, | |
| "loss": 0.1121, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.649034309185904e-06, | |
| "loss": 0.0959, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.647903059817208e-06, | |
| "loss": 0.1195, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.646770128325751e-06, | |
| "loss": 0.1229, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.645635515598784e-06, | |
| "loss": 0.1127, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.644499222524873e-06, | |
| "loss": 0.1239, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.643361249993903e-06, | |
| "loss": 0.1158, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.642221598897074e-06, | |
| "loss": 0.1063, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.6410802701269e-06, | |
| "loss": 0.1072, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.639937264577207e-06, | |
| "loss": 0.1117, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.638792583143136e-06, | |
| "loss": 0.1041, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.637646226721142e-06, | |
| "loss": 0.1101, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.636498196208989e-06, | |
| "loss": 0.1155, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.635348492505751e-06, | |
| "loss": 0.1101, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.6341971165118185e-06, | |
| "loss": 0.1365, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.6330440691288846e-06, | |
| "loss": 0.1245, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.631889351259955e-06, | |
| "loss": 0.1019, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.630732963809343e-06, | |
| "loss": 0.1083, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.62957490768267e-06, | |
| "loss": 0.1191, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.628415183786863e-06, | |
| "loss": 0.1246, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.627253793030155e-06, | |
| "loss": 0.1132, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.626090736322088e-06, | |
| "loss": 0.1028, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.624926014573502e-06, | |
| "loss": 0.1015, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.623759628696547e-06, | |
| "loss": 0.0882, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.622591579604674e-06, | |
| "loss": 0.094, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.6214218682126345e-06, | |
| "loss": 0.1, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.620250495436487e-06, | |
| "loss": 0.1081, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.619077462193586e-06, | |
| "loss": 0.0996, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.617902769402589e-06, | |
| "loss": 0.125, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.616726417983452e-06, | |
| "loss": 0.1122, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.615548408857431e-06, | |
| "loss": 0.1135, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.61436874294708e-06, | |
| "loss": 0.1106, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.613187421176249e-06, | |
| "loss": 0.1052, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.612004444470088e-06, | |
| "loss": 0.1095, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.610819813755038e-06, | |
| "loss": 0.1137, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 4.609633529958841e-06, | |
| "loss": 0.1151, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.608445594010531e-06, | |
| "loss": 0.1202, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.607256006840435e-06, | |
| "loss": 0.1233, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.606064769380174e-06, | |
| "loss": 0.0917, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.604871882562661e-06, | |
| "loss": 0.1231, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.603677347322103e-06, | |
| "loss": 0.1008, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.602481164593994e-06, | |
| "loss": 0.1126, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.601283335315121e-06, | |
| "loss": 0.1187, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.600083860423561e-06, | |
| "loss": 0.1033, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.598882740858677e-06, | |
| "loss": 0.113, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.597679977561122e-06, | |
| "loss": 0.1118, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.596475571472836e-06, | |
| "loss": 0.1001, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5952695235370455e-06, | |
| "loss": 0.093, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.594061834698263e-06, | |
| "loss": 0.1197, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5928525059022846e-06, | |
| "loss": 0.0989, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.591641538096193e-06, | |
| "loss": 0.1109, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.590428932228354e-06, | |
| "loss": 0.0981, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.589214689248414e-06, | |
| "loss": 0.1102, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5879988101073035e-06, | |
| "loss": 0.1083, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.586781295757235e-06, | |
| "loss": 0.0964, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5855621471517e-06, | |
| "loss": 0.1119, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5843413652454705e-06, | |
| "loss": 0.1111, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.583118950994598e-06, | |
| "loss": 0.0883, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.58189490535641e-06, | |
| "loss": 0.0961, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.580669229289516e-06, | |
| "loss": 0.096, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.579441923753799e-06, | |
| "loss": 0.1227, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5782129897104185e-06, | |
| "loss": 0.1078, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.576982428121811e-06, | |
| "loss": 0.0975, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5757502399516855e-06, | |
| "loss": 0.1171, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.574516426165026e-06, | |
| "loss": 0.1109, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5732809877280905e-06, | |
| "loss": 0.1275, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.572043925608406e-06, | |
| "loss": 0.1398, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.570805240774776e-06, | |
| "loss": 0.1183, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.569564934197272e-06, | |
| "loss": 0.1009, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.568323006847233e-06, | |
| "loss": 0.0945, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.5670794596972724e-06, | |
| "loss": 0.1098, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.565834293721269e-06, | |
| "loss": 0.1033, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 4.564587509894371e-06, | |
| "loss": 0.1344, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.563339109192992e-06, | |
| "loss": 0.109, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5620890925948125e-06, | |
| "loss": 0.0949, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.560837461078779e-06, | |
| "loss": 0.0958, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.559584215625102e-06, | |
| "loss": 0.0885, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.558329357215258e-06, | |
| "loss": 0.1107, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.557072886831982e-06, | |
| "loss": 0.1149, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.555814805459277e-06, | |
| "loss": 0.1079, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5545551140824044e-06, | |
| "loss": 0.0937, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.553293813687885e-06, | |
| "loss": 0.1281, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.552030905263504e-06, | |
| "loss": 0.112, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.550766389798304e-06, | |
| "loss": 0.0989, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.549500268282584e-06, | |
| "loss": 0.1119, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5482325417079045e-06, | |
| "loss": 0.1143, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.54696321106708e-06, | |
| "loss": 0.1174, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.545692277354182e-06, | |
| "loss": 0.099, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.544419741564539e-06, | |
| "loss": 0.116, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5431456046947335e-06, | |
| "loss": 0.1223, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.541869867742599e-06, | |
| "loss": 0.1119, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.540592531707225e-06, | |
| "loss": 0.1178, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.539313597588954e-06, | |
| "loss": 0.1246, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5380330663893775e-06, | |
| "loss": 0.1066, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.53675093911134e-06, | |
| "loss": 0.0964, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.535467216758936e-06, | |
| "loss": 0.0996, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.534181900337505e-06, | |
| "loss": 0.1165, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.532894990853642e-06, | |
| "loss": 0.1113, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.531606489315181e-06, | |
| "loss": 0.1069, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.530316396731212e-06, | |
| "loss": 0.127, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.529024714112064e-06, | |
| "loss": 0.0937, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.527731442469313e-06, | |
| "loss": 0.0998, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.526436582815783e-06, | |
| "loss": 0.1273, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5251401361655365e-06, | |
| "loss": 0.0991, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.523842103533882e-06, | |
| "loss": 0.1028, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.522542485937369e-06, | |
| "loss": 0.0858, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.5212412843937884e-06, | |
| "loss": 0.1191, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.519938499922172e-06, | |
| "loss": 0.116, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 4.51863413354279e-06, | |
| "loss": 0.1062, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.517328186277154e-06, | |
| "loss": 0.1232, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.516020659148011e-06, | |
| "loss": 0.1008, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.514711553179345e-06, | |
| "loss": 0.1095, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.5134008693963804e-06, | |
| "loss": 0.0973, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.512088608825573e-06, | |
| "loss": 0.0949, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.510774772494615e-06, | |
| "loss": 0.1255, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.509459361432433e-06, | |
| "loss": 0.129, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.508142376669188e-06, | |
| "loss": 0.11, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.5068238192362696e-06, | |
| "loss": 0.1075, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.505503690166303e-06, | |
| "loss": 0.0926, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.504181990493142e-06, | |
| "loss": 0.1175, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.5028587212518705e-06, | |
| "loss": 0.0951, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.501533883478803e-06, | |
| "loss": 0.1059, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.500207478211483e-06, | |
| "loss": 0.1007, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.498879506488677e-06, | |
| "loss": 0.108, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.4975499693503845e-06, | |
| "loss": 0.0938, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.496218867837825e-06, | |
| "loss": 0.1037, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.494886202993449e-06, | |
| "loss": 0.1138, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.4935519758609256e-06, | |
| "loss": 0.1058, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.492216187485151e-06, | |
| "loss": 0.1038, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.490878838912244e-06, | |
| "loss": 0.0882, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.489539931189543e-06, | |
| "loss": 0.1066, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.4881994653656105e-06, | |
| "loss": 0.116, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.486857442490227e-06, | |
| "loss": 0.1119, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.4855138636143925e-06, | |
| "loss": 0.0825, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.484168729790327e-06, | |
| "loss": 0.1035, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.482822042071466e-06, | |
| "loss": 0.1021, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.481473801512465e-06, | |
| "loss": 0.0983, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.480124009169191e-06, | |
| "loss": 0.1064, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.478772666098733e-06, | |
| "loss": 0.106, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.477419773359389e-06, | |
| "loss": 0.1257, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.47606533201067e-06, | |
| "loss": 0.1076, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.474709343113306e-06, | |
| "loss": 0.1159, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.4733518077292314e-06, | |
| "loss": 0.1129, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.471992726921598e-06, | |
| "loss": 0.1078, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.470632101754763e-06, | |
| "loss": 0.1171, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 4.469269933294296e-06, | |
| "loss": 0.1006, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.4679062226069744e-06, | |
| "loss": 0.1015, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.466540970760785e-06, | |
| "loss": 0.0954, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.465174178824918e-06, | |
| "loss": 0.1175, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.4638058478697725e-06, | |
| "loss": 0.1159, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.462435978966952e-06, | |
| "loss": 0.1125, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.461064573189264e-06, | |
| "loss": 0.1152, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.459691631610722e-06, | |
| "loss": 0.1322, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.458317155306538e-06, | |
| "loss": 0.1114, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.456941145353129e-06, | |
| "loss": 0.0938, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.455563602828114e-06, | |
| "loss": 0.1129, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.454184528810308e-06, | |
| "loss": 0.108, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.45280392437973e-06, | |
| "loss": 0.1135, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.451421790617595e-06, | |
| "loss": 0.104, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.450038128606316e-06, | |
| "loss": 0.1145, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.448652939429503e-06, | |
| "loss": 0.1026, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.447266224171963e-06, | |
| "loss": 0.1061, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.445877983919695e-06, | |
| "loss": 0.1084, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.444488219759897e-06, | |
| "loss": 0.1145, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.443096932780957e-06, | |
| "loss": 0.1065, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.4417041240724555e-06, | |
| "loss": 0.1229, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.440309794725167e-06, | |
| "loss": 0.0975, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.438913945831055e-06, | |
| "loss": 0.1282, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.437516578483275e-06, | |
| "loss": 0.1154, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.436117693776168e-06, | |
| "loss": 0.0893, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.434717292805267e-06, | |
| "loss": 0.118, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.433315376667292e-06, | |
| "loss": 0.1106, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.431911946460147e-06, | |
| "loss": 0.1, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.4305070032829245e-06, | |
| "loss": 0.1134, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.429100548235901e-06, | |
| "loss": 0.1015, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.427692582420536e-06, | |
| "loss": 0.1022, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.426283106939474e-06, | |
| "loss": 0.1048, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.424872122896539e-06, | |
| "loss": 0.0946, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.423459631396741e-06, | |
| "loss": 0.1121, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.422045633546265e-06, | |
| "loss": 0.1164, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.420630130452479e-06, | |
| "loss": 0.0879, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 4.419213123223931e-06, | |
| "loss": 0.1241, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.417794612970344e-06, | |
| "loss": 0.1036, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.4163746008026196e-06, | |
| "loss": 0.1271, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.4149530878328365e-06, | |
| "loss": 0.106, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.413530075174245e-06, | |
| "loss": 0.1129, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.412105563941274e-06, | |
| "loss": 0.1179, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.410679555249524e-06, | |
| "loss": 0.1259, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.4092520502157696e-06, | |
| "loss": 0.1241, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.407823049957957e-06, | |
| "loss": 0.1085, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.4063925555952e-06, | |
| "loss": 0.1103, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.404960568247787e-06, | |
| "loss": 0.1068, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.403527089037174e-06, | |
| "loss": 0.0957, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.402092119085985e-06, | |
| "loss": 0.1026, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.400655659518013e-06, | |
| "loss": 0.0987, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.399217711458212e-06, | |
| "loss": 0.1094, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.397778276032712e-06, | |
| "loss": 0.0953, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.396337354368797e-06, | |
| "loss": 0.1256, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.394894947594922e-06, | |
| "loss": 0.1097, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.393451056840702e-06, | |
| "loss": 0.1044, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.392005683236915e-06, | |
| "loss": 0.1104, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.390558827915501e-06, | |
| "loss": 0.0894, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.389110492009559e-06, | |
| "loss": 0.1071, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.387660676653349e-06, | |
| "loss": 0.1278, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.386209382982288e-06, | |
| "loss": 0.1004, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.3847566121329525e-06, | |
| "loss": 0.1032, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.383302365243073e-06, | |
| "loss": 0.1118, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.381846643451541e-06, | |
| "loss": 0.1052, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.380389447898396e-06, | |
| "loss": 0.115, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.378930779724838e-06, | |
| "loss": 0.1244, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.377470640073218e-06, | |
| "loss": 0.1128, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.376009030087039e-06, | |
| "loss": 0.1182, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.374545950910955e-06, | |
| "loss": 0.1279, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.373081403690772e-06, | |
| "loss": 0.1102, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.371615389573445e-06, | |
| "loss": 0.1023, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.370147909707078e-06, | |
| "loss": 0.1185, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.368678965240921e-06, | |
| "loss": 0.1023, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.367208557325375e-06, | |
| "loss": 0.1324, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 4.365736687111985e-06, | |
| "loss": 0.0975, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.364263355753438e-06, | |
| "loss": 0.1093, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.362788564403572e-06, | |
| "loss": 0.1036, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.361312314217362e-06, | |
| "loss": 0.1066, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.359834606350929e-06, | |
| "loss": 0.0934, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.358355441961535e-06, | |
| "loss": 0.0899, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.356874822207582e-06, | |
| "loss": 0.0939, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.355392748248613e-06, | |
| "loss": 0.0891, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.353909221245309e-06, | |
| "loss": 0.1216, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.352424242359486e-06, | |
| "loss": 0.0985, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.350937812754105e-06, | |
| "loss": 0.1062, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.349449933593254e-06, | |
| "loss": 0.088, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.347960606042163e-06, | |
| "loss": 0.1036, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.346469831267192e-06, | |
| "loss": 0.1268, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.344977610435836e-06, | |
| "loss": 0.1042, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.343483944716724e-06, | |
| "loss": 0.0979, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.341988835279614e-06, | |
| "loss": 0.0888, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.340492283295396e-06, | |
| "loss": 0.092, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.33899428993609e-06, | |
| "loss": 0.0946, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.337494856374843e-06, | |
| "loss": 0.13, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.335993983785932e-06, | |
| "loss": 0.1172, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3344916733447606e-06, | |
| "loss": 0.127, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.332987926227856e-06, | |
| "loss": 0.1066, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.331482743612875e-06, | |
| "loss": 0.0909, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3299761266785946e-06, | |
| "loss": 0.117, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.328468076604916e-06, | |
| "loss": 0.1175, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.326958594572865e-06, | |
| "loss": 0.1134, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.325447681764586e-06, | |
| "loss": 0.117, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3239353393633456e-06, | |
| "loss": 0.1151, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.322421568553529e-06, | |
| "loss": 0.107, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.320906370520641e-06, | |
| "loss": 0.1146, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.319389746451304e-06, | |
| "loss": 0.1137, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3178716975332554e-06, | |
| "loss": 0.091, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.316352224955352e-06, | |
| "loss": 0.0918, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3148313299075604e-06, | |
| "loss": 0.1495, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3133090135809674e-06, | |
| "loss": 0.1009, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.311785277167767e-06, | |
| "loss": 0.0946, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 4.3102601218612704e-06, | |
| "loss": 0.1108, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.308733548855896e-06, | |
| "loss": 0.1127, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.307205559347176e-06, | |
| "loss": 0.1334, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.3056761545317485e-06, | |
| "loss": 0.1014, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.3041453356073606e-06, | |
| "loss": 0.1072, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.302613103772869e-06, | |
| "loss": 0.097, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.301079460228237e-06, | |
| "loss": 0.0876, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.29954440617453e-06, | |
| "loss": 0.1105, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.298007942813921e-06, | |
| "loss": 0.117, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.2964700713496855e-06, | |
| "loss": 0.1161, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.294930792986203e-06, | |
| "loss": 0.1071, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.293390108928954e-06, | |
| "loss": 0.1073, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.291848020384519e-06, | |
| "loss": 0.1001, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.29030452856058e-06, | |
| "loss": 0.1028, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.288759634665916e-06, | |
| "loss": 0.0895, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.287213339910407e-06, | |
| "loss": 0.1047, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.2856656455050266e-06, | |
| "loss": 0.1161, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.284116552661847e-06, | |
| "loss": 0.1348, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.2825660625940354e-06, | |
| "loss": 0.1273, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.281014176515852e-06, | |
| "loss": 0.1221, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.279460895642652e-06, | |
| "loss": 0.0941, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.27790622119088e-06, | |
| "loss": 0.1073, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.276350154378075e-06, | |
| "loss": 0.1046, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.274792696422865e-06, | |
| "loss": 0.0952, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.273233848544968e-06, | |
| "loss": 0.1057, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.27167361196519e-06, | |
| "loss": 0.1243, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.270111987905425e-06, | |
| "loss": 0.0918, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.268548977588655e-06, | |
| "loss": 0.1118, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.266984582238942e-06, | |
| "loss": 0.1187, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.26541880308144e-06, | |
| "loss": 0.1317, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.263851641342383e-06, | |
| "loss": 0.1222, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.262283098249088e-06, | |
| "loss": 0.0996, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.260713175029953e-06, | |
| "loss": 0.1005, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.259141872914459e-06, | |
| "loss": 0.109, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.257569193133166e-06, | |
| "loss": 0.119, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.2559951369177125e-06, | |
| "loss": 0.1041, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 4.254419705500814e-06, | |
| "loss": 0.1061, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.252842900116265e-06, | |
| "loss": 0.1185, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.251264721998935e-06, | |
| "loss": 0.1111, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.249685172384769e-06, | |
| "loss": 0.1184, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.248104252510786e-06, | |
| "loss": 0.1094, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.2465219636150764e-06, | |
| "loss": 0.1179, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.244938306936805e-06, | |
| "loss": 0.1103, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.243353283716207e-06, | |
| "loss": 0.0973, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.241766895194589e-06, | |
| "loss": 0.1173, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.240179142614324e-06, | |
| "loss": 0.1103, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.2385900272188546e-06, | |
| "loss": 0.1027, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.2369995502526925e-06, | |
| "loss": 0.1053, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.235407712961413e-06, | |
| "loss": 0.1148, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.233814516591658e-06, | |
| "loss": 0.0961, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.232219962391135e-06, | |
| "loss": 0.1122, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.230624051608612e-06, | |
| "loss": 0.1159, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.229026785493922e-06, | |
| "loss": 0.1041, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.2274281652979565e-06, | |
| "loss": 0.0967, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.225828192272671e-06, | |
| "loss": 0.111, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.224226867671079e-06, | |
| "loss": 0.1096, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.222624192747251e-06, | |
| "loss": 0.1081, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.221020168756318e-06, | |
| "loss": 0.1144, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.219414796954463e-06, | |
| "loss": 0.102, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.21780807859893e-06, | |
| "loss": 0.0988, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.216200014948014e-06, | |
| "loss": 0.1217, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.214590607261065e-06, | |
| "loss": 0.1087, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.212979856798482e-06, | |
| "loss": 0.1374, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.211367764821722e-06, | |
| "loss": 0.1051, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.209754332593288e-06, | |
| "loss": 0.1114, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.208139561376734e-06, | |
| "loss": 0.0947, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.206523452436663e-06, | |
| "loss": 0.101, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.204906007038723e-06, | |
| "loss": 0.1166, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.203287226449612e-06, | |
| "loss": 0.0977, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.201667111937073e-06, | |
| "loss": 0.1131, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.200045664769892e-06, | |
| "loss": 0.0979, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.1984228862179015e-06, | |
| "loss": 0.0982, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.196798777551971e-06, | |
| "loss": 0.1084, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 4.195173340044019e-06, | |
| "loss": 0.121, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.193546574967e-06, | |
| "loss": 0.0937, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.19191848359491e-06, | |
| "loss": 0.1092, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.190289067202782e-06, | |
| "loss": 0.0975, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.188658327066689e-06, | |
| "loss": 0.1144, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.187026264463738e-06, | |
| "loss": 0.1055, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.185392880672074e-06, | |
| "loss": 0.1029, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1837581769708755e-06, | |
| "loss": 0.1034, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.182122154640355e-06, | |
| "loss": 0.1123, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.180484814961759e-06, | |
| "loss": 0.088, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.178846159217361e-06, | |
| "loss": 0.1082, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.177206188690472e-06, | |
| "loss": 0.0965, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1755649046654274e-06, | |
| "loss": 0.1164, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.173922308427593e-06, | |
| "loss": 0.1045, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1722784012633635e-06, | |
| "loss": 0.0894, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.170633184460158e-06, | |
| "loss": 0.1075, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.168986659306423e-06, | |
| "loss": 0.1164, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.167338827091627e-06, | |
| "loss": 0.0974, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.165689689106266e-06, | |
| "loss": 0.0974, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1640392466418576e-06, | |
| "loss": 0.1083, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.162387500990937e-06, | |
| "loss": 0.09, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.160734453447065e-06, | |
| "loss": 0.0946, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.15908010530482e-06, | |
| "loss": 0.1348, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.157424457859798e-06, | |
| "loss": 0.1004, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.155767512408614e-06, | |
| "loss": 0.1066, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1541092702488995e-06, | |
| "loss": 0.1171, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1524497326793e-06, | |
| "loss": 0.1045, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.150788900999477e-06, | |
| "loss": 0.1099, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1491267765101065e-06, | |
| "loss": 0.1092, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.147463360512873e-06, | |
| "loss": 0.1319, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.145798654310476e-06, | |
| "loss": 0.1026, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.144132659206625e-06, | |
| "loss": 0.1122, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.142465376506037e-06, | |
| "loss": 0.099, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.14079680751444e-06, | |
| "loss": 0.1045, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.139126953538566e-06, | |
| "loss": 0.1016, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1374558158861575e-06, | |
| "loss": 0.0978, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 4.1357833958659585e-06, | |
| "loss": 0.0935, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.13410969478772e-06, | |
| "loss": 0.1221, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.132434713962194e-06, | |
| "loss": 0.0947, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.130758454701137e-06, | |
| "loss": 0.084, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.129080918317305e-06, | |
| "loss": 0.0949, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.127402106124455e-06, | |
| "loss": 0.0996, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.125722019437342e-06, | |
| "loss": 0.0917, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.124040659571722e-06, | |
| "loss": 0.0896, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.122358027844344e-06, | |
| "loss": 0.0898, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.120674125572955e-06, | |
| "loss": 0.1102, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.118988954076299e-06, | |
| "loss": 0.1069, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.117302514674112e-06, | |
| "loss": 0.1047, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.115614808687122e-06, | |
| "loss": 0.1111, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.11392583743705e-06, | |
| "loss": 0.1112, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.112235602246608e-06, | |
| "loss": 0.0989, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.110544104439498e-06, | |
| "loss": 0.0957, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.108851345340411e-06, | |
| "loss": 0.1071, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.1071573262750255e-06, | |
| "loss": 0.0951, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.105462048570006e-06, | |
| "loss": 0.0829, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.103765513553003e-06, | |
| "loss": 0.0955, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.102067722552653e-06, | |
| "loss": 0.101, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.100368676898575e-06, | |
| "loss": 0.1186, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.098668377921369e-06, | |
| "loss": 0.0867, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.0969668269526195e-06, | |
| "loss": 0.1138, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.095264025324891e-06, | |
| "loss": 0.116, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.093559974371725e-06, | |
| "loss": 0.1081, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.091854675427643e-06, | |
| "loss": 0.1327, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.090148129828145e-06, | |
| "loss": 0.0909, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.0884403389097076e-06, | |
| "loss": 0.1114, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.0867313040097775e-06, | |
| "loss": 0.1018, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.085021026466781e-06, | |
| "loss": 0.106, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.083309507620118e-06, | |
| "loss": 0.1091, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.081596748810156e-06, | |
| "loss": 0.0852, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.079882751378237e-06, | |
| "loss": 0.0929, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.078167516666672e-06, | |
| "loss": 0.116, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.076451046018741e-06, | |
| "loss": 0.1043, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.074733340778692e-06, | |
| "loss": 0.1057, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 4.07301440229174e-06, | |
| "loss": 0.0923, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0712942319040665e-06, | |
| "loss": 0.1257, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.069572830962815e-06, | |
| "loss": 0.1011, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0678502008160945e-06, | |
| "loss": 0.1116, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.06612634281298e-06, | |
| "loss": 0.0973, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.064401258303502e-06, | |
| "loss": 0.0941, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.062674948638654e-06, | |
| "loss": 0.1079, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.060947415170392e-06, | |
| "loss": 0.1089, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.059218659251625e-06, | |
| "loss": 0.1066, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.057488682236224e-06, | |
| "loss": 0.1271, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.055757485479014e-06, | |
| "loss": 0.104, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.054025070335775e-06, | |
| "loss": 0.1041, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.052291438163244e-06, | |
| "loss": 0.0983, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.050556590319105e-06, | |
| "loss": 0.1326, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.048820528162002e-06, | |
| "loss": 0.1037, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.047083253051525e-06, | |
| "loss": 0.0968, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.045344766348213e-06, | |
| "loss": 0.0909, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.043605069413556e-06, | |
| "loss": 0.1165, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.041864163609995e-06, | |
| "loss": 0.1051, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.040122050300911e-06, | |
| "loss": 0.1399, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.038378730850634e-06, | |
| "loss": 0.0908, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0366342066244405e-06, | |
| "loss": 0.1001, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.034888478988548e-06, | |
| "loss": 0.1055, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.033141549310114e-06, | |
| "loss": 0.1174, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.031393418957246e-06, | |
| "loss": 0.0987, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.029644089298981e-06, | |
| "loss": 0.1617, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.027893561705302e-06, | |
| "loss": 0.12, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.026141837547129e-06, | |
| "loss": 0.1063, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.02438891819632e-06, | |
| "loss": 0.116, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0226348050256644e-06, | |
| "loss": 0.1004, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.020879499408893e-06, | |
| "loss": 0.1005, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.019123002720665e-06, | |
| "loss": 0.0944, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.017365316336575e-06, | |
| "loss": 0.0859, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0156064416331505e-06, | |
| "loss": 0.1057, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.013846379987847e-06, | |
| "loss": 0.0952, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.012085132779051e-06, | |
| "loss": 0.1056, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.0103227013860755e-06, | |
| "loss": 0.1142, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 4.008559087189163e-06, | |
| "loss": 0.1136, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.006794291569485e-06, | |
| "loss": 0.1222, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.005028315909131e-06, | |
| "loss": 0.1083, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.003261161591119e-06, | |
| "loss": 0.1055, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 4.001492829999393e-06, | |
| "loss": 0.1178, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.999723322518812e-06, | |
| "loss": 0.1078, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.997952640535161e-06, | |
| "loss": 0.0884, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.9961807854351446e-06, | |
| "loss": 0.0966, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.994407758606382e-06, | |
| "loss": 0.1027, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.992633561437416e-06, | |
| "loss": 0.1076, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.990858195317702e-06, | |
| "loss": 0.1183, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.989081661637611e-06, | |
| "loss": 0.1139, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.9873039617884295e-06, | |
| "loss": 0.1185, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.985525097162357e-06, | |
| "loss": 0.0961, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.983745069152505e-06, | |
| "loss": 0.0903, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.981963879152897e-06, | |
| "loss": 0.0954, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.9801815285584644e-06, | |
| "loss": 0.0839, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.97839801876505e-06, | |
| "loss": 0.0987, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.976613351169402e-06, | |
| "loss": 0.1357, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.974827527169177e-06, | |
| "loss": 0.1028, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.973040548162938e-06, | |
| "loss": 0.1086, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.97125241555015e-06, | |
| "loss": 0.1028, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.969463130731183e-06, | |
| "loss": 0.0978, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.96767269510731e-06, | |
| "loss": 0.1209, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.965881110080703e-06, | |
| "loss": 0.0816, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.964088377054437e-06, | |
| "loss": 0.0981, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.962294497432483e-06, | |
| "loss": 0.118, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.960499472619713e-06, | |
| "loss": 0.1142, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.958703304021895e-06, | |
| "loss": 0.1034, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.956905993045689e-06, | |
| "loss": 0.1064, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.955107541098655e-06, | |
| "loss": 0.0933, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.953307949589246e-06, | |
| "loss": 0.1081, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.9515072199268025e-06, | |
| "loss": 0.0983, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.949705353521561e-06, | |
| "loss": 0.1039, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.947902351784647e-06, | |
| "loss": 0.1183, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.9460982161280744e-06, | |
| "loss": 0.1193, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 3.944292947964747e-06, | |
| "loss": 0.1059, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.942486548708454e-06, | |
| "loss": 0.1065, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.94067901977387e-06, | |
| "loss": 0.0941, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.9388703625765545e-06, | |
| "loss": 0.106, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.937060578532952e-06, | |
| "loss": 0.1048, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.935249669060387e-06, | |
| "loss": 0.1046, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.93343763557707e-06, | |
| "loss": 0.102, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.931624479502085e-06, | |
| "loss": 0.128, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.929810202255402e-06, | |
| "loss": 0.0892, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.927994805257864e-06, | |
| "loss": 0.1057, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.926178289931193e-06, | |
| "loss": 0.1, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.924360657697987e-06, | |
| "loss": 0.1115, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.922541909981719e-06, | |
| "loss": 0.1111, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.9207220482067336e-06, | |
| "loss": 0.1104, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.9189010737982505e-06, | |
| "loss": 0.1079, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.9170789881823575e-06, | |
| "loss": 0.1141, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.915255792786015e-06, | |
| "loss": 0.1029, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.913431489037054e-06, | |
| "loss": 0.1202, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.911606078364171e-06, | |
| "loss": 0.1212, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.909779562196929e-06, | |
| "loss": 0.1176, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.907951941965757e-06, | |
| "loss": 0.1099, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.9061232191019525e-06, | |
| "loss": 0.1147, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.904293395037669e-06, | |
| "loss": 0.1017, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.902462471205931e-06, | |
| "loss": 0.103, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.900630449040618e-06, | |
| "loss": 0.1023, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.898797329976473e-06, | |
| "loss": 0.0901, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.896963115449094e-06, | |
| "loss": 0.1067, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.895127806894942e-06, | |
| "loss": 0.0984, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.893291405751332e-06, | |
| "loss": 0.0925, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.8914539134564355e-06, | |
| "loss": 0.1025, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.889615331449278e-06, | |
| "loss": 0.0897, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.887775661169738e-06, | |
| "loss": 0.1057, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.885934904058547e-06, | |
| "loss": 0.1205, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.884093061557288e-06, | |
| "loss": 0.0923, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.882250135108393e-06, | |
| "loss": 0.1072, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.880406126155144e-06, | |
| "loss": 0.1134, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.878561036141669e-06, | |
| "loss": 0.1005, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 3.876714866512945e-06, | |
| "loss": 0.1176, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.874867618714794e-06, | |
| "loss": 0.1285, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.873019294193879e-06, | |
| "loss": 0.094, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.8711698943977105e-06, | |
| "loss": 0.1255, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.869319420774639e-06, | |
| "loss": 0.1222, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.867467874773857e-06, | |
| "loss": 0.1025, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.865615257845396e-06, | |
| "loss": 0.0957, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.863761571440127e-06, | |
| "loss": 0.0968, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.861906817009758e-06, | |
| "loss": 0.0984, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.860050996006831e-06, | |
| "loss": 0.1138, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.858194109884729e-06, | |
| "loss": 0.1056, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.856336160097664e-06, | |
| "loss": 0.1201, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.854477148100684e-06, | |
| "loss": 0.1104, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.852617075349666e-06, | |
| "loss": 0.0979, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.850755943301321e-06, | |
| "loss": 0.1075, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.848893753413189e-06, | |
| "loss": 0.1083, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.847030507143635e-06, | |
| "loss": 0.1046, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.845166205951854e-06, | |
| "loss": 0.115, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.84330085129787e-06, | |
| "loss": 0.104, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.8414344446425255e-06, | |
| "loss": 0.1114, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.839566987447492e-06, | |
| "loss": 0.112, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.837698481175262e-06, | |
| "loss": 0.1244, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.835828927289149e-06, | |
| "loss": 0.1044, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.833958327253288e-06, | |
| "loss": 0.0966, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.832086682532633e-06, | |
| "loss": 0.1084, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.830213994592955e-06, | |
| "loss": 0.1038, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.828340264900846e-06, | |
| "loss": 0.1066, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.8264654949237065e-06, | |
| "loss": 0.1173, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.824589686129759e-06, | |
| "loss": 0.1147, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.822712839988036e-06, | |
| "loss": 0.1063, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.820834957968383e-06, | |
| "loss": 0.0999, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.818956041541456e-06, | |
| "loss": 0.1004, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.8170760921787235e-06, | |
| "loss": 0.0859, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.815195111352459e-06, | |
| "loss": 0.0993, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.813313100535747e-06, | |
| "loss": 0.0948, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.811430061202479e-06, | |
| "loss": 0.1023, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 3.8095459948273495e-06, | |
| "loss": 0.1108, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.807660902885858e-06, | |
| "loss": 0.107, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.805774786854307e-06, | |
| "loss": 0.1072, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.8038876482098048e-06, | |
| "loss": 0.1263, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.8019994884302546e-06, | |
| "loss": 0.1063, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.8001103089943637e-06, | |
| "loss": 0.1134, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7982201113816346e-06, | |
| "loss": 0.0829, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.79632889707237e-06, | |
| "loss": 0.1166, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7944366675476684e-06, | |
| "loss": 0.1214, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.792543424289422e-06, | |
| "loss": 0.1079, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7906491687803183e-06, | |
| "loss": 0.0905, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7887539025038354e-06, | |
| "loss": 0.1315, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7868576269442447e-06, | |
| "loss": 0.0884, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.78496034358661e-06, | |
| "loss": 0.1105, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7830620539167795e-06, | |
| "loss": 0.1014, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7811627594213934e-06, | |
| "loss": 0.1137, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7792624615878778e-06, | |
| "loss": 0.1102, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7773611619044424e-06, | |
| "loss": 0.1163, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7754588618600864e-06, | |
| "loss": 0.1138, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7735555629445863e-06, | |
| "loss": 0.1162, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7716512666485065e-06, | |
| "loss": 0.0875, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7697459744631887e-06, | |
| "loss": 0.1056, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7678396878807557e-06, | |
| "loss": 0.1082, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.76593240839411e-06, | |
| "loss": 0.0967, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.76402413749693e-06, | |
| "loss": 0.1192, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.762114876683672e-06, | |
| "loss": 0.0995, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.760204627449566e-06, | |
| "loss": 0.0953, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.758293391290618e-06, | |
| "loss": 0.1031, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.756381169703607e-06, | |
| "loss": 0.108, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7544679641860792e-06, | |
| "loss": 0.0969, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7525537762363577e-06, | |
| "loss": 0.1104, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7506386073535313e-06, | |
| "loss": 0.0914, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.748722459037457e-06, | |
| "loss": 0.0855, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.746805332788761e-06, | |
| "loss": 0.1122, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7448872301088335e-06, | |
| "loss": 0.1144, | |
| "step": 1296 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.742968152499829e-06, | |
| "loss": 0.1056, | |
| "step": 1297 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7410481014646676e-06, | |
| "loss": 0.1002, | |
| "step": 1298 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 3.7391270785070298e-06, | |
| "loss": 0.1168, | |
| "step": 1299 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7372050851313597e-06, | |
| "loss": 0.1002, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7352821228428573e-06, | |
| "loss": 0.1115, | |
| "step": 1301 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7333581931474845e-06, | |
| "loss": 0.1072, | |
| "step": 1302 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.73143329755196e-06, | |
| "loss": 0.0888, | |
| "step": 1303 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7295074375637604e-06, | |
| "loss": 0.1001, | |
| "step": 1304 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7275806146911152e-06, | |
| "loss": 0.1012, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7256528304430084e-06, | |
| "loss": 0.1225, | |
| "step": 1306 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.723724086329179e-06, | |
| "loss": 0.113, | |
| "step": 1307 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7217943838601147e-06, | |
| "loss": 0.096, | |
| "step": 1308 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.719863724547056e-06, | |
| "loss": 0.1135, | |
| "step": 1309 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7179321099019917e-06, | |
| "loss": 0.0808, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7159995414376594e-06, | |
| "loss": 0.0985, | |
| "step": 1311 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7140660206675426e-06, | |
| "loss": 0.1297, | |
| "step": 1312 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7121315491058728e-06, | |
| "loss": 0.106, | |
| "step": 1313 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.710196128267623e-06, | |
| "loss": 0.0921, | |
| "step": 1314 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7082597596685122e-06, | |
| "loss": 0.1001, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.706322444825e-06, | |
| "loss": 0.1117, | |
| "step": 1316 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7043841852542884e-06, | |
| "loss": 0.1244, | |
| "step": 1317 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.7024449824743188e-06, | |
| "loss": 0.1071, | |
| "step": 1318 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.70050483800377e-06, | |
| "loss": 0.1158, | |
| "step": 1319 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6985637533620606e-06, | |
| "loss": 0.1013, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.696621730069343e-06, | |
| "loss": 0.0912, | |
| "step": 1321 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.694678769646506e-06, | |
| "loss": 0.1118, | |
| "step": 1322 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.692734873615174e-06, | |
| "loss": 0.117, | |
| "step": 1323 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6907900434977008e-06, | |
| "loss": 0.1158, | |
| "step": 1324 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.688844280817173e-06, | |
| "loss": 0.1123, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.68689758709741e-06, | |
| "loss": 0.115, | |
| "step": 1326 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6849499638629548e-06, | |
| "loss": 0.0876, | |
| "step": 1327 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6830014126390853e-06, | |
| "loss": 0.1089, | |
| "step": 1328 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6810519349518005e-06, | |
| "loss": 0.1119, | |
| "step": 1329 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.679101532327827e-06, | |
| "loss": 0.1125, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6771502062946164e-06, | |
| "loss": 0.1078, | |
| "step": 1331 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6751979583803415e-06, | |
| "loss": 0.1081, | |
| "step": 1332 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6732447901139013e-06, | |
| "loss": 0.0979, | |
| "step": 1333 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6712907030249097e-06, | |
| "loss": 0.0959, | |
| "step": 1334 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6693356986437047e-06, | |
| "loss": 0.0912, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 3.6673797785013414e-06, | |
| "loss": 0.1193, | |
| "step": 1336 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.66542294412959e-06, | |
| "loss": 0.105, | |
| "step": 1337 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6634651970609415e-06, | |
| "loss": 0.0825, | |
| "step": 1338 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.661506538828596e-06, | |
| "loss": 0.104, | |
| "step": 1339 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6595469709664703e-06, | |
| "loss": 0.0921, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.657586495009194e-06, | |
| "loss": 0.0869, | |
| "step": 1341 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6556251124921065e-06, | |
| "loss": 0.1257, | |
| "step": 1342 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.653662824951258e-06, | |
| "loss": 0.1027, | |
| "step": 1343 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.651699633923407e-06, | |
| "loss": 0.1105, | |
| "step": 1344 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.649735540946018e-06, | |
| "loss": 0.0936, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.647770547557267e-06, | |
| "loss": 0.0944, | |
| "step": 1346 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6458046552960282e-06, | |
| "loss": 0.1045, | |
| "step": 1347 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.643837865701886e-06, | |
| "loss": 0.1115, | |
| "step": 1348 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6418701803151226e-06, | |
| "loss": 0.1123, | |
| "step": 1349 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.639901600676725e-06, | |
| "loss": 0.1051, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.637932128328379e-06, | |
| "loss": 0.1004, | |
| "step": 1351 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6359617648124713e-06, | |
| "loss": 0.1003, | |
| "step": 1352 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6339905116720836e-06, | |
| "loss": 0.0923, | |
| "step": 1353 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6320183704509964e-06, | |
| "loss": 0.1052, | |
| "step": 1354 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6300453426936856e-06, | |
| "loss": 0.0883, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.62807142994532e-06, | |
| "loss": 0.0997, | |
| "step": 1356 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.626096633751764e-06, | |
| "loss": 0.1233, | |
| "step": 1357 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6241209556595715e-06, | |
| "loss": 0.1036, | |
| "step": 1358 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.622144397215987e-06, | |
| "loss": 0.1077, | |
| "step": 1359 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6201669599689466e-06, | |
| "loss": 0.1037, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6181886454670727e-06, | |
| "loss": 0.0935, | |
| "step": 1361 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.616209455259676e-06, | |
| "loss": 0.1159, | |
| "step": 1362 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.614229390896752e-06, | |
| "loss": 0.093, | |
| "step": 1363 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6122484539289802e-06, | |
| "loss": 0.1081, | |
| "step": 1364 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.6102666459077262e-06, | |
| "loss": 0.1003, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.608283968385035e-06, | |
| "loss": 0.0953, | |
| "step": 1366 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.606300422913634e-06, | |
| "loss": 0.1121, | |
| "step": 1367 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.60431601104693e-06, | |
| "loss": 0.1111, | |
| "step": 1368 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.602330734339007e-06, | |
| "loss": 0.1175, | |
| "step": 1369 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.600344594344628e-06, | |
| "loss": 0.1091, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.5983575926192332e-06, | |
| "loss": 0.112, | |
| "step": 1371 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 3.5963697307189337e-06, | |
| "loss": 0.1112, | |
| "step": 1372 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5943810102005183e-06, | |
| "loss": 0.1025, | |
| "step": 1373 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5923914326214464e-06, | |
| "loss": 0.0842, | |
| "step": 1374 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.590400999539847e-06, | |
| "loss": 0.1195, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5884097125145233e-06, | |
| "loss": 0.1114, | |
| "step": 1376 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.586417573104943e-06, | |
| "loss": 0.0904, | |
| "step": 1377 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5844245828712432e-06, | |
| "loss": 0.102, | |
| "step": 1378 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5824307433742277e-06, | |
| "loss": 0.0873, | |
| "step": 1379 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.580436056175365e-06, | |
| "loss": 0.0843, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5784405228367874e-06, | |
| "loss": 0.0873, | |
| "step": 1381 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5764441449212896e-06, | |
| "loss": 0.0958, | |
| "step": 1382 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5744469239923276e-06, | |
| "loss": 0.1114, | |
| "step": 1383 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5724488616140193e-06, | |
| "loss": 0.0943, | |
| "step": 1384 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.570449959351138e-06, | |
| "loss": 0.106, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.56845021876912e-06, | |
| "loss": 0.0949, | |
| "step": 1386 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5664496414340525e-06, | |
| "loss": 0.1208, | |
| "step": 1387 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.564448228912682e-06, | |
| "loss": 0.1127, | |
| "step": 1388 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5624459827724078e-06, | |
| "loss": 0.1299, | |
| "step": 1389 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5604429045812806e-06, | |
| "loss": 0.0924, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5584389959080055e-06, | |
| "loss": 0.1035, | |
| "step": 1391 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.556434258321936e-06, | |
| "loss": 0.0974, | |
| "step": 1392 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5544286933930754e-06, | |
| "loss": 0.0954, | |
| "step": 1393 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.552422302692075e-06, | |
| "loss": 0.1153, | |
| "step": 1394 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5504150877902323e-06, | |
| "loss": 0.1046, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5484070502594912e-06, | |
| "loss": 0.0916, | |
| "step": 1396 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5463981916724382e-06, | |
| "loss": 0.0985, | |
| "step": 1397 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.544388513602305e-06, | |
| "loss": 0.0939, | |
| "step": 1398 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5423780176229635e-06, | |
| "loss": 0.1027, | |
| "step": 1399 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5403667053089263e-06, | |
| "loss": 0.1054, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.538354578235347e-06, | |
| "loss": 0.1259, | |
| "step": 1401 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5363416379780135e-06, | |
| "loss": 0.1236, | |
| "step": 1402 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5343278861133545e-06, | |
| "loss": 0.1113, | |
| "step": 1403 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.532313324218432e-06, | |
| "loss": 0.099, | |
| "step": 1404 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5302979538709443e-06, | |
| "loss": 0.1282, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5282817766492204e-06, | |
| "loss": 0.1021, | |
| "step": 1406 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.526264794132223e-06, | |
| "loss": 0.0884, | |
| "step": 1407 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.524247007899545e-06, | |
| "loss": 0.1262, | |
| "step": 1408 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 3.5222284195314075e-06, | |
| "loss": 0.1166, | |
| "step": 1409 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5202090306086623e-06, | |
| "loss": 0.1102, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.518188842712787e-06, | |
| "loss": 0.11, | |
| "step": 1411 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5161678574258827e-06, | |
| "loss": 0.0898, | |
| "step": 1412 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.514146076330679e-06, | |
| "loss": 0.0995, | |
| "step": 1413 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.512123501010526e-06, | |
| "loss": 0.115, | |
| "step": 1414 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5101001330493956e-06, | |
| "loss": 0.0983, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5080759740318825e-06, | |
| "loss": 0.098, | |
| "step": 1416 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5060510255431996e-06, | |
| "loss": 0.1061, | |
| "step": 1417 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.504025289169178e-06, | |
| "loss": 0.0927, | |
| "step": 1418 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.5019987664962652e-06, | |
| "loss": 0.0992, | |
| "step": 1419 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4999714591115266e-06, | |
| "loss": 0.1164, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4979433686026404e-06, | |
| "loss": 0.1276, | |
| "step": 1421 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4959144965578994e-06, | |
| "loss": 0.1114, | |
| "step": 1422 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.493884844566206e-06, | |
| "loss": 0.1078, | |
| "step": 1423 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.491854414217076e-06, | |
| "loss": 0.1195, | |
| "step": 1424 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4898232071006335e-06, | |
| "loss": 0.1114, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.487791224807612e-06, | |
| "loss": 0.1063, | |
| "step": 1426 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4857584689293513e-06, | |
| "loss": 0.0995, | |
| "step": 1427 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4837249410577943e-06, | |
| "loss": 0.0982, | |
| "step": 1428 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4816906427854958e-06, | |
| "loss": 0.1101, | |
| "step": 1429 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.479655575705606e-06, | |
| "loss": 0.1053, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.477619741411882e-06, | |
| "loss": 0.1122, | |
| "step": 1431 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.47558314149868e-06, | |
| "loss": 0.1033, | |
| "step": 1432 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.473545777560955e-06, | |
| "loss": 0.1047, | |
| "step": 1433 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4715076511942637e-06, | |
| "loss": 0.0993, | |
| "step": 1434 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4694687639947554e-06, | |
| "loss": 0.1034, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.467429117559179e-06, | |
| "loss": 0.124, | |
| "step": 1436 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4653887134848763e-06, | |
| "loss": 0.0955, | |
| "step": 1437 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4633475533697807e-06, | |
| "loss": 0.0998, | |
| "step": 1438 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.461305638812423e-06, | |
| "loss": 0.118, | |
| "step": 1439 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.459262971411918e-06, | |
| "loss": 0.1022, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.457219552767976e-06, | |
| "loss": 0.0956, | |
| "step": 1441 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.455175384480892e-06, | |
| "loss": 0.1346, | |
| "step": 1442 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4531304681515486e-06, | |
| "loss": 0.1078, | |
| "step": 1443 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.4510848053814174e-06, | |
| "loss": 0.0863, | |
| "step": 1444 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 3.449038397772549e-06, | |
| "loss": 0.1162, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.446991246927582e-06, | |
| "loss": 0.113, | |
| "step": 1446 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4449433544497357e-06, | |
| "loss": 0.1162, | |
| "step": 1447 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.442894721942808e-06, | |
| "loss": 0.102, | |
| "step": 1448 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4408453510111813e-06, | |
| "loss": 0.1065, | |
| "step": 1449 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4387952432598102e-06, | |
| "loss": 0.1214, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.436744400294232e-06, | |
| "loss": 0.1077, | |
| "step": 1451 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4346928237205555e-06, | |
| "loss": 0.0982, | |
| "step": 1452 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4326405151454657e-06, | |
| "loss": 0.1107, | |
| "step": 1453 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4305874761762224e-06, | |
| "loss": 0.1065, | |
| "step": 1454 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4285337084206545e-06, | |
| "loss": 0.1036, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4264792134871634e-06, | |
| "loss": 0.0947, | |
| "step": 1456 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.424423992984719e-06, | |
| "loss": 0.1018, | |
| "step": 1457 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.422368048522861e-06, | |
| "loss": 0.0826, | |
| "step": 1458 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4203113817116955e-06, | |
| "loss": 0.0893, | |
| "step": 1459 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4182539941618927e-06, | |
| "loss": 0.1183, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4161958874846884e-06, | |
| "loss": 0.1087, | |
| "step": 1461 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4141370632918824e-06, | |
| "loss": 0.1055, | |
| "step": 1462 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.412077523195836e-06, | |
| "loss": 0.095, | |
| "step": 1463 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4100172688094703e-06, | |
| "loss": 0.1114, | |
| "step": 1464 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4079563017462657e-06, | |
| "loss": 0.1004, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.405894623620262e-06, | |
| "loss": 0.1222, | |
| "step": 1466 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4038322360460553e-06, | |
| "loss": 0.0981, | |
| "step": 1467 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.4017691406387963e-06, | |
| "loss": 0.0918, | |
| "step": 1468 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3997053390141923e-06, | |
| "loss": 0.1033, | |
| "step": 1469 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.397640832788502e-06, | |
| "loss": 0.1253, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3955756235785353e-06, | |
| "loss": 0.1054, | |
| "step": 1471 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.393509713001655e-06, | |
| "loss": 0.0975, | |
| "step": 1472 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3914431026757698e-06, | |
| "loss": 0.0976, | |
| "step": 1473 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3893757942193394e-06, | |
| "loss": 0.1122, | |
| "step": 1474 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3873077892513705e-06, | |
| "loss": 0.114, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3852390893914123e-06, | |
| "loss": 0.1055, | |
| "step": 1476 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3831696962595595e-06, | |
| "loss": 0.0966, | |
| "step": 1477 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.381099611476451e-06, | |
| "loss": 0.0928, | |
| "step": 1478 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3790288366632664e-06, | |
| "loss": 0.0891, | |
| "step": 1479 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.376957373441726e-06, | |
| "loss": 0.1099, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3748852234340882e-06, | |
| "loss": 0.1231, | |
| "step": 1481 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 3.3728123882631504e-06, | |
| "loss": 0.1103, | |
| "step": 1482 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3707388695522465e-06, | |
| "loss": 0.1105, | |
| "step": 1483 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3686646689252444e-06, | |
| "loss": 0.1014, | |
| "step": 1484 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.366589788006549e-06, | |
| "loss": 0.104, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3645142284210936e-06, | |
| "loss": 0.094, | |
| "step": 1486 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.362437991794348e-06, | |
| "loss": 0.0931, | |
| "step": 1487 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3603610797523067e-06, | |
| "loss": 0.0925, | |
| "step": 1488 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.358283493921499e-06, | |
| "loss": 0.1073, | |
| "step": 1489 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3562052359289776e-06, | |
| "loss": 0.124, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3541263074023216e-06, | |
| "loss": 0.1046, | |
| "step": 1491 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3520467099696395e-06, | |
| "loss": 0.1082, | |
| "step": 1492 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3499664452595587e-06, | |
| "loss": 0.1176, | |
| "step": 1493 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.347885514901232e-06, | |
| "loss": 0.1081, | |
| "step": 1494 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3458039205243325e-06, | |
| "loss": 0.1101, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.343721663759053e-06, | |
| "loss": 0.1054, | |
| "step": 1496 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.341638746236107e-06, | |
| "loss": 0.1052, | |
| "step": 1497 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.339555169586723e-06, | |
| "loss": 0.1046, | |
| "step": 1498 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.337470935442647e-06, | |
| "loss": 0.1218, | |
| "step": 1499 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3353860454361398e-06, | |
| "loss": 0.1305, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3333005011999747e-06, | |
| "loss": 0.1186, | |
| "step": 1501 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3312143043674405e-06, | |
| "loss": 0.0991, | |
| "step": 1502 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.329127456572333e-06, | |
| "loss": 0.1042, | |
| "step": 1503 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3270399594489604e-06, | |
| "loss": 0.0991, | |
| "step": 1504 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3249518146321393e-06, | |
| "loss": 0.129, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3228630237571918e-06, | |
| "loss": 0.1261, | |
| "step": 1506 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3207735884599486e-06, | |
| "loss": 0.1218, | |
| "step": 1507 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3186835103767422e-06, | |
| "loss": 0.1028, | |
| "step": 1508 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3165927911444114e-06, | |
| "loss": 0.1094, | |
| "step": 1509 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3145014324002945e-06, | |
| "loss": 0.0991, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.312409435782232e-06, | |
| "loss": 0.1028, | |
| "step": 1511 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.310316802928564e-06, | |
| "loss": 0.106, | |
| "step": 1512 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3082235354781284e-06, | |
| "loss": 0.0963, | |
| "step": 1513 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3061296350702606e-06, | |
| "loss": 0.098, | |
| "step": 1514 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.3040351033447915e-06, | |
| "loss": 0.1007, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.301939941942045e-06, | |
| "loss": 0.1063, | |
| "step": 1516 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.29984415250284e-06, | |
| "loss": 0.1039, | |
| "step": 1517 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.2977477366684883e-06, | |
| "loss": 0.1083, | |
| "step": 1518 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 3.2956506960807895e-06, | |
| "loss": 0.099, | |
| "step": 1519 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2935530323820326e-06, | |
| "loss": 0.1002, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2914547472149965e-06, | |
| "loss": 0.0979, | |
| "step": 1521 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2893558422229453e-06, | |
| "loss": 0.0919, | |
| "step": 1522 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.287256319049631e-06, | |
| "loss": 0.0999, | |
| "step": 1523 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2851561793392857e-06, | |
| "loss": 0.0953, | |
| "step": 1524 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.283055424736627e-06, | |
| "loss": 0.0805, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2809540568868535e-06, | |
| "loss": 0.093, | |
| "step": 1526 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.278852077435645e-06, | |
| "loss": 0.096, | |
| "step": 1527 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2767494880291577e-06, | |
| "loss": 0.1057, | |
| "step": 1528 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2746462903140285e-06, | |
| "loss": 0.1005, | |
| "step": 1529 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.272542485937369e-06, | |
| "loss": 0.1162, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.270438076546766e-06, | |
| "loss": 0.1138, | |
| "step": 1531 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.26833306379028e-06, | |
| "loss": 0.0985, | |
| "step": 1532 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2662274493164457e-06, | |
| "loss": 0.1048, | |
| "step": 1533 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2641212347742664e-06, | |
| "loss": 0.1031, | |
| "step": 1534 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2620144218132164e-06, | |
| "loss": 0.107, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2599070120832404e-06, | |
| "loss": 0.11, | |
| "step": 1536 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2577990072347483e-06, | |
| "loss": 0.0983, | |
| "step": 1537 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2556904089186165e-06, | |
| "loss": 0.0825, | |
| "step": 1538 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.253581218786186e-06, | |
| "loss": 0.0813, | |
| "step": 1539 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.251471438489262e-06, | |
| "loss": 0.1128, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2493610696801114e-06, | |
| "loss": 0.0952, | |
| "step": 1541 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2472501140114632e-06, | |
| "loss": 0.0937, | |
| "step": 1542 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.245138573136504e-06, | |
| "loss": 0.1116, | |
| "step": 1543 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2430264487088797e-06, | |
| "loss": 0.1182, | |
| "step": 1544 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.240913742382693e-06, | |
| "loss": 0.103, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2388004558125027e-06, | |
| "loss": 0.1095, | |
| "step": 1546 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2366865906533217e-06, | |
| "loss": 0.1001, | |
| "step": 1547 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2345721485606165e-06, | |
| "loss": 0.1096, | |
| "step": 1548 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.232457131190304e-06, | |
| "loss": 0.1174, | |
| "step": 1549 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2303415401987543e-06, | |
| "loss": 0.0899, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2282253772427837e-06, | |
| "loss": 0.1007, | |
| "step": 1551 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.226108643979658e-06, | |
| "loss": 0.1015, | |
| "step": 1552 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.22399134206709e-06, | |
| "loss": 0.1098, | |
| "step": 1553 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.2218734731632366e-06, | |
| "loss": 0.1149, | |
| "step": 1554 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 3.219755038926701e-06, | |
| "loss": 0.0882, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.2176360410165253e-06, | |
| "loss": 0.113, | |
| "step": 1556 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.2155164810921956e-06, | |
| "loss": 0.0978, | |
| "step": 1557 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.2133963608136403e-06, | |
| "loss": 0.1178, | |
| "step": 1558 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.2112756818412204e-06, | |
| "loss": 0.1052, | |
| "step": 1559 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.209154445835742e-06, | |
| "loss": 0.1052, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.207032654458442e-06, | |
| "loss": 0.1126, | |
| "step": 1561 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.204910309370993e-06, | |
| "loss": 0.0974, | |
| "step": 1562 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.202787412235503e-06, | |
| "loss": 0.1108, | |
| "step": 1563 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.200663964714511e-06, | |
| "loss": 0.0976, | |
| "step": 1564 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1985399684709877e-06, | |
| "loss": 0.1145, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.196415425168335e-06, | |
| "loss": 0.1122, | |
| "step": 1566 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1942903364703787e-06, | |
| "loss": 0.1243, | |
| "step": 1567 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1921647040413767e-06, | |
| "loss": 0.0865, | |
| "step": 1568 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.190038529546009e-06, | |
| "loss": 0.1005, | |
| "step": 1569 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1879118146493828e-06, | |
| "loss": 0.0866, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1857845610170278e-06, | |
| "loss": 0.1008, | |
| "step": 1571 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.183656770314894e-06, | |
| "loss": 0.1104, | |
| "step": 1572 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.181528444209354e-06, | |
| "loss": 0.1017, | |
| "step": 1573 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1793995843671987e-06, | |
| "loss": 0.1275, | |
| "step": 1574 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.177270192455637e-06, | |
| "loss": 0.0833, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1751402701422957e-06, | |
| "loss": 0.0937, | |
| "step": 1576 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.173009819095215e-06, | |
| "loss": 0.0963, | |
| "step": 1577 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1708788409828508e-06, | |
| "loss": 0.0791, | |
| "step": 1578 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1687473374740703e-06, | |
| "loss": 0.0867, | |
| "step": 1579 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1666153102381536e-06, | |
| "loss": 0.1172, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.164482760944791e-06, | |
| "loss": 0.1079, | |
| "step": 1581 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1623496912640796e-06, | |
| "loss": 0.0982, | |
| "step": 1582 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.160216102866526e-06, | |
| "loss": 0.0997, | |
| "step": 1583 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1580819974230435e-06, | |
| "loss": 0.1003, | |
| "step": 1584 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.155947376604948e-06, | |
| "loss": 0.0993, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1538122420839612e-06, | |
| "loss": 0.1089, | |
| "step": 1586 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1516765955322058e-06, | |
| "loss": 0.0977, | |
| "step": 1587 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1495404386222054e-06, | |
| "loss": 0.0927, | |
| "step": 1588 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1474037730268846e-06, | |
| "loss": 0.1161, | |
| "step": 1589 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1452666004195652e-06, | |
| "loss": 0.1032, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.1431289224739676e-06, | |
| "loss": 0.1086, | |
| "step": 1591 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 3.140990740864205e-06, | |
| "loss": 0.1117, | |
| "step": 1592 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1388520572647873e-06, | |
| "loss": 0.1034, | |
| "step": 1593 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.136712873350618e-06, | |
| "loss": 0.1019, | |
| "step": 1594 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.13457319079699e-06, | |
| "loss": 0.1026, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.132433011279591e-06, | |
| "loss": 0.126, | |
| "step": 1596 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1302923364744914e-06, | |
| "loss": 0.0901, | |
| "step": 1597 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1281511680581554e-06, | |
| "loss": 0.1011, | |
| "step": 1598 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1260095077074314e-06, | |
| "loss": 0.1166, | |
| "step": 1599 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1238673570995526e-06, | |
| "loss": 0.1185, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.121724717912138e-06, | |
| "loss": 0.117, | |
| "step": 1601 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.119581591823186e-06, | |
| "loss": 0.1077, | |
| "step": 1602 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1174379805110795e-06, | |
| "loss": 0.1193, | |
| "step": 1603 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.11529388565458e-06, | |
| "loss": 0.1031, | |
| "step": 1604 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.113149308932828e-06, | |
| "loss": 0.1229, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.111004252025342e-06, | |
| "loss": 0.0968, | |
| "step": 1606 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1088587166120137e-06, | |
| "loss": 0.1045, | |
| "step": 1607 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1067127043731144e-06, | |
| "loss": 0.1008, | |
| "step": 1608 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.104566216989283e-06, | |
| "loss": 0.0915, | |
| "step": 1609 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1024192561415364e-06, | |
| "loss": 0.1171, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.1002718235112582e-06, | |
| "loss": 0.0905, | |
| "step": 1611 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0981239207802016e-06, | |
| "loss": 0.1108, | |
| "step": 1612 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.095975549630491e-06, | |
| "loss": 0.0951, | |
| "step": 1613 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0938267117446156e-06, | |
| "loss": 0.106, | |
| "step": 1614 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.09167740880543e-06, | |
| "loss": 0.0989, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0895276424961524e-06, | |
| "loss": 0.1027, | |
| "step": 1616 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0873774145003647e-06, | |
| "loss": 0.1137, | |
| "step": 1617 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0852267265020117e-06, | |
| "loss": 0.1083, | |
| "step": 1618 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0830755801853974e-06, | |
| "loss": 0.1073, | |
| "step": 1619 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.080923977235182e-06, | |
| "loss": 0.0918, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.078771919336389e-06, | |
| "loss": 0.1136, | |
| "step": 1621 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0766194081743922e-06, | |
| "loss": 0.0939, | |
| "step": 1622 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.074466445434925e-06, | |
| "loss": 0.1153, | |
| "step": 1623 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0723130328040728e-06, | |
| "loss": 0.0953, | |
| "step": 1624 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0701591719682722e-06, | |
| "loss": 0.1146, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0680048646143115e-06, | |
| "loss": 0.1146, | |
| "step": 1626 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.0658501124293293e-06, | |
| "loss": 0.114, | |
| "step": 1627 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.063694917100813e-06, | |
| "loss": 0.115, | |
| "step": 1628 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 3.061539280316596e-06, | |
| "loss": 0.0846, | |
| "step": 1629 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.059383203764857e-06, | |
| "loss": 0.0958, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0572266891341203e-06, | |
| "loss": 0.0992, | |
| "step": 1631 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.055069738113253e-06, | |
| "loss": 0.0949, | |
| "step": 1632 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0529123523914626e-06, | |
| "loss": 0.1128, | |
| "step": 1633 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0507545336583e-06, | |
| "loss": 0.1171, | |
| "step": 1634 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.048596283603652e-06, | |
| "loss": 0.111, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0464376039177444e-06, | |
| "loss": 0.1125, | |
| "step": 1636 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.044278496291141e-06, | |
| "loss": 0.1046, | |
| "step": 1637 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0421189624147383e-06, | |
| "loss": 0.1024, | |
| "step": 1638 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0399590039797687e-06, | |
| "loss": 0.0995, | |
| "step": 1639 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0377986226777943e-06, | |
| "loss": 0.0971, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.035637820200712e-06, | |
| "loss": 0.082, | |
| "step": 1641 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0334765982407453e-06, | |
| "loss": 0.1334, | |
| "step": 1642 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.031314958490449e-06, | |
| "loss": 0.1206, | |
| "step": 1643 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0291529026427025e-06, | |
| "loss": 0.1167, | |
| "step": 1644 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.026990432390713e-06, | |
| "loss": 0.0807, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0248275494280105e-06, | |
| "loss": 0.093, | |
| "step": 1646 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0226642554484505e-06, | |
| "loss": 0.1073, | |
| "step": 1647 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0205005521462085e-06, | |
| "loss": 0.11, | |
| "step": 1648 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0183364412157816e-06, | |
| "loss": 0.1112, | |
| "step": 1649 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0161719243519848e-06, | |
| "loss": 0.0876, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0140070032499526e-06, | |
| "loss": 0.108, | |
| "step": 1651 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0118416796051354e-06, | |
| "loss": 0.0947, | |
| "step": 1652 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.009675955113298e-06, | |
| "loss": 0.1039, | |
| "step": 1653 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0075098314705215e-06, | |
| "loss": 0.1182, | |
| "step": 1654 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.0053433103731964e-06, | |
| "loss": 0.0966, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.003176393518027e-06, | |
| "loss": 0.1067, | |
| "step": 1656 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 3.001009082602027e-06, | |
| "loss": 0.099, | |
| "step": 1657 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.998841379322517e-06, | |
| "loss": 0.1374, | |
| "step": 1658 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.996673285377128e-06, | |
| "loss": 0.1104, | |
| "step": 1659 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9945048024637935e-06, | |
| "loss": 0.1087, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9923359322807538e-06, | |
| "loss": 0.1082, | |
| "step": 1661 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9901666765265534e-06, | |
| "loss": 0.1073, | |
| "step": 1662 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9879970369000355e-06, | |
| "loss": 0.1251, | |
| "step": 1663 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9858270151003477e-06, | |
| "loss": 0.1123, | |
| "step": 1664 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 2.9836566128269317e-06, | |
| "loss": 0.0871, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.981485831779534e-06, | |
| "loss": 0.1044, | |
| "step": 1666 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9793146736581922e-06, | |
| "loss": 0.0931, | |
| "step": 1667 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9771431401632424e-06, | |
| "loss": 0.1038, | |
| "step": 1668 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.974971232995312e-06, | |
| "loss": 0.0852, | |
| "step": 1669 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9727989538553233e-06, | |
| "loss": 0.1266, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9706263044444887e-06, | |
| "loss": 0.1058, | |
| "step": 1671 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9684532864643123e-06, | |
| "loss": 0.0985, | |
| "step": 1672 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9662799016165834e-06, | |
| "loss": 0.0911, | |
| "step": 1673 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9641061516033828e-06, | |
| "loss": 0.1088, | |
| "step": 1674 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9619320381270735e-06, | |
| "loss": 0.1109, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9597575628903058e-06, | |
| "loss": 0.078, | |
| "step": 1676 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.957582727596013e-06, | |
| "loss": 0.1084, | |
| "step": 1677 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9554075339474087e-06, | |
| "loss": 0.1058, | |
| "step": 1678 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.953231983647988e-06, | |
| "loss": 0.107, | |
| "step": 1679 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.951056078401526e-06, | |
| "loss": 0.1173, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.948879819912075e-06, | |
| "loss": 0.0858, | |
| "step": 1681 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9467032098839655e-06, | |
| "loss": 0.1173, | |
| "step": 1682 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9445262500217997e-06, | |
| "loss": 0.1217, | |
| "step": 1683 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.942348942030458e-06, | |
| "loss": 0.1013, | |
| "step": 1684 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9401712876150907e-06, | |
| "loss": 0.1082, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.93799328848112e-06, | |
| "loss": 0.1112, | |
| "step": 1686 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.93581494633424e-06, | |
| "loss": 0.0958, | |
| "step": 1687 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.93363626288041e-06, | |
| "loss": 0.0827, | |
| "step": 1688 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.931457239825858e-06, | |
| "loss": 0.1323, | |
| "step": 1689 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.92927787887708e-06, | |
| "loss": 0.1088, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9270981817408328e-06, | |
| "loss": 0.096, | |
| "step": 1691 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.924918150124141e-06, | |
| "loss": 0.1088, | |
| "step": 1692 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.922737785734287e-06, | |
| "loss": 0.0897, | |
| "step": 1693 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9205570902788153e-06, | |
| "loss": 0.1115, | |
| "step": 1694 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.918376065465531e-06, | |
| "loss": 0.0849, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.916194713002496e-06, | |
| "loss": 0.0927, | |
| "step": 1696 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.914013034598029e-06, | |
| "loss": 0.1403, | |
| "step": 1697 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9118310319607025e-06, | |
| "loss": 0.1216, | |
| "step": 1698 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.909648706799345e-06, | |
| "loss": 0.1022, | |
| "step": 1699 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.907466060823037e-06, | |
| "loss": 0.1243, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.90528309574111e-06, | |
| "loss": 0.0943, | |
| "step": 1701 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 2.9030998132631455e-06, | |
| "loss": 0.1204, | |
| "step": 1702 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.900916215098973e-06, | |
| "loss": 0.1116, | |
| "step": 1703 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.89873230295867e-06, | |
| "loss": 0.0844, | |
| "step": 1704 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.89654807855256e-06, | |
| "loss": 0.1053, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8943635435912105e-06, | |
| "loss": 0.1089, | |
| "step": 1706 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.892178699785433e-06, | |
| "loss": 0.096, | |
| "step": 1707 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8899935488462793e-06, | |
| "loss": 0.0894, | |
| "step": 1708 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8878080924850422e-06, | |
| "loss": 0.1016, | |
| "step": 1709 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.885622332413256e-06, | |
| "loss": 0.1103, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.88343627034269e-06, | |
| "loss": 0.1052, | |
| "step": 1711 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8812499079853517e-06, | |
| "loss": 0.1084, | |
| "step": 1712 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.879063247053482e-06, | |
| "loss": 0.1024, | |
| "step": 1713 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.876876289259557e-06, | |
| "loss": 0.1161, | |
| "step": 1714 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.874689036316286e-06, | |
| "loss": 0.0919, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8725014899366078e-06, | |
| "loss": 0.108, | |
| "step": 1716 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.870313651833692e-06, | |
| "loss": 0.0809, | |
| "step": 1717 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.868125523720936e-06, | |
| "loss": 0.1254, | |
| "step": 1718 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8659371073119635e-06, | |
| "loss": 0.1003, | |
| "step": 1719 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8637484043206283e-06, | |
| "loss": 0.1096, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8615594164610028e-06, | |
| "loss": 0.1106, | |
| "step": 1721 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8593701454473864e-06, | |
| "loss": 0.1053, | |
| "step": 1722 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.857180592994298e-06, | |
| "loss": 0.0983, | |
| "step": 1723 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.854990760816479e-06, | |
| "loss": 0.1031, | |
| "step": 1724 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.852800650628889e-06, | |
| "loss": 0.0992, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.850610264146704e-06, | |
| "loss": 0.1102, | |
| "step": 1726 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.848419603085318e-06, | |
| "loss": 0.1134, | |
| "step": 1727 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.846228669160339e-06, | |
| "loss": 0.1064, | |
| "step": 1728 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8440374640875906e-06, | |
| "loss": 0.1072, | |
| "step": 1729 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8418459895831067e-06, | |
| "loss": 0.1126, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.839654247363132e-06, | |
| "loss": 0.0985, | |
| "step": 1731 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.837462239144123e-06, | |
| "loss": 0.0997, | |
| "step": 1732 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.835269966642743e-06, | |
| "loss": 0.1234, | |
| "step": 1733 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8330774315758617e-06, | |
| "loss": 0.1082, | |
| "step": 1734 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8308846356605567e-06, | |
| "loss": 0.1014, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.828691580614107e-06, | |
| "loss": 0.1073, | |
| "step": 1736 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8264982681539965e-06, | |
| "loss": 0.0893, | |
| "step": 1737 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 2.8243046999979107e-06, | |
| "loss": 0.101, | |
| "step": 1738 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.822110877863734e-06, | |
| "loss": 0.0944, | |
| "step": 1739 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8199168034695514e-06, | |
| "loss": 0.0999, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8177224785336436e-06, | |
| "loss": 0.089, | |
| "step": 1741 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8155279047744892e-06, | |
| "loss": 0.1295, | |
| "step": 1742 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.813333083910761e-06, | |
| "loss": 0.0983, | |
| "step": 1743 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8111380176613246e-06, | |
| "loss": 0.1362, | |
| "step": 1744 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.80894270774524e-06, | |
| "loss": 0.0834, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.806747155881754e-06, | |
| "loss": 0.1046, | |
| "step": 1746 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8045513637903072e-06, | |
| "loss": 0.0882, | |
| "step": 1747 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8023553331905258e-06, | |
| "loss": 0.0993, | |
| "step": 1748 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.8001590658022236e-06, | |
| "loss": 0.0957, | |
| "step": 1749 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7979625633454005e-06, | |
| "loss": 0.1151, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7957658275402383e-06, | |
| "loss": 0.1143, | |
| "step": 1751 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7935688601071043e-06, | |
| "loss": 0.1107, | |
| "step": 1752 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7913716627665445e-06, | |
| "loss": 0.1104, | |
| "step": 1753 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.789174237239287e-06, | |
| "loss": 0.1162, | |
| "step": 1754 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7869765852462387e-06, | |
| "loss": 0.1103, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.784778708508482e-06, | |
| "loss": 0.0922, | |
| "step": 1756 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7825806087472756e-06, | |
| "loss": 0.0841, | |
| "step": 1757 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7803822876840565e-06, | |
| "loss": 0.0853, | |
| "step": 1758 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7781837470404293e-06, | |
| "loss": 0.1071, | |
| "step": 1759 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.775984988538175e-06, | |
| "loss": 0.1016, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.773786013899243e-06, | |
| "loss": 0.1021, | |
| "step": 1761 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7715868248457513e-06, | |
| "loss": 0.0972, | |
| "step": 1762 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.76938742309999e-06, | |
| "loss": 0.1081, | |
| "step": 1763 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7671878103844106e-06, | |
| "loss": 0.1087, | |
| "step": 1764 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.764987988421634e-06, | |
| "loss": 0.1032, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.762787958934441e-06, | |
| "loss": 0.1009, | |
| "step": 1766 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7605877236457774e-06, | |
| "loss": 0.1208, | |
| "step": 1767 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.758387284278751e-06, | |
| "loss": 0.1184, | |
| "step": 1768 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7561866425566273e-06, | |
| "loss": 0.0939, | |
| "step": 1769 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7539858002028313e-06, | |
| "loss": 0.1119, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7517847589409457e-06, | |
| "loss": 0.1209, | |
| "step": 1771 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7495835204947065e-06, | |
| "loss": 0.112, | |
| "step": 1772 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7473820865880075e-06, | |
| "loss": 0.0858, | |
| "step": 1773 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7451804589448932e-06, | |
| "loss": 0.1101, | |
| "step": 1774 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 2.7429786392895607e-06, | |
| "loss": 0.097, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7407766293463585e-06, | |
| "loss": 0.0898, | |
| "step": 1776 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.73857443083978e-06, | |
| "loss": 0.1081, | |
| "step": 1777 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7363720454944727e-06, | |
| "loss": 0.0986, | |
| "step": 1778 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.734169475035225e-06, | |
| "loss": 0.0984, | |
| "step": 1779 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7319667211869725e-06, | |
| "loss": 0.1158, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.729763785674795e-06, | |
| "loss": 0.1114, | |
| "step": 1781 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.727560670223912e-06, | |
| "loss": 0.1111, | |
| "step": 1782 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7253573765596873e-06, | |
| "loss": 0.0982, | |
| "step": 1783 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7231539064076222e-06, | |
| "loss": 0.1004, | |
| "step": 1784 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.720950261493356e-06, | |
| "loss": 0.1043, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.718746443542667e-06, | |
| "loss": 0.122, | |
| "step": 1786 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7165424542814655e-06, | |
| "loss": 0.1279, | |
| "step": 1787 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7143382954358e-06, | |
| "loss": 0.0804, | |
| "step": 1788 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.712133968731848e-06, | |
| "loss": 0.1039, | |
| "step": 1789 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.709929475895921e-06, | |
| "loss": 0.1033, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7077248186544604e-06, | |
| "loss": 0.1121, | |
| "step": 1791 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7055199987340346e-06, | |
| "loss": 0.1233, | |
| "step": 1792 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.703315017861342e-06, | |
| "loss": 0.104, | |
| "step": 1793 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.7011098777632046e-06, | |
| "loss": 0.0964, | |
| "step": 1794 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6989045801665705e-06, | |
| "loss": 0.0922, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6966991267985098e-06, | |
| "loss": 0.1084, | |
| "step": 1796 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6944935193862175e-06, | |
| "loss": 0.1027, | |
| "step": 1797 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.692287759657007e-06, | |
| "loss": 0.0898, | |
| "step": 1798 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6900818493383096e-06, | |
| "loss": 0.1138, | |
| "step": 1799 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6878757901576775e-06, | |
| "loss": 0.0972, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.685669583842778e-06, | |
| "loss": 0.0935, | |
| "step": 1801 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6834632321213936e-06, | |
| "loss": 0.097, | |
| "step": 1802 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6812567367214224e-06, | |
| "loss": 0.1162, | |
| "step": 1803 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.679050099370871e-06, | |
| "loss": 0.0951, | |
| "step": 1804 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6768433217978625e-06, | |
| "loss": 0.1194, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6746364057306244e-06, | |
| "loss": 0.1077, | |
| "step": 1806 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6724293528974964e-06, | |
| "loss": 0.1019, | |
| "step": 1807 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6702221650269256e-06, | |
| "loss": 0.0958, | |
| "step": 1808 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6680148438474614e-06, | |
| "loss": 0.1341, | |
| "step": 1809 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.66580739108776e-06, | |
| "loss": 0.0862, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6635998084765813e-06, | |
| "loss": 0.0967, | |
| "step": 1811 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 2.6613920977427853e-06, | |
| "loss": 0.1052, | |
| "step": 1812 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.659184260615333e-06, | |
| "loss": 0.0968, | |
| "step": 1813 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6569762988232838e-06, | |
| "loss": 0.0906, | |
| "step": 1814 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6547682140957953e-06, | |
| "loss": 0.1092, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6525600081621234e-06, | |
| "loss": 0.1193, | |
| "step": 1816 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.650351682751614e-06, | |
| "loss": 0.1229, | |
| "step": 1817 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6481432395937107e-06, | |
| "loss": 0.0973, | |
| "step": 1818 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.645934680417948e-06, | |
| "loss": 0.1087, | |
| "step": 1819 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6437260069539504e-06, | |
| "loss": 0.086, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6415172209314354e-06, | |
| "loss": 0.1037, | |
| "step": 1821 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.639308324080203e-06, | |
| "loss": 0.1107, | |
| "step": 1822 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6370993181301445e-06, | |
| "loss": 0.0863, | |
| "step": 1823 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6348902048112352e-06, | |
| "loss": 0.1178, | |
| "step": 1824 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6326809858535337e-06, | |
| "loss": 0.0943, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.630471662987183e-06, | |
| "loss": 0.1051, | |
| "step": 1826 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.628262237942406e-06, | |
| "loss": 0.1132, | |
| "step": 1827 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6260527124495065e-06, | |
| "loss": 0.1088, | |
| "step": 1828 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6238430882388673e-06, | |
| "loss": 0.1187, | |
| "step": 1829 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6216333670409456e-06, | |
| "loss": 0.0874, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6194235505862793e-06, | |
| "loss": 0.1094, | |
| "step": 1831 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6172136406054766e-06, | |
| "loss": 0.0923, | |
| "step": 1832 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.615003638829222e-06, | |
| "loss": 0.0982, | |
| "step": 1833 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.612793546988271e-06, | |
| "loss": 0.1041, | |
| "step": 1834 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6105833668134473e-06, | |
| "loss": 0.0962, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6083731000356475e-06, | |
| "loss": 0.0898, | |
| "step": 1836 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.606162748385834e-06, | |
| "loss": 0.1055, | |
| "step": 1837 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.6039523135950357e-06, | |
| "loss": 0.1133, | |
| "step": 1838 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.601741797394347e-06, | |
| "loss": 0.0958, | |
| "step": 1839 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5995312015149256e-06, | |
| "loss": 0.1236, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5973205276879925e-06, | |
| "loss": 0.1048, | |
| "step": 1841 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5951097776448285e-06, | |
| "loss": 0.1008, | |
| "step": 1842 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5928989531167746e-06, | |
| "loss": 0.0855, | |
| "step": 1843 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5906880558352317e-06, | |
| "loss": 0.1298, | |
| "step": 1844 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.588477087531655e-06, | |
| "loss": 0.0918, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5862660499375576e-06, | |
| "loss": 0.1218, | |
| "step": 1846 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.584054944784504e-06, | |
| "loss": 0.1134, | |
| "step": 1847 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 2.5818437738041153e-06, | |
| "loss": 0.1145, | |
| "step": 1848 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.579632538728061e-06, | |
| "loss": 0.0836, | |
| "step": 1849 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5774212412880636e-06, | |
| "loss": 0.1084, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.575209883215892e-06, | |
| "loss": 0.0903, | |
| "step": 1851 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.572998466243364e-06, | |
| "loss": 0.0978, | |
| "step": 1852 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5707869921023413e-06, | |
| "loss": 0.0835, | |
| "step": 1853 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5685754625247344e-06, | |
| "loss": 0.1106, | |
| "step": 1854 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5663638792424946e-06, | |
| "loss": 0.1198, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.564152243987615e-06, | |
| "loss": 0.1143, | |
| "step": 1856 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.561940558492129e-06, | |
| "loss": 0.0853, | |
| "step": 1857 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5597288244881122e-06, | |
| "loss": 0.1058, | |
| "step": 1858 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.557517043707676e-06, | |
| "loss": 0.101, | |
| "step": 1859 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5553052178829676e-06, | |
| "loss": 0.1086, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5530933487461723e-06, | |
| "loss": 0.1144, | |
| "step": 1861 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.550881438029506e-06, | |
| "loss": 0.0989, | |
| "step": 1862 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5486694874652195e-06, | |
| "loss": 0.1001, | |
| "step": 1863 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5464574987855946e-06, | |
| "loss": 0.1067, | |
| "step": 1864 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.544245473722942e-06, | |
| "loss": 0.1095, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.542033414009602e-06, | |
| "loss": 0.1247, | |
| "step": 1866 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.539821321377941e-06, | |
| "loss": 0.0835, | |
| "step": 1867 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.537609197560351e-06, | |
| "loss": 0.1017, | |
| "step": 1868 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.53539704428925e-06, | |
| "loss": 0.1071, | |
| "step": 1869 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5331848632970773e-06, | |
| "loss": 0.0949, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5309726563162956e-06, | |
| "loss": 0.0866, | |
| "step": 1871 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.528760425079386e-06, | |
| "loss": 0.1123, | |
| "step": 1872 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5265481713188505e-06, | |
| "loss": 0.0884, | |
| "step": 1873 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5243358967672076e-06, | |
| "loss": 0.1237, | |
| "step": 1874 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.522123603156992e-06, | |
| "loss": 0.0842, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5199112922207547e-06, | |
| "loss": 0.1116, | |
| "step": 1876 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.517698965691058e-06, | |
| "loss": 0.1257, | |
| "step": 1877 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5154866253004786e-06, | |
| "loss": 0.1029, | |
| "step": 1878 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.513274272781603e-06, | |
| "loss": 0.0949, | |
| "step": 1879 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5110619098670265e-06, | |
| "loss": 0.1082, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.508849538289354e-06, | |
| "loss": 0.1108, | |
| "step": 1881 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.506637159781198e-06, | |
| "loss": 0.1051, | |
| "step": 1882 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.504424776075172e-06, | |
| "loss": 0.0958, | |
| "step": 1883 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5022123889038985e-06, | |
| "loss": 0.1049, | |
| "step": 1884 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.1219, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4977876110961023e-06, | |
| "loss": 0.1252, | |
| "step": 1886 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4955752239248294e-06, | |
| "loss": 0.1028, | |
| "step": 1887 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.493362840218803e-06, | |
| "loss": 0.0996, | |
| "step": 1888 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4911504617106464e-06, | |
| "loss": 0.1241, | |
| "step": 1889 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.488938090132974e-06, | |
| "loss": 0.1177, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.486725727218398e-06, | |
| "loss": 0.1104, | |
| "step": 1891 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4845133746995223e-06, | |
| "loss": 0.0893, | |
| "step": 1892 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4823010343089422e-06, | |
| "loss": 0.0832, | |
| "step": 1893 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.480088707779246e-06, | |
| "loss": 0.1061, | |
| "step": 1894 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4778763968430082e-06, | |
| "loss": 0.0929, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.475664103232793e-06, | |
| "loss": 0.1129, | |
| "step": 1896 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4734518286811503e-06, | |
| "loss": 0.0991, | |
| "step": 1897 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.471239574920614e-06, | |
| "loss": 0.1175, | |
| "step": 1898 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4690273436837053e-06, | |
| "loss": 0.1097, | |
| "step": 1899 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4668151367029235e-06, | |
| "loss": 0.0986, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.46460295571075e-06, | |
| "loss": 0.1059, | |
| "step": 1901 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.46239080243965e-06, | |
| "loss": 0.1052, | |
| "step": 1902 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.46017867862206e-06, | |
| "loss": 0.1055, | |
| "step": 1903 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.457966585990399e-06, | |
| "loss": 0.1064, | |
| "step": 1904 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4557545262770584e-06, | |
| "loss": 0.1094, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4535425012144054e-06, | |
| "loss": 0.1049, | |
| "step": 1906 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4513305125347817e-06, | |
| "loss": 0.106, | |
| "step": 1907 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.449118561970495e-06, | |
| "loss": 0.1085, | |
| "step": 1908 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.446906651253829e-06, | |
| "loss": 0.1113, | |
| "step": 1909 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4446947821170333e-06, | |
| "loss": 0.0912, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4424829562923245e-06, | |
| "loss": 0.1185, | |
| "step": 1911 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.440271175511889e-06, | |
| "loss": 0.1004, | |
| "step": 1912 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4380594415078714e-06, | |
| "loss": 0.0986, | |
| "step": 1913 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4358477560123866e-06, | |
| "loss": 0.0923, | |
| "step": 1914 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4336361207575067e-06, | |
| "loss": 0.0988, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.431424537475265e-06, | |
| "loss": 0.1401, | |
| "step": 1916 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.42921300789766e-06, | |
| "loss": 0.1036, | |
| "step": 1917 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4270015337566373e-06, | |
| "loss": 0.099, | |
| "step": 1918 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4247901167841083e-06, | |
| "loss": 0.0961, | |
| "step": 1919 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.4225787587119373e-06, | |
| "loss": 0.0943, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 2.420367461271939e-06, | |
| "loss": 0.0995, | |
| "step": 1921 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4181562261958855e-06, | |
| "loss": 0.1244, | |
| "step": 1922 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4159450552154967e-06, | |
| "loss": 0.097, | |
| "step": 1923 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4137339500624437e-06, | |
| "loss": 0.1185, | |
| "step": 1924 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.411522912468346e-06, | |
| "loss": 0.0996, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4093119441647687e-06, | |
| "loss": 0.107, | |
| "step": 1926 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.407101046883226e-06, | |
| "loss": 0.1176, | |
| "step": 1927 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4048902223551723e-06, | |
| "loss": 0.1257, | |
| "step": 1928 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4026794723120084e-06, | |
| "loss": 0.1116, | |
| "step": 1929 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.4004687984850753e-06, | |
| "loss": 0.1052, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3982582026056533e-06, | |
| "loss": 0.1302, | |
| "step": 1931 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.396047686404965e-06, | |
| "loss": 0.105, | |
| "step": 1932 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.393837251614166e-06, | |
| "loss": 0.1048, | |
| "step": 1933 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.391626899964353e-06, | |
| "loss": 0.1236, | |
| "step": 1934 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3894166331865535e-06, | |
| "loss": 0.0809, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.38720645301173e-06, | |
| "loss": 0.1108, | |
| "step": 1936 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3849963611707786e-06, | |
| "loss": 0.0969, | |
| "step": 1937 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.382786359394524e-06, | |
| "loss": 0.1139, | |
| "step": 1938 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.380576449413722e-06, | |
| "loss": 0.0927, | |
| "step": 1939 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3783666329590556e-06, | |
| "loss": 0.1041, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.376156911761134e-06, | |
| "loss": 0.1181, | |
| "step": 1941 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3739472875504943e-06, | |
| "loss": 0.102, | |
| "step": 1942 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3717377620575945e-06, | |
| "loss": 0.102, | |
| "step": 1943 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3695283370128174e-06, | |
| "loss": 0.1008, | |
| "step": 1944 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.367319014146467e-06, | |
| "loss": 0.1064, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.365109795188765e-06, | |
| "loss": 0.1192, | |
| "step": 1946 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3629006818698564e-06, | |
| "loss": 0.1068, | |
| "step": 1947 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.360691675919798e-06, | |
| "loss": 0.0945, | |
| "step": 1948 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.358482779068565e-06, | |
| "loss": 0.1079, | |
| "step": 1949 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.35627399304605e-06, | |
| "loss": 0.1111, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.354065319582053e-06, | |
| "loss": 0.1078, | |
| "step": 1951 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.35185676040629e-06, | |
| "loss": 0.1111, | |
| "step": 1952 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.349648317248387e-06, | |
| "loss": 0.1062, | |
| "step": 1953 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.347439991837877e-06, | |
| "loss": 0.1009, | |
| "step": 1954 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.345231785904205e-06, | |
| "loss": 0.1046, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3430237011767166e-06, | |
| "loss": 0.1033, | |
| "step": 1956 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3408157393846683e-06, | |
| "loss": 0.0807, | |
| "step": 1957 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 2.3386079022572155e-06, | |
| "loss": 0.105, | |
| "step": 1958 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3364001915234187e-06, | |
| "loss": 0.0902, | |
| "step": 1959 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.334192608912241e-06, | |
| "loss": 0.1066, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.331985156152539e-06, | |
| "loss": 0.0932, | |
| "step": 1961 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3297778349730757e-06, | |
| "loss": 0.1295, | |
| "step": 1962 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.327570647102504e-06, | |
| "loss": 0.0979, | |
| "step": 1963 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.325363594269376e-06, | |
| "loss": 0.0981, | |
| "step": 1964 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.323156678202139e-06, | |
| "loss": 0.0942, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3209499006291293e-06, | |
| "loss": 0.1179, | |
| "step": 1966 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.318743263278579e-06, | |
| "loss": 0.1097, | |
| "step": 1967 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3165367678786068e-06, | |
| "loss": 0.1068, | |
| "step": 1968 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3143304161572223e-06, | |
| "loss": 0.1179, | |
| "step": 1969 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3121242098423233e-06, | |
| "loss": 0.1013, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3099181506616912e-06, | |
| "loss": 0.1039, | |
| "step": 1971 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.307712240342995e-06, | |
| "loss": 0.0877, | |
| "step": 1972 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3055064806137834e-06, | |
| "loss": 0.1197, | |
| "step": 1973 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.30330087320149e-06, | |
| "loss": 0.1064, | |
| "step": 1974 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.3010954198334307e-06, | |
| "loss": 0.1, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.298890122236796e-06, | |
| "loss": 0.1041, | |
| "step": 1976 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2966849821386588e-06, | |
| "loss": 0.124, | |
| "step": 1977 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2944800012659662e-06, | |
| "loss": 0.1031, | |
| "step": 1978 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.29227518134554e-06, | |
| "loss": 0.0979, | |
| "step": 1979 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2900705241040793e-06, | |
| "loss": 0.1043, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2878660312681527e-06, | |
| "loss": 0.0975, | |
| "step": 1981 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2856617045642006e-06, | |
| "loss": 0.1134, | |
| "step": 1982 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.283457545718535e-06, | |
| "loss": 0.108, | |
| "step": 1983 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2812535564573336e-06, | |
| "loss": 0.1003, | |
| "step": 1984 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.279049738506644e-06, | |
| "loss": 0.1043, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.276846093592378e-06, | |
| "loss": 0.1254, | |
| "step": 1986 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.274642623440313e-06, | |
| "loss": 0.1113, | |
| "step": 1987 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2724393297760887e-06, | |
| "loss": 0.1051, | |
| "step": 1988 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2702362143252058e-06, | |
| "loss": 0.0991, | |
| "step": 1989 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2680332788130283e-06, | |
| "loss": 0.1139, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2658305249647758e-06, | |
| "loss": 0.1121, | |
| "step": 1991 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.2636279545055277e-06, | |
| "loss": 0.1067, | |
| "step": 1992 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.261425569160221e-06, | |
| "loss": 0.1152, | |
| "step": 1993 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.259223370653643e-06, | |
| "loss": 0.0829, | |
| "step": 1994 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 2.25702136071044e-06, | |
| "loss": 0.1085, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2548195410551076e-06, | |
| "loss": 0.1025, | |
| "step": 1996 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.252617913411993e-06, | |
| "loss": 0.1097, | |
| "step": 1997 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.250416479505295e-06, | |
| "loss": 0.1085, | |
| "step": 1998 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.248215241059055e-06, | |
| "loss": 0.0989, | |
| "step": 1999 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2460141997971695e-06, | |
| "loss": 0.1171, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.243813357443373e-06, | |
| "loss": 0.0978, | |
| "step": 2001 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.241612715721249e-06, | |
| "loss": 0.1196, | |
| "step": 2002 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2394122763542234e-06, | |
| "loss": 0.1068, | |
| "step": 2003 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.23721204106556e-06, | |
| "loss": 0.1117, | |
| "step": 2004 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2350120115783675e-06, | |
| "loss": 0.1158, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2328121896155898e-06, | |
| "loss": 0.0979, | |
| "step": 2006 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.23061257690001e-06, | |
| "loss": 0.1007, | |
| "step": 2007 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.228413175154249e-06, | |
| "loss": 0.1048, | |
| "step": 2008 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.226213986100758e-06, | |
| "loss": 0.0986, | |
| "step": 2009 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2240150114618262e-06, | |
| "loss": 0.1135, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.221816252959571e-06, | |
| "loss": 0.1016, | |
| "step": 2011 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.219617712315944e-06, | |
| "loss": 0.0935, | |
| "step": 2012 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.217419391252725e-06, | |
| "loss": 0.1008, | |
| "step": 2013 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2152212914915186e-06, | |
| "loss": 0.1043, | |
| "step": 2014 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2130234147537626e-06, | |
| "loss": 0.1127, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2108257627607137e-06, | |
| "loss": 0.0984, | |
| "step": 2016 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.208628337233456e-06, | |
| "loss": 0.1066, | |
| "step": 2017 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2064311398928965e-06, | |
| "loss": 0.1023, | |
| "step": 2018 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.204234172459762e-06, | |
| "loss": 0.1075, | |
| "step": 2019 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.2020374366546007e-06, | |
| "loss": 0.1212, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.199840934197777e-06, | |
| "loss": 0.1141, | |
| "step": 2021 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.1976446668094746e-06, | |
| "loss": 0.1073, | |
| "step": 2022 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.195448636209694e-06, | |
| "loss": 0.105, | |
| "step": 2023 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.193252844118247e-06, | |
| "loss": 0.1133, | |
| "step": 2024 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.191057292254762e-06, | |
| "loss": 0.0809, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.1888619823386762e-06, | |
| "loss": 0.1175, | |
| "step": 2026 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.186666916089239e-06, | |
| "loss": 0.1063, | |
| "step": 2027 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.184472095225511e-06, | |
| "loss": 0.0981, | |
| "step": 2028 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.182277521466357e-06, | |
| "loss": 0.0889, | |
| "step": 2029 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.18008319653045e-06, | |
| "loss": 0.1032, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 2.177889122136267e-06, | |
| "loss": 0.0994, | |
| "step": 2031 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1756953000020897e-06, | |
| "loss": 0.1138, | |
| "step": 2032 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1735017318460043e-06, | |
| "loss": 0.1071, | |
| "step": 2033 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.171308419385894e-06, | |
| "loss": 0.0963, | |
| "step": 2034 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.169115364339444e-06, | |
| "loss": 0.1094, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.166922568424139e-06, | |
| "loss": 0.1008, | |
| "step": 2036 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1647300333572575e-06, | |
| "loss": 0.1001, | |
| "step": 2037 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1625377608558775e-06, | |
| "loss": 0.0955, | |
| "step": 2038 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1603457526368687e-06, | |
| "loss": 0.0928, | |
| "step": 2039 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.158154010416894e-06, | |
| "loss": 0.121, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1559625359124103e-06, | |
| "loss": 0.0928, | |
| "step": 2041 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.153771330839661e-06, | |
| "loss": 0.1008, | |
| "step": 2042 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.151580396914683e-06, | |
| "loss": 0.0953, | |
| "step": 2043 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1493897358532966e-06, | |
| "loss": 0.1084, | |
| "step": 2044 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1471993493711114e-06, | |
| "loss": 0.0999, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1450092391835215e-06, | |
| "loss": 0.1193, | |
| "step": 2046 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1428194070057016e-06, | |
| "loss": 0.1067, | |
| "step": 2047 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1406298545526144e-06, | |
| "loss": 0.1041, | |
| "step": 2048 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1384405835389976e-06, | |
| "loss": 0.1155, | |
| "step": 2049 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1362515956793717e-06, | |
| "loss": 0.0812, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.134062892688037e-06, | |
| "loss": 0.1044, | |
| "step": 2051 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1318744762790648e-06, | |
| "loss": 0.1101, | |
| "step": 2052 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1296863481663096e-06, | |
| "loss": 0.1039, | |
| "step": 2053 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.127498510063393e-06, | |
| "loss": 0.0965, | |
| "step": 2054 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1253109636837143e-06, | |
| "loss": 0.1109, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.123123710740444e-06, | |
| "loss": 0.1127, | |
| "step": 2056 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.120936752946519e-06, | |
| "loss": 0.097, | |
| "step": 2057 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1187500920146496e-06, | |
| "loss": 0.1108, | |
| "step": 2058 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.116563729657311e-06, | |
| "loss": 0.0944, | |
| "step": 2059 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.114377667586744e-06, | |
| "loss": 0.0919, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1121919075149586e-06, | |
| "loss": 0.1114, | |
| "step": 2061 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1100064511537216e-06, | |
| "loss": 0.107, | |
| "step": 2062 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.107821300214568e-06, | |
| "loss": 0.0883, | |
| "step": 2063 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.10563645640879e-06, | |
| "loss": 0.0924, | |
| "step": 2064 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.10345192144744e-06, | |
| "loss": 0.1076, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.1012676970413307e-06, | |
| "loss": 0.0961, | |
| "step": 2066 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.099083784901028e-06, | |
| "loss": 0.1036, | |
| "step": 2067 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 2.0969001867368562e-06, | |
| "loss": 0.1171, | |
| "step": 2068 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.094716904258891e-06, | |
| "loss": 0.0817, | |
| "step": 2069 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0925339391769632e-06, | |
| "loss": 0.111, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.090351293200656e-06, | |
| "loss": 0.1194, | |
| "step": 2071 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0881689680392984e-06, | |
| "loss": 0.1099, | |
| "step": 2072 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0859869654019723e-06, | |
| "loss": 0.1055, | |
| "step": 2073 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0838052869975043e-06, | |
| "loss": 0.0899, | |
| "step": 2074 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.081623934534469e-06, | |
| "loss": 0.1252, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.079442909721185e-06, | |
| "loss": 0.0902, | |
| "step": 2076 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0772622142657138e-06, | |
| "loss": 0.0981, | |
| "step": 2077 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.07508184987586e-06, | |
| "loss": 0.1063, | |
| "step": 2078 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0729018182591676e-06, | |
| "loss": 0.0979, | |
| "step": 2079 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0707221211229206e-06, | |
| "loss": 0.1063, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0685427601741426e-06, | |
| "loss": 0.1021, | |
| "step": 2081 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.066363737119591e-06, | |
| "loss": 0.1165, | |
| "step": 2082 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.064185053665761e-06, | |
| "loss": 0.1068, | |
| "step": 2083 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0620067115188803e-06, | |
| "loss": 0.09, | |
| "step": 2084 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0598287123849097e-06, | |
| "loss": 0.1029, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0576510579695427e-06, | |
| "loss": 0.0906, | |
| "step": 2086 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0554737499782007e-06, | |
| "loss": 0.1161, | |
| "step": 2087 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.053296790116036e-06, | |
| "loss": 0.1079, | |
| "step": 2088 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.051120180087926e-06, | |
| "loss": 0.0963, | |
| "step": 2089 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0489439215984745e-06, | |
| "loss": 0.1065, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.046768016352013e-06, | |
| "loss": 0.0915, | |
| "step": 2091 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.044592466052592e-06, | |
| "loss": 0.0883, | |
| "step": 2092 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.042417272403987e-06, | |
| "loss": 0.0947, | |
| "step": 2093 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0402424371096946e-06, | |
| "loss": 0.1252, | |
| "step": 2094 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0380679618729265e-06, | |
| "loss": 0.1114, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.035893848396618e-06, | |
| "loss": 0.0951, | |
| "step": 2096 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.033720098383417e-06, | |
| "loss": 0.1002, | |
| "step": 2097 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.031546713535688e-06, | |
| "loss": 0.1048, | |
| "step": 2098 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.029373695555512e-06, | |
| "loss": 0.1053, | |
| "step": 2099 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.027201046144677e-06, | |
| "loss": 0.0983, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.025028767004689e-06, | |
| "loss": 0.0876, | |
| "step": 2101 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.022856859836759e-06, | |
| "loss": 0.0984, | |
| "step": 2102 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.0206853263418073e-06, | |
| "loss": 0.112, | |
| "step": 2103 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 2.018514168220467e-06, | |
| "loss": 0.107, | |
| "step": 2104 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.016343387173068e-06, | |
| "loss": 0.1009, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.014172984899654e-06, | |
| "loss": 0.1151, | |
| "step": 2106 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.012002963099965e-06, | |
| "loss": 0.0959, | |
| "step": 2107 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.0098333234734466e-06, | |
| "loss": 0.1051, | |
| "step": 2108 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.0076640677192466e-06, | |
| "loss": 0.1129, | |
| "step": 2109 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.005495197536207e-06, | |
| "loss": 0.1159, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.0033267146228734e-06, | |
| "loss": 0.1206, | |
| "step": 2111 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 2.0011586206774835e-06, | |
| "loss": 0.0955, | |
| "step": 2112 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9989909173979736e-06, | |
| "loss": 0.0985, | |
| "step": 2113 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9968236064819736e-06, | |
| "loss": 0.097, | |
| "step": 2114 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9946566896268044e-06, | |
| "loss": 0.097, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9924901685294798e-06, | |
| "loss": 0.1005, | |
| "step": 2116 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9903240448867027e-06, | |
| "loss": 0.1023, | |
| "step": 2117 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.988158320394865e-06, | |
| "loss": 0.1067, | |
| "step": 2118 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.985992996750048e-06, | |
| "loss": 0.1068, | |
| "step": 2119 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9838280756480156e-06, | |
| "loss": 0.0989, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9816635587842197e-06, | |
| "loss": 0.0921, | |
| "step": 2121 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.979499447853792e-06, | |
| "loss": 0.0999, | |
| "step": 2122 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9773357445515495e-06, | |
| "loss": 0.089, | |
| "step": 2123 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.97517245057199e-06, | |
| "loss": 0.1096, | |
| "step": 2124 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9730095676092882e-06, | |
| "loss": 0.1048, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9708470973572987e-06, | |
| "loss": 0.0945, | |
| "step": 2126 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.968685041509552e-06, | |
| "loss": 0.1179, | |
| "step": 2127 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9665234017592547e-06, | |
| "loss": 0.1087, | |
| "step": 2128 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.964362179799289e-06, | |
| "loss": 0.0844, | |
| "step": 2129 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.962201377322206e-06, | |
| "loss": 0.11, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9600409960202326e-06, | |
| "loss": 0.0954, | |
| "step": 2131 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9578810375852626e-06, | |
| "loss": 0.1142, | |
| "step": 2132 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9557215037088594e-06, | |
| "loss": 0.1255, | |
| "step": 2133 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.953562396082256e-06, | |
| "loss": 0.1236, | |
| "step": 2134 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.951403716396349e-06, | |
| "loss": 0.1158, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9492454663417014e-06, | |
| "loss": 0.1193, | |
| "step": 2136 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9470876476085383e-06, | |
| "loss": 0.0947, | |
| "step": 2137 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9449302618867475e-06, | |
| "loss": 0.1053, | |
| "step": 2138 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.94277331086588e-06, | |
| "loss": 0.1016, | |
| "step": 2139 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9406167962351436e-06, | |
| "loss": 0.1149, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 1.9384607196834053e-06, | |
| "loss": 0.091, | |
| "step": 2141 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9363050828991873e-06, | |
| "loss": 0.1135, | |
| "step": 2142 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9341498875706703e-06, | |
| "loss": 0.1028, | |
| "step": 2143 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9319951353856894e-06, | |
| "loss": 0.1056, | |
| "step": 2144 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9298408280317286e-06, | |
| "loss": 0.103, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9276869671959285e-06, | |
| "loss": 0.1078, | |
| "step": 2146 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9255335545650756e-06, | |
| "loss": 0.1099, | |
| "step": 2147 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9233805918256078e-06, | |
| "loss": 0.0921, | |
| "step": 2148 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9212280806636123e-06, | |
| "loss": 0.1053, | |
| "step": 2149 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9190760227648183e-06, | |
| "loss": 0.101, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9169244198146043e-06, | |
| "loss": 0.0964, | |
| "step": 2151 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.914773273497989e-06, | |
| "loss": 0.1064, | |
| "step": 2152 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9126225854996357e-06, | |
| "loss": 0.1055, | |
| "step": 2153 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.910472357503849e-06, | |
| "loss": 0.0995, | |
| "step": 2154 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.908322591194571e-06, | |
| "loss": 0.1105, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.906173288255384e-06, | |
| "loss": 0.1109, | |
| "step": 2156 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9040244503695093e-06, | |
| "loss": 0.1055, | |
| "step": 2157 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.9018760792197982e-06, | |
| "loss": 0.1042, | |
| "step": 2158 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8997281764887432e-06, | |
| "loss": 0.0851, | |
| "step": 2159 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8975807438584643e-06, | |
| "loss": 0.1218, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8954337830107166e-06, | |
| "loss": 0.1093, | |
| "step": 2161 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8932872956268867e-06, | |
| "loss": 0.1023, | |
| "step": 2162 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8911412833879863e-06, | |
| "loss": 0.1015, | |
| "step": 2163 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8889957479746594e-06, | |
| "loss": 0.1167, | |
| "step": 2164 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8868506910671725e-06, | |
| "loss": 0.0926, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.88470611434542e-06, | |
| "loss": 0.1037, | |
| "step": 2166 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8825620194889211e-06, | |
| "loss": 0.1138, | |
| "step": 2167 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8804184081768148e-06, | |
| "loss": 0.1103, | |
| "step": 2168 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8782752820878636e-06, | |
| "loss": 0.1247, | |
| "step": 2169 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.876132642900448e-06, | |
| "loss": 0.0938, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.873990492292569e-06, | |
| "loss": 0.1194, | |
| "step": 2171 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8718488319418454e-06, | |
| "loss": 0.1061, | |
| "step": 2172 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8697076635255092e-06, | |
| "loss": 0.1029, | |
| "step": 2173 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8675669887204106e-06, | |
| "loss": 0.1087, | |
| "step": 2174 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8654268092030104e-06, | |
| "loss": 0.1076, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8632871266493824e-06, | |
| "loss": 0.0938, | |
| "step": 2176 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8611479427352133e-06, | |
| "loss": 0.0971, | |
| "step": 2177 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 1.8590092591357958e-06, | |
| "loss": 0.1064, | |
| "step": 2178 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8568710775260337e-06, | |
| "loss": 0.102, | |
| "step": 2179 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8547333995804352e-06, | |
| "loss": 0.0989, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8525962269731154e-06, | |
| "loss": 0.0989, | |
| "step": 2181 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8504595613777954e-06, | |
| "loss": 0.0836, | |
| "step": 2182 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.848323404467795e-06, | |
| "loss": 0.0831, | |
| "step": 2183 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8461877579160398e-06, | |
| "loss": 0.1023, | |
| "step": 2184 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8440526233950528e-06, | |
| "loss": 0.0976, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8419180025769567e-06, | |
| "loss": 0.0935, | |
| "step": 2186 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8397838971334743e-06, | |
| "loss": 0.113, | |
| "step": 2187 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.837650308735921e-06, | |
| "loss": 0.1118, | |
| "step": 2188 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8355172390552107e-06, | |
| "loss": 0.0942, | |
| "step": 2189 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8333846897618474e-06, | |
| "loss": 0.1028, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8312526625259303e-06, | |
| "loss": 0.0878, | |
| "step": 2191 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8291211590171503e-06, | |
| "loss": 0.0983, | |
| "step": 2192 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8269901809047858e-06, | |
| "loss": 0.1133, | |
| "step": 2193 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8248597298577054e-06, | |
| "loss": 0.0934, | |
| "step": 2194 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8227298075443637e-06, | |
| "loss": 0.1034, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8206004156328017e-06, | |
| "loss": 0.1183, | |
| "step": 2196 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.818471555790647e-06, | |
| "loss": 0.101, | |
| "step": 2197 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8163432296851066e-06, | |
| "loss": 0.0929, | |
| "step": 2198 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8142154389829735e-06, | |
| "loss": 0.11, | |
| "step": 2199 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8120881853506179e-06, | |
| "loss": 0.1031, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8099614704539915e-06, | |
| "loss": 0.0978, | |
| "step": 2201 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8078352959586243e-06, | |
| "loss": 0.106, | |
| "step": 2202 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.805709663529622e-06, | |
| "loss": 0.1186, | |
| "step": 2203 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8035845748316666e-06, | |
| "loss": 0.0912, | |
| "step": 2204 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.8014600315290125e-06, | |
| "loss": 0.1286, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7993360352854893e-06, | |
| "loss": 0.099, | |
| "step": 2206 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7972125877644982e-06, | |
| "loss": 0.1002, | |
| "step": 2207 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7950896906290082e-06, | |
| "loss": 0.0749, | |
| "step": 2208 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7929673455415597e-06, | |
| "loss": 0.1161, | |
| "step": 2209 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7908455541642583e-06, | |
| "loss": 0.1007, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7887243181587793e-06, | |
| "loss": 0.0899, | |
| "step": 2211 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7866036391863612e-06, | |
| "loss": 0.1038, | |
| "step": 2212 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7844835189078048e-06, | |
| "loss": 0.0946, | |
| "step": 2213 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 1.7823639589834751e-06, | |
| "loss": 0.1091, | |
| "step": 2214 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7802449610733002e-06, | |
| "loss": 0.0899, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7781265268367634e-06, | |
| "loss": 0.095, | |
| "step": 2216 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7760086579329108e-06, | |
| "loss": 0.0857, | |
| "step": 2217 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7738913560203424e-06, | |
| "loss": 0.1072, | |
| "step": 2218 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7717746227572163e-06, | |
| "loss": 0.0873, | |
| "step": 2219 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.769658459801246e-06, | |
| "loss": 0.1005, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7675428688096962e-06, | |
| "loss": 0.1026, | |
| "step": 2221 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7654278514393846e-06, | |
| "loss": 0.1001, | |
| "step": 2222 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7633134093466791e-06, | |
| "loss": 0.1137, | |
| "step": 2223 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7611995441874977e-06, | |
| "loss": 0.106, | |
| "step": 2224 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.759086257617308e-06, | |
| "loss": 0.1191, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7569735512911212e-06, | |
| "loss": 0.112, | |
| "step": 2226 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7548614268634973e-06, | |
| "loss": 0.0827, | |
| "step": 2227 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7527498859885378e-06, | |
| "loss": 0.1051, | |
| "step": 2228 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7506389303198883e-06, | |
| "loss": 0.0985, | |
| "step": 2229 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7485285615107389e-06, | |
| "loss": 0.0924, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.746418781213815e-06, | |
| "loss": 0.1051, | |
| "step": 2231 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7443095910813848e-06, | |
| "loss": 0.0991, | |
| "step": 2232 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7422009927652528e-06, | |
| "loss": 0.084, | |
| "step": 2233 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7400929879167594e-06, | |
| "loss": 0.1045, | |
| "step": 2234 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7379855781867838e-06, | |
| "loss": 0.1115, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7358787652257342e-06, | |
| "loss": 0.1096, | |
| "step": 2236 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7337725506835554e-06, | |
| "loss": 0.0991, | |
| "step": 2237 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7316669362097205e-06, | |
| "loss": 0.0844, | |
| "step": 2238 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7295619234532344e-06, | |
| "loss": 0.1166, | |
| "step": 2239 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7274575140626318e-06, | |
| "loss": 0.1081, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7253537096859719e-06, | |
| "loss": 0.1095, | |
| "step": 2241 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7232505119708431e-06, | |
| "loss": 0.1106, | |
| "step": 2242 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7211479225643564e-06, | |
| "loss": 0.1104, | |
| "step": 2243 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7190459431131467e-06, | |
| "loss": 0.1078, | |
| "step": 2244 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.716944575263374e-06, | |
| "loss": 0.1007, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7148438206607151e-06, | |
| "loss": 0.0902, | |
| "step": 2246 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.71274368095037e-06, | |
| "loss": 0.0887, | |
| "step": 2247 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7106441577770549e-06, | |
| "loss": 0.0959, | |
| "step": 2248 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7085452527850037e-06, | |
| "loss": 0.1039, | |
| "step": 2249 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7064469676179682e-06, | |
| "loss": 0.114, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 1.7043493039192115e-06, | |
| "loss": 0.1068, | |
| "step": 2251 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.7022522633315125e-06, | |
| "loss": 0.1001, | |
| "step": 2252 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.7001558474971601e-06, | |
| "loss": 0.1074, | |
| "step": 2253 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6980600580579553e-06, | |
| "loss": 0.1021, | |
| "step": 2254 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6959648966552097e-06, | |
| "loss": 0.1171, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.69387036492974e-06, | |
| "loss": 0.1046, | |
| "step": 2256 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6917764645218727e-06, | |
| "loss": 0.1217, | |
| "step": 2257 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6896831970714367e-06, | |
| "loss": 0.1086, | |
| "step": 2258 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6875905642177686e-06, | |
| "loss": 0.1104, | |
| "step": 2259 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6854985675997065e-06, | |
| "loss": 0.1065, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6834072088555898e-06, | |
| "loss": 0.1011, | |
| "step": 2261 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6813164896232586e-06, | |
| "loss": 0.1036, | |
| "step": 2262 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6792264115400523e-06, | |
| "loss": 0.0982, | |
| "step": 2263 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6771369762428086e-06, | |
| "loss": 0.0886, | |
| "step": 2264 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6750481853678618e-06, | |
| "loss": 0.1234, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6729600405510405e-06, | |
| "loss": 0.1001, | |
| "step": 2266 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6708725434276684e-06, | |
| "loss": 0.1044, | |
| "step": 2267 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6687856956325605e-06, | |
| "loss": 0.1125, | |
| "step": 2268 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6666994988000253e-06, | |
| "loss": 0.1022, | |
| "step": 2269 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6646139545638613e-06, | |
| "loss": 0.0975, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6625290645573538e-06, | |
| "loss": 0.1072, | |
| "step": 2271 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6604448304132773e-06, | |
| "loss": 0.085, | |
| "step": 2272 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6583612537638935e-06, | |
| "loss": 0.1054, | |
| "step": 2273 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.656278336240947e-06, | |
| "loss": 0.0963, | |
| "step": 2274 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6541960794756683e-06, | |
| "loss": 0.0971, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6521144850987689e-06, | |
| "loss": 0.1017, | |
| "step": 2276 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6500335547404415e-06, | |
| "loss": 0.102, | |
| "step": 2277 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6479532900303614e-06, | |
| "loss": 0.1162, | |
| "step": 2278 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6458736925976784e-06, | |
| "loss": 0.0908, | |
| "step": 2279 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6437947640710238e-06, | |
| "loss": 0.1164, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6417165060785018e-06, | |
| "loss": 0.1037, | |
| "step": 2281 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.639638920247693e-06, | |
| "loss": 0.111, | |
| "step": 2282 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.637562008205653e-06, | |
| "loss": 0.1094, | |
| "step": 2283 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6354857715789066e-06, | |
| "loss": 0.1062, | |
| "step": 2284 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6334102119934523e-06, | |
| "loss": 0.0977, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.6313353310747558e-06, | |
| "loss": 0.1084, | |
| "step": 2286 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 1.629261130447754e-06, | |
| "loss": 0.1004, | |
| "step": 2287 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6271876117368502e-06, | |
| "loss": 0.1043, | |
| "step": 2288 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6251147765659126e-06, | |
| "loss": 0.1055, | |
| "step": 2289 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.623042626558275e-06, | |
| "loss": 0.1177, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6209711633367343e-06, | |
| "loss": 0.0891, | |
| "step": 2291 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6189003885235492e-06, | |
| "loss": 0.0936, | |
| "step": 2292 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6168303037404415e-06, | |
| "loss": 0.1004, | |
| "step": 2293 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6147609106085888e-06, | |
| "loss": 0.1037, | |
| "step": 2294 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6126922107486307e-06, | |
| "loss": 0.1071, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6106242057806608e-06, | |
| "loss": 0.0951, | |
| "step": 2296 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6085568973242304e-06, | |
| "loss": 0.104, | |
| "step": 2297 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6064902869983462e-06, | |
| "loss": 0.0982, | |
| "step": 2298 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6044243764214651e-06, | |
| "loss": 0.0974, | |
| "step": 2299 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6023591672114992e-06, | |
| "loss": 0.1009, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.6002946609858083e-06, | |
| "loss": 0.0983, | |
| "step": 2301 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5982308593612039e-06, | |
| "loss": 0.1001, | |
| "step": 2302 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5961677639539458e-06, | |
| "loss": 0.099, | |
| "step": 2303 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5941053763797388e-06, | |
| "loss": 0.1153, | |
| "step": 2304 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5920436982537358e-06, | |
| "loss": 0.0958, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5899827311905305e-06, | |
| "loss": 0.109, | |
| "step": 2306 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5879224768041645e-06, | |
| "loss": 0.1066, | |
| "step": 2307 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.585862936708118e-06, | |
| "loss": 0.0845, | |
| "step": 2308 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.583804112515312e-06, | |
| "loss": 0.1056, | |
| "step": 2309 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5817460058381088e-06, | |
| "loss": 0.1084, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5796886182883053e-06, | |
| "loss": 0.0927, | |
| "step": 2311 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.577631951477139e-06, | |
| "loss": 0.1049, | |
| "step": 2312 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5755760070152814e-06, | |
| "loss": 0.121, | |
| "step": 2313 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5735207865128376e-06, | |
| "loss": 0.1037, | |
| "step": 2314 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5714662915793465e-06, | |
| "loss": 0.0933, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.569412523823778e-06, | |
| "loss": 0.0931, | |
| "step": 2316 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5673594848545343e-06, | |
| "loss": 0.108, | |
| "step": 2317 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5653071762794453e-06, | |
| "loss": 0.1108, | |
| "step": 2318 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.563255599705769e-06, | |
| "loss": 0.0919, | |
| "step": 2319 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5612047567401906e-06, | |
| "loss": 0.0918, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5591546489888193e-06, | |
| "loss": 0.1074, | |
| "step": 2321 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5571052780571918e-06, | |
| "loss": 0.0967, | |
| "step": 2322 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5550566455502655e-06, | |
| "loss": 0.0955, | |
| "step": 2323 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 1.5530087530724187e-06, | |
| "loss": 0.1005, | |
| "step": 2324 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5509616022274524e-06, | |
| "loss": 0.0908, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5489151946185837e-06, | |
| "loss": 0.0953, | |
| "step": 2326 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5468695318484516e-06, | |
| "loss": 0.1, | |
| "step": 2327 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5448246155191093e-06, | |
| "loss": 0.1076, | |
| "step": 2328 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5427804472320252e-06, | |
| "loss": 0.1033, | |
| "step": 2329 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.540737028588082e-06, | |
| "loss": 0.1182, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5386943611875782e-06, | |
| "loss": 0.1075, | |
| "step": 2331 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5366524466302193e-06, | |
| "loss": 0.1096, | |
| "step": 2332 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5346112865151252e-06, | |
| "loss": 0.1391, | |
| "step": 2333 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5325708824408218e-06, | |
| "loss": 0.0902, | |
| "step": 2334 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5305312360052444e-06, | |
| "loss": 0.1067, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5284923488057372e-06, | |
| "loss": 0.1078, | |
| "step": 2336 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.526454222439045e-06, | |
| "loss": 0.0875, | |
| "step": 2337 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5244168585013215e-06, | |
| "loss": 0.1009, | |
| "step": 2338 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5223802585881187e-06, | |
| "loss": 0.1149, | |
| "step": 2339 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.520344424294394e-06, | |
| "loss": 0.0938, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.518309357214505e-06, | |
| "loss": 0.1052, | |
| "step": 2341 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5162750589422053e-06, | |
| "loss": 0.1126, | |
| "step": 2342 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5142415310706504e-06, | |
| "loss": 0.1122, | |
| "step": 2343 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.512208775192389e-06, | |
| "loss": 0.1002, | |
| "step": 2344 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5101767928993665e-06, | |
| "loss": 0.0971, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5081455857829247e-06, | |
| "loss": 0.1119, | |
| "step": 2346 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5061151554337949e-06, | |
| "loss": 0.0977, | |
| "step": 2347 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.504085503442102e-06, | |
| "loss": 0.1119, | |
| "step": 2348 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.50205663139736e-06, | |
| "loss": 0.0942, | |
| "step": 2349 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.5000285408884734e-06, | |
| "loss": 0.0954, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4980012335037352e-06, | |
| "loss": 0.1357, | |
| "step": 2351 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4959747108308232e-06, | |
| "loss": 0.0967, | |
| "step": 2352 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4939489744568015e-06, | |
| "loss": 0.1092, | |
| "step": 2353 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4919240259681179e-06, | |
| "loss": 0.0955, | |
| "step": 2354 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4898998669506046e-06, | |
| "loss": 0.117, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4878764989894752e-06, | |
| "loss": 0.1187, | |
| "step": 2356 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4858539236693215e-06, | |
| "loss": 0.0947, | |
| "step": 2357 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.483832142574118e-06, | |
| "loss": 0.1085, | |
| "step": 2358 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4818111572872137e-06, | |
| "loss": 0.0865, | |
| "step": 2359 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.4797909693913377e-06, | |
| "loss": 0.0946, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 1.477771580468593e-06, | |
| "loss": 0.0958, | |
| "step": 2361 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.475752992100456e-06, | |
| "loss": 0.0934, | |
| "step": 2362 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4737352058677781e-06, | |
| "loss": 0.108, | |
| "step": 2363 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4717182233507798e-06, | |
| "loss": 0.1024, | |
| "step": 2364 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4697020461290562e-06, | |
| "loss": 0.1166, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4676866757815683e-06, | |
| "loss": 0.0953, | |
| "step": 2366 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4656721138866465e-06, | |
| "loss": 0.1124, | |
| "step": 2367 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.463658362021988e-06, | |
| "loss": 0.1053, | |
| "step": 2368 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4616454217646542e-06, | |
| "loss": 0.1095, | |
| "step": 2369 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4596332946910739e-06, | |
| "loss": 0.107, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4576219823770372e-06, | |
| "loss": 0.1098, | |
| "step": 2371 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.455611486397695e-06, | |
| "loss": 0.0836, | |
| "step": 2372 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4536018083275622e-06, | |
| "loss": 0.1019, | |
| "step": 2373 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.45159294974051e-06, | |
| "loss": 0.1063, | |
| "step": 2374 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4495849122097683e-06, | |
| "loss": 0.1129, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.447577697307926e-06, | |
| "loss": 0.1099, | |
| "step": 2376 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4455713066069248e-06, | |
| "loss": 0.109, | |
| "step": 2377 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4435657416780647e-06, | |
| "loss": 0.1115, | |
| "step": 2378 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4415610040919955e-06, | |
| "loss": 0.1095, | |
| "step": 2379 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.43955709541872e-06, | |
| "loss": 0.0878, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4375540172275937e-06, | |
| "loss": 0.1142, | |
| "step": 2381 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4355517710873184e-06, | |
| "loss": 0.0916, | |
| "step": 2382 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4335503585659483e-06, | |
| "loss": 0.1077, | |
| "step": 2383 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4315497812308815e-06, | |
| "loss": 0.0889, | |
| "step": 2384 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4295500406488623e-06, | |
| "loss": 0.0925, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.427551138385982e-06, | |
| "loss": 0.1116, | |
| "step": 2386 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4255530760076724e-06, | |
| "loss": 0.0881, | |
| "step": 2387 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4235558550787112e-06, | |
| "loss": 0.1176, | |
| "step": 2388 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4215594771632139e-06, | |
| "loss": 0.1165, | |
| "step": 2389 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4195639438246356e-06, | |
| "loss": 0.0962, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4175692566257732e-06, | |
| "loss": 0.1077, | |
| "step": 2391 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4155754171287572e-06, | |
| "loss": 0.1057, | |
| "step": 2392 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4135824268950581e-06, | |
| "loss": 0.0909, | |
| "step": 2393 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4115902874854782e-06, | |
| "loss": 0.0921, | |
| "step": 2394 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4095990004601532e-06, | |
| "loss": 0.0951, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4076085673785546e-06, | |
| "loss": 0.1097, | |
| "step": 2396 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 1.4056189897994815e-06, | |
| "loss": 0.1164, | |
| "step": 2397 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.4036302692810667e-06, | |
| "loss": 0.119, | |
| "step": 2398 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.4016424073807682e-06, | |
| "loss": 0.1054, | |
| "step": 2399 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3996554056553723e-06, | |
| "loss": 0.118, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3976692656609942e-06, | |
| "loss": 0.1174, | |
| "step": 2401 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.395683988953071e-06, | |
| "loss": 0.0984, | |
| "step": 2402 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.393699577086367e-06, | |
| "loss": 0.0965, | |
| "step": 2403 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.391716031614966e-06, | |
| "loss": 0.1012, | |
| "step": 2404 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3897333540922744e-06, | |
| "loss": 0.1173, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3877515460710206e-06, | |
| "loss": 0.109, | |
| "step": 2406 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3857706091032488e-06, | |
| "loss": 0.1027, | |
| "step": 2407 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.383790544740325e-06, | |
| "loss": 0.1238, | |
| "step": 2408 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3818113545329285e-06, | |
| "loss": 0.1112, | |
| "step": 2409 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3798330400310538e-06, | |
| "loss": 0.0984, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.377855602784014e-06, | |
| "loss": 0.0994, | |
| "step": 2411 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3758790443404294e-06, | |
| "loss": 0.1002, | |
| "step": 2412 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.373903366248236e-06, | |
| "loss": 0.0932, | |
| "step": 2413 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3719285700546809e-06, | |
| "loss": 0.1123, | |
| "step": 2414 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.369954657306315e-06, | |
| "loss": 0.1015, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3679816295490044e-06, | |
| "loss": 0.1123, | |
| "step": 2416 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3660094883279168e-06, | |
| "loss": 0.1063, | |
| "step": 2417 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3640382351875287e-06, | |
| "loss": 0.1023, | |
| "step": 2418 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3620678716716218e-06, | |
| "loss": 0.1016, | |
| "step": 2419 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3600983993232758e-06, | |
| "loss": 0.1264, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3581298196848785e-06, | |
| "loss": 0.107, | |
| "step": 2421 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3561621342981153e-06, | |
| "loss": 0.1041, | |
| "step": 2422 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3541953447039716e-06, | |
| "loss": 0.1112, | |
| "step": 2423 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3522294524427349e-06, | |
| "loss": 0.0908, | |
| "step": 2424 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.350264459053982e-06, | |
| "loss": 0.095, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3483003660765948e-06, | |
| "loss": 0.0923, | |
| "step": 2426 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3463371750487425e-06, | |
| "loss": 0.1116, | |
| "step": 2427 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3443748875078933e-06, | |
| "loss": 0.0989, | |
| "step": 2428 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3424135049908072e-06, | |
| "loss": 0.1188, | |
| "step": 2429 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3404530290335305e-06, | |
| "loss": 0.0866, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3384934611714055e-06, | |
| "loss": 0.108, | |
| "step": 2431 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3365348029390593e-06, | |
| "loss": 0.0929, | |
| "step": 2432 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.3345770558704094e-06, | |
| "loss": 0.1001, | |
| "step": 2433 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 1.33262022149866e-06, | |
| "loss": 0.1105, | |
| "step": 2434 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.330664301356296e-06, | |
| "loss": 0.0975, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3287092969750913e-06, | |
| "loss": 0.1056, | |
| "step": 2436 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3267552098860998e-06, | |
| "loss": 0.1025, | |
| "step": 2437 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3248020416196583e-06, | |
| "loss": 0.1081, | |
| "step": 2438 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3228497937053846e-06, | |
| "loss": 0.1149, | |
| "step": 2439 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3208984676721742e-06, | |
| "loss": 0.105, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3189480650482012e-06, | |
| "loss": 0.106, | |
| "step": 2441 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3169985873609153e-06, | |
| "loss": 0.0988, | |
| "step": 2442 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3150500361370444e-06, | |
| "loss": 0.0979, | |
| "step": 2443 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.313102412902591e-06, | |
| "loss": 0.0895, | |
| "step": 2444 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3111557191828272e-06, | |
| "loss": 0.109, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3092099565023003e-06, | |
| "loss": 0.102, | |
| "step": 2446 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3072651263848268e-06, | |
| "loss": 0.1036, | |
| "step": 2447 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3053212303534935e-06, | |
| "loss": 0.1104, | |
| "step": 2448 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3033782699306577e-06, | |
| "loss": 0.0994, | |
| "step": 2449 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.3014362466379407e-06, | |
| "loss": 0.1095, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2994951619962303e-06, | |
| "loss": 0.1081, | |
| "step": 2451 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.297555017525682e-06, | |
| "loss": 0.1002, | |
| "step": 2452 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2956158147457116e-06, | |
| "loss": 0.097, | |
| "step": 2453 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2936775551750002e-06, | |
| "loss": 0.1023, | |
| "step": 2454 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2917402403314888e-06, | |
| "loss": 0.1001, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2898038717323774e-06, | |
| "loss": 0.1027, | |
| "step": 2456 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.287868450894128e-06, | |
| "loss": 0.0984, | |
| "step": 2457 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2859339793324572e-06, | |
| "loss": 0.092, | |
| "step": 2458 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.284000458562341e-06, | |
| "loss": 0.0984, | |
| "step": 2459 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2820678900980093e-06, | |
| "loss": 0.1117, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2801362754529445e-06, | |
| "loss": 0.0973, | |
| "step": 2461 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2782056161398862e-06, | |
| "loss": 0.0987, | |
| "step": 2462 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2762759136708214e-06, | |
| "loss": 0.1039, | |
| "step": 2463 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.274347169556992e-06, | |
| "loss": 0.1073, | |
| "step": 2464 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.272419385308886e-06, | |
| "loss": 0.105, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2704925624362398e-06, | |
| "loss": 0.113, | |
| "step": 2466 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2685667024480403e-06, | |
| "loss": 0.0975, | |
| "step": 2467 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.2666418068525161e-06, | |
| "loss": 0.1126, | |
| "step": 2468 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.264717877157144e-06, | |
| "loss": 0.1046, | |
| "step": 2469 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.262794914868642e-06, | |
| "loss": 0.0998, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 1.26087292149297e-06, | |
| "loss": 0.0974, | |
| "step": 2471 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.258951898535333e-06, | |
| "loss": 0.1138, | |
| "step": 2472 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2570318475001708e-06, | |
| "loss": 0.0984, | |
| "step": 2473 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2551127698911675e-06, | |
| "loss": 0.1103, | |
| "step": 2474 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.25319466721124e-06, | |
| "loss": 0.0879, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2512775409625433e-06, | |
| "loss": 0.1151, | |
| "step": 2476 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2493613926464695e-06, | |
| "loss": 0.1072, | |
| "step": 2477 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2474462237636425e-06, | |
| "loss": 0.1021, | |
| "step": 2478 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2455320358139212e-06, | |
| "loss": 0.0946, | |
| "step": 2479 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2436188302963946e-06, | |
| "loss": 0.0883, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2417066087093818e-06, | |
| "loss": 0.0981, | |
| "step": 2481 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2397953725504341e-06, | |
| "loss": 0.0982, | |
| "step": 2482 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2378851233163282e-06, | |
| "loss": 0.0991, | |
| "step": 2483 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2359758625030704e-06, | |
| "loss": 0.1153, | |
| "step": 2484 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2340675916058907e-06, | |
| "loss": 0.1189, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2321603121192443e-06, | |
| "loss": 0.0881, | |
| "step": 2486 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.230254025536812e-06, | |
| "loss": 0.1103, | |
| "step": 2487 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2283487333514937e-06, | |
| "loss": 0.0823, | |
| "step": 2488 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2264444370554141e-06, | |
| "loss": 0.0898, | |
| "step": 2489 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.224541138139915e-06, | |
| "loss": 0.1169, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2226388380955576e-06, | |
| "loss": 0.1153, | |
| "step": 2491 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2207375384121235e-06, | |
| "loss": 0.0937, | |
| "step": 2492 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2188372405786068e-06, | |
| "loss": 0.1244, | |
| "step": 2493 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2169379460832211e-06, | |
| "loss": 0.088, | |
| "step": 2494 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2150396564133911e-06, | |
| "loss": 0.1034, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2131423730557555e-06, | |
| "loss": 0.0948, | |
| "step": 2496 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.211246097496166e-06, | |
| "loss": 0.1095, | |
| "step": 2497 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2093508312196825e-06, | |
| "loss": 0.096, | |
| "step": 2498 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2074565757105785e-06, | |
| "loss": 0.1052, | |
| "step": 2499 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2055633324523324e-06, | |
| "loss": 0.1042, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2036711029276303e-06, | |
| "loss": 0.1156, | |
| "step": 2501 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.2017798886183666e-06, | |
| "loss": 0.1051, | |
| "step": 2502 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.1998896910056373e-06, | |
| "loss": 0.105, | |
| "step": 2503 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.1980005115697462e-06, | |
| "loss": 0.092, | |
| "step": 2504 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.1961123517901965e-06, | |
| "loss": 0.1072, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.194225213145693e-06, | |
| "loss": 0.0914, | |
| "step": 2506 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 1.1923390971141435e-06, | |
| "loss": 0.1004, | |
| "step": 2507 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1904540051726516e-06, | |
| "loss": 0.1251, | |
| "step": 2508 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1885699387975208e-06, | |
| "loss": 0.0898, | |
| "step": 2509 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1866868994642535e-06, | |
| "loss": 0.1102, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1848048886475417e-06, | |
| "loss": 0.1152, | |
| "step": 2511 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1829239078212778e-06, | |
| "loss": 0.1158, | |
| "step": 2512 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1810439584585443e-06, | |
| "loss": 0.1158, | |
| "step": 2513 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1791650420316169e-06, | |
| "loss": 0.1286, | |
| "step": 2514 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1772871600119651e-06, | |
| "loss": 0.0921, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1754103138702413e-06, | |
| "loss": 0.1209, | |
| "step": 2516 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1735345050762944e-06, | |
| "loss": 0.0989, | |
| "step": 2517 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1716597350991553e-06, | |
| "loss": 0.1046, | |
| "step": 2518 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1697860054070442e-06, | |
| "loss": 0.107, | |
| "step": 2519 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1679133174673684e-06, | |
| "loss": 0.0851, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1660416727467128e-06, | |
| "loss": 0.1059, | |
| "step": 2521 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1641710727108524e-06, | |
| "loss": 0.1142, | |
| "step": 2522 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.162301518824739e-06, | |
| "loss": 0.112, | |
| "step": 2523 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.160433012552508e-06, | |
| "loss": 0.1102, | |
| "step": 2524 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1585655553574758e-06, | |
| "loss": 0.1236, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.156699148702131e-06, | |
| "loss": 0.1055, | |
| "step": 2526 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1548337940481463e-06, | |
| "loss": 0.0918, | |
| "step": 2527 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1529694928563657e-06, | |
| "loss": 0.0971, | |
| "step": 2528 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1511062465868114e-06, | |
| "loss": 0.0861, | |
| "step": 2529 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1492440566986794e-06, | |
| "loss": 0.1031, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1473829246503342e-06, | |
| "loss": 0.1049, | |
| "step": 2531 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1455228518993174e-06, | |
| "loss": 0.1068, | |
| "step": 2532 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1436638399023363e-06, | |
| "loss": 0.0931, | |
| "step": 2533 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1418058901152712e-06, | |
| "loss": 0.0967, | |
| "step": 2534 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1399490039931693e-06, | |
| "loss": 0.0912, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1380931829902436e-06, | |
| "loss": 0.1123, | |
| "step": 2536 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.136238428559874e-06, | |
| "loss": 0.0876, | |
| "step": 2537 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1343847421546043e-06, | |
| "loss": 0.0959, | |
| "step": 2538 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1325321252261429e-06, | |
| "loss": 0.0983, | |
| "step": 2539 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1306805792253614e-06, | |
| "loss": 0.0945, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1288301056022905e-06, | |
| "loss": 0.1181, | |
| "step": 2541 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1269807058061225e-06, | |
| "loss": 0.1058, | |
| "step": 2542 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.125132381285207e-06, | |
| "loss": 0.0964, | |
| "step": 2543 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 1.1232851334870542e-06, | |
| "loss": 0.1071, | |
| "step": 2544 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1214389638583306e-06, | |
| "loss": 0.1204, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1195938738448567e-06, | |
| "loss": 0.0992, | |
| "step": 2546 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1177498648916078e-06, | |
| "loss": 0.1053, | |
| "step": 2547 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1159069384427123e-06, | |
| "loss": 0.0985, | |
| "step": 2548 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1140650959414525e-06, | |
| "loss": 0.0892, | |
| "step": 2549 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1122243388302622e-06, | |
| "loss": 0.0983, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1103846685507227e-06, | |
| "loss": 0.1081, | |
| "step": 2551 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1085460865435653e-06, | |
| "loss": 0.1028, | |
| "step": 2552 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1067085942486683e-06, | |
| "loss": 0.0876, | |
| "step": 2553 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.104872193105058e-06, | |
| "loss": 0.1004, | |
| "step": 2554 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1030368845509065e-06, | |
| "loss": 0.1026, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.1012026700235288e-06, | |
| "loss": 0.1086, | |
| "step": 2556 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.099369550959383e-06, | |
| "loss": 0.1013, | |
| "step": 2557 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0975375287940693e-06, | |
| "loss": 0.0969, | |
| "step": 2558 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0957066049623302e-06, | |
| "loss": 0.1165, | |
| "step": 2559 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0938767808980485e-06, | |
| "loss": 0.1109, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0920480580342431e-06, | |
| "loss": 0.1168, | |
| "step": 2561 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0902204378030722e-06, | |
| "loss": 0.1084, | |
| "step": 2562 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0883939216358296e-06, | |
| "loss": 0.097, | |
| "step": 2563 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0865685109629454e-06, | |
| "loss": 0.094, | |
| "step": 2564 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0847442072139846e-06, | |
| "loss": 0.1059, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0829210118176436e-06, | |
| "loss": 0.098, | |
| "step": 2566 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0810989262017504e-06, | |
| "loss": 0.1058, | |
| "step": 2567 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.079277951793267e-06, | |
| "loss": 0.1062, | |
| "step": 2568 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.077458090018281e-06, | |
| "loss": 0.1166, | |
| "step": 2569 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0756393423020129e-06, | |
| "loss": 0.1, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0738217100688072e-06, | |
| "loss": 0.0886, | |
| "step": 2571 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0720051947421362e-06, | |
| "loss": 0.1116, | |
| "step": 2572 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0701897977445986e-06, | |
| "loss": 0.0982, | |
| "step": 2573 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0683755204979147e-06, | |
| "loss": 0.1109, | |
| "step": 2574 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.066562364422931e-06, | |
| "loss": 0.0948, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0647503309396132e-06, | |
| "loss": 0.1195, | |
| "step": 2576 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0629394214670489e-06, | |
| "loss": 0.108, | |
| "step": 2577 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0611296374234465e-06, | |
| "loss": 0.1001, | |
| "step": 2578 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0593209802261305e-06, | |
| "loss": 0.0928, | |
| "step": 2579 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 1.0575134512915464e-06, | |
| "loss": 0.1151, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0557070520352534e-06, | |
| "loss": 0.107, | |
| "step": 2581 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0539017838719253e-06, | |
| "loss": 0.1057, | |
| "step": 2582 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0520976482153539e-06, | |
| "loss": 0.0909, | |
| "step": 2583 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0502946464784393e-06, | |
| "loss": 0.1182, | |
| "step": 2584 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0484927800731983e-06, | |
| "loss": 0.0763, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0466920504107555e-06, | |
| "loss": 0.0838, | |
| "step": 2586 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0448924589013448e-06, | |
| "loss": 0.1024, | |
| "step": 2587 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0430940069543117e-06, | |
| "loss": 0.1147, | |
| "step": 2588 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0412966959781059e-06, | |
| "loss": 0.1046, | |
| "step": 2589 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0395005273802872e-06, | |
| "loss": 0.1011, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0377055025675177e-06, | |
| "loss": 0.0864, | |
| "step": 2591 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0359116229455638e-06, | |
| "loss": 0.105, | |
| "step": 2592 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0341188899192978e-06, | |
| "loss": 0.1058, | |
| "step": 2593 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0323273048926903e-06, | |
| "loss": 0.1015, | |
| "step": 2594 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0305368692688175e-06, | |
| "loss": 0.0965, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.028747584449851e-06, | |
| "loss": 0.0955, | |
| "step": 2596 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0269594518370624e-06, | |
| "loss": 0.1209, | |
| "step": 2597 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0251724728308235e-06, | |
| "loss": 0.1048, | |
| "step": 2598 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0233866488305985e-06, | |
| "loss": 0.0976, | |
| "step": 2599 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0216019812349508e-06, | |
| "loss": 0.0913, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0198184714415366e-06, | |
| "loss": 0.0952, | |
| "step": 2601 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0180361208471034e-06, | |
| "loss": 0.097, | |
| "step": 2602 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0162549308474953e-06, | |
| "loss": 0.102, | |
| "step": 2603 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.014474902837643e-06, | |
| "loss": 0.1083, | |
| "step": 2604 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0126960382115711e-06, | |
| "loss": 0.099, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0109183383623899e-06, | |
| "loss": 0.0961, | |
| "step": 2606 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0091418046822984e-06, | |
| "loss": 0.1015, | |
| "step": 2607 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0073664385625846e-06, | |
| "loss": 0.1054, | |
| "step": 2608 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0055922413936178e-06, | |
| "loss": 0.109, | |
| "step": 2609 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0038192145648567e-06, | |
| "loss": 0.0967, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0020473594648403e-06, | |
| "loss": 0.1165, | |
| "step": 2611 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 1.0002766774811892e-06, | |
| "loss": 0.0925, | |
| "step": 2612 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 9.985071700006088e-07, | |
| "loss": 0.1044, | |
| "step": 2613 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 9.96738838408881e-07, | |
| "loss": 0.1104, | |
| "step": 2614 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 9.949716840908705e-07, | |
| "loss": 0.0984, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 9.932057084305168e-07, | |
| "loss": 0.0964, | |
| "step": 2616 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 9.914409128108368e-07, | |
| "loss": 0.0886, | |
| "step": 2617 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.896772986139255e-07, | |
| "loss": 0.091, | |
| "step": 2618 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.8791486722095e-07, | |
| "loss": 0.1245, | |
| "step": 2619 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.861536200121535e-07, | |
| "loss": 0.1152, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.843935583668502e-07, | |
| "loss": 0.0961, | |
| "step": 2621 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.826346836634246e-07, | |
| "loss": 0.1227, | |
| "step": 2622 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.808769972793359e-07, | |
| "loss": 0.0956, | |
| "step": 2623 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.791205005911076e-07, | |
| "loss": 0.104, | |
| "step": 2624 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.77365194974335e-07, | |
| "loss": 0.105, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.756110818036815e-07, | |
| "loss": 0.0949, | |
| "step": 2626 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.73858162452871e-07, | |
| "loss": 0.1065, | |
| "step": 2627 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.721064382946987e-07, | |
| "loss": 0.1058, | |
| "step": 2628 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.7035591070102e-07, | |
| "loss": 0.0996, | |
| "step": 2629 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.686065810427548e-07, | |
| "loss": 0.1008, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.668584506898853e-07, | |
| "loss": 0.0951, | |
| "step": 2631 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.651115210114534e-07, | |
| "loss": 0.1023, | |
| "step": 2632 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.633657933755603e-07, | |
| "loss": 0.1134, | |
| "step": 2633 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.61621269149366e-07, | |
| "loss": 0.1133, | |
| "step": 2634 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.598779496990892e-07, | |
| "loss": 0.1055, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.581358363900053e-07, | |
| "loss": 0.0967, | |
| "step": 2636 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.563949305864436e-07, | |
| "loss": 0.0977, | |
| "step": 2637 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.546552336517883e-07, | |
| "loss": 0.1124, | |
| "step": 2638 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.529167469484762e-07, | |
| "loss": 0.1055, | |
| "step": 2639 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.511794718379977e-07, | |
| "loss": 0.0984, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.494434096808946e-07, | |
| "loss": 0.1182, | |
| "step": 2641 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.47708561836757e-07, | |
| "loss": 0.1068, | |
| "step": 2642 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.459749296642254e-07, | |
| "loss": 0.0941, | |
| "step": 2643 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.442425145209863e-07, | |
| "loss": 0.0875, | |
| "step": 2644 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.425113177637757e-07, | |
| "loss": 0.1234, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.407813407483751e-07, | |
| "loss": 0.1109, | |
| "step": 2646 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.390525848296093e-07, | |
| "loss": 0.1017, | |
| "step": 2647 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.373250513613468e-07, | |
| "loss": 0.1331, | |
| "step": 2648 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.355987416964993e-07, | |
| "loss": 0.1343, | |
| "step": 2649 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.338736571870205e-07, | |
| "loss": 0.113, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.321497991839051e-07, | |
| "loss": 0.1149, | |
| "step": 2651 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.304271690371863e-07, | |
| "loss": 0.0949, | |
| "step": 2652 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.287057680959352e-07, | |
| "loss": 0.0959, | |
| "step": 2653 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 9.269855977082604e-07, | |
| "loss": 0.1083, | |
| "step": 2654 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.252666592213077e-07, | |
| "loss": 0.1109, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.235489539812592e-07, | |
| "loss": 0.0906, | |
| "step": 2656 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.21832483333329e-07, | |
| "loss": 0.1006, | |
| "step": 2657 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.201172486217641e-07, | |
| "loss": 0.0853, | |
| "step": 2658 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.184032511898447e-07, | |
| "loss": 0.1045, | |
| "step": 2659 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.166904923798822e-07, | |
| "loss": 0.087, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.14978973533219e-07, | |
| "loss": 0.1122, | |
| "step": 2661 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.132686959902237e-07, | |
| "loss": 0.09, | |
| "step": 2662 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.115596610902943e-07, | |
| "loss": 0.1078, | |
| "step": 2663 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.09851870171855e-07, | |
| "loss": 0.0961, | |
| "step": 2664 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.081453245723568e-07, | |
| "loss": 0.1114, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.064400256282757e-07, | |
| "loss": 0.113, | |
| "step": 2666 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.047359746751103e-07, | |
| "loss": 0.1057, | |
| "step": 2667 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.030331730473813e-07, | |
| "loss": 0.1138, | |
| "step": 2668 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 9.013316220786316e-07, | |
| "loss": 0.1089, | |
| "step": 2669 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.996313231014256e-07, | |
| "loss": 0.0989, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.979322774473473e-07, | |
| "loss": 0.1099, | |
| "step": 2671 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.962344864469974e-07, | |
| "loss": 0.0901, | |
| "step": 2672 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.945379514299951e-07, | |
| "loss": 0.1181, | |
| "step": 2673 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.928426737249748e-07, | |
| "loss": 0.0945, | |
| "step": 2674 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.911486546595888e-07, | |
| "loss": 0.1125, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.89455895560502e-07, | |
| "loss": 0.0959, | |
| "step": 2676 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.877643977533932e-07, | |
| "loss": 0.1089, | |
| "step": 2677 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.860741625629518e-07, | |
| "loss": 0.1092, | |
| "step": 2678 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.843851913128793e-07, | |
| "loss": 0.0919, | |
| "step": 2679 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.826974853258885e-07, | |
| "loss": 0.0976, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.810110459237009e-07, | |
| "loss": 0.0959, | |
| "step": 2681 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.793258744270453e-07, | |
| "loss": 0.114, | |
| "step": 2682 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.776419721556575e-07, | |
| "loss": 0.1184, | |
| "step": 2683 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.75959340428279e-07, | |
| "loss": 0.118, | |
| "step": 2684 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.742779805626575e-07, | |
| "loss": 0.1091, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.725978938755452e-07, | |
| "loss": 0.093, | |
| "step": 2686 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.709190816826954e-07, | |
| "loss": 0.117, | |
| "step": 2687 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.692415452988629e-07, | |
| "loss": 0.0981, | |
| "step": 2688 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.675652860378062e-07, | |
| "loss": 0.0961, | |
| "step": 2689 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 8.658903052122799e-07, | |
| "loss": 0.1007, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.642166041340416e-07, | |
| "loss": 0.0976, | |
| "step": 2691 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.625441841138432e-07, | |
| "loss": 0.1068, | |
| "step": 2692 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.608730464614339e-07, | |
| "loss": 0.0989, | |
| "step": 2693 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.592031924855609e-07, | |
| "loss": 0.0964, | |
| "step": 2694 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.57534623493963e-07, | |
| "loss": 0.104, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.558673407933757e-07, | |
| "loss": 0.105, | |
| "step": 2696 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.542013456895249e-07, | |
| "loss": 0.0943, | |
| "step": 2697 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.525366394871276e-07, | |
| "loss": 0.095, | |
| "step": 2698 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.508732234898948e-07, | |
| "loss": 0.1221, | |
| "step": 2699 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.492110990005228e-07, | |
| "loss": 0.1027, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.475502673207006e-07, | |
| "loss": 0.0975, | |
| "step": 2701 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.458907297511018e-07, | |
| "loss": 0.1184, | |
| "step": 2702 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.442324875913866e-07, | |
| "loss": 0.1048, | |
| "step": 2703 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.425755421402029e-07, | |
| "loss": 0.0892, | |
| "step": 2704 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.409198946951807e-07, | |
| "loss": 0.107, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.392655465529356e-07, | |
| "loss": 0.1094, | |
| "step": 2706 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.37612499009064e-07, | |
| "loss": 0.0976, | |
| "step": 2707 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.359607533581435e-07, | |
| "loss": 0.1131, | |
| "step": 2708 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.343103108937342e-07, | |
| "loss": 0.104, | |
| "step": 2709 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.32661172908373e-07, | |
| "loss": 0.0953, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.310133406935783e-07, | |
| "loss": 0.1116, | |
| "step": 2711 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.293668155398432e-07, | |
| "loss": 0.1025, | |
| "step": 2712 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.277215987366372e-07, | |
| "loss": 0.09, | |
| "step": 2713 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.260776915724075e-07, | |
| "loss": 0.1122, | |
| "step": 2714 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.244350953345731e-07, | |
| "loss": 0.1129, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.227938113095288e-07, | |
| "loss": 0.1065, | |
| "step": 2716 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.211538407826397e-07, | |
| "loss": 0.1007, | |
| "step": 2717 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.195151850382425e-07, | |
| "loss": 0.1276, | |
| "step": 2718 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.178778453596456e-07, | |
| "loss": 0.1325, | |
| "step": 2719 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.16241823029125e-07, | |
| "loss": 0.1109, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.146071193279268e-07, | |
| "loss": 0.0908, | |
| "step": 2721 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.129737355362632e-07, | |
| "loss": 0.092, | |
| "step": 2722 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.113416729333118e-07, | |
| "loss": 0.1175, | |
| "step": 2723 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.097109327972185e-07, | |
| "loss": 0.114, | |
| "step": 2724 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.080815164050903e-07, | |
| "loss": 0.1249, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.064534250330003e-07, | |
| "loss": 0.1078, | |
| "step": 2726 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 8.048266599559809e-07, | |
| "loss": 0.0848, | |
| "step": 2727 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 8.032012224480293e-07, | |
| "loss": 0.0942, | |
| "step": 2728 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 8.015771137821002e-07, | |
| "loss": 0.1262, | |
| "step": 2729 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.999543352301082e-07, | |
| "loss": 0.0945, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.983328880629276e-07, | |
| "loss": 0.0977, | |
| "step": 2731 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.967127735503879e-07, | |
| "loss": 0.1026, | |
| "step": 2732 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.950939929612777e-07, | |
| "loss": 0.0949, | |
| "step": 2733 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.934765475633388e-07, | |
| "loss": 0.1018, | |
| "step": 2734 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.918604386232665e-07, | |
| "loss": 0.0896, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.902456674067127e-07, | |
| "loss": 0.1144, | |
| "step": 2736 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.886322351782782e-07, | |
| "loss": 0.0938, | |
| "step": 2737 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.870201432015187e-07, | |
| "loss": 0.1196, | |
| "step": 2738 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.854093927389373e-07, | |
| "loss": 0.111, | |
| "step": 2739 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.837999850519867e-07, | |
| "loss": 0.1041, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.821919214010707e-07, | |
| "loss": 0.0989, | |
| "step": 2741 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.805852030455371e-07, | |
| "loss": 0.1184, | |
| "step": 2742 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.789798312436833e-07, | |
| "loss": 0.0766, | |
| "step": 2743 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.773758072527498e-07, | |
| "loss": 0.1116, | |
| "step": 2744 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.757731323289216e-07, | |
| "loss": 0.1214, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.741718077273288e-07, | |
| "loss": 0.1129, | |
| "step": 2746 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.725718347020439e-07, | |
| "loss": 0.1255, | |
| "step": 2747 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.709732145060795e-07, | |
| "loss": 0.1075, | |
| "step": 2748 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.693759483913892e-07, | |
| "loss": 0.106, | |
| "step": 2749 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.677800376088657e-07, | |
| "loss": 0.1145, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.661854834083415e-07, | |
| "loss": 0.1102, | |
| "step": 2751 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.645922870385875e-07, | |
| "loss": 0.1029, | |
| "step": 2752 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.630004497473085e-07, | |
| "loss": 0.0957, | |
| "step": 2753 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.614099727811464e-07, | |
| "loss": 0.0975, | |
| "step": 2754 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.598208573856769e-07, | |
| "loss": 0.0881, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.582331048054111e-07, | |
| "loss": 0.1125, | |
| "step": 2756 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.566467162837929e-07, | |
| "loss": 0.0943, | |
| "step": 2757 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.550616930631957e-07, | |
| "loss": 0.1012, | |
| "step": 2758 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.534780363849248e-07, | |
| "loss": 0.1054, | |
| "step": 2759 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.51895747489215e-07, | |
| "loss": 0.0887, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.503148276152309e-07, | |
| "loss": 0.1001, | |
| "step": 2761 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.487352780010651e-07, | |
| "loss": 0.1004, | |
| "step": 2762 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 7.471570998837355e-07, | |
| "loss": 0.0965, | |
| "step": 2763 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.455802944991872e-07, | |
| "loss": 0.106, | |
| "step": 2764 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.440048630822882e-07, | |
| "loss": 0.1003, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.42430806866834e-07, | |
| "loss": 0.1329, | |
| "step": 2766 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.408581270855411e-07, | |
| "loss": 0.0993, | |
| "step": 2767 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.392868249700478e-07, | |
| "loss": 0.0999, | |
| "step": 2768 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.377169017509134e-07, | |
| "loss": 0.1096, | |
| "step": 2769 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.361483586576176e-07, | |
| "loss": 0.1167, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.345811969185598e-07, | |
| "loss": 0.1019, | |
| "step": 2771 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.330154177610582e-07, | |
| "loss": 0.1023, | |
| "step": 2772 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.314510224113466e-07, | |
| "loss": 0.1004, | |
| "step": 2773 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.298880120945756e-07, | |
| "loss": 0.116, | |
| "step": 2774 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.283263880348102e-07, | |
| "loss": 0.0994, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.26766151455032e-07, | |
| "loss": 0.09, | |
| "step": 2776 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.252073035771354e-07, | |
| "loss": 0.0976, | |
| "step": 2777 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.23649845621926e-07, | |
| "loss": 0.0921, | |
| "step": 2778 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.220937788091212e-07, | |
| "loss": 0.1044, | |
| "step": 2779 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.20539104357349e-07, | |
| "loss": 0.1031, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.189858234841479e-07, | |
| "loss": 0.0886, | |
| "step": 2781 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.174339374059647e-07, | |
| "loss": 0.091, | |
| "step": 2782 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.158834473381534e-07, | |
| "loss": 0.1305, | |
| "step": 2783 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.143343544949746e-07, | |
| "loss": 0.1109, | |
| "step": 2784 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.127866600895941e-07, | |
| "loss": 0.11, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.11240365334084e-07, | |
| "loss": 0.0996, | |
| "step": 2786 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.09695471439421e-07, | |
| "loss": 0.1045, | |
| "step": 2787 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.081519796154818e-07, | |
| "loss": 0.1206, | |
| "step": 2788 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.066098910710472e-07, | |
| "loss": 0.0983, | |
| "step": 2789 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.050692070137974e-07, | |
| "loss": 0.1077, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.035299286503145e-07, | |
| "loss": 0.1001, | |
| "step": 2791 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.019920571860797e-07, | |
| "loss": 0.0857, | |
| "step": 2792 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 7.004555938254712e-07, | |
| "loss": 0.1216, | |
| "step": 2793 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.989205397717644e-07, | |
| "loss": 0.1386, | |
| "step": 2794 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.973868962271313e-07, | |
| "loss": 0.0989, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.958546643926397e-07, | |
| "loss": 0.0984, | |
| "step": 2796 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.943238454682527e-07, | |
| "loss": 0.1178, | |
| "step": 2797 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.927944406528247e-07, | |
| "loss": 0.1051, | |
| "step": 2798 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.912664511441045e-07, | |
| "loss": 0.1067, | |
| "step": 2799 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 6.897398781387299e-07, | |
| "loss": 0.1023, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.882147228322328e-07, | |
| "loss": 0.0987, | |
| "step": 2801 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.866909864190335e-07, | |
| "loss": 0.1103, | |
| "step": 2802 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.851686700924404e-07, | |
| "loss": 0.0993, | |
| "step": 2803 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.836477750446493e-07, | |
| "loss": 0.1067, | |
| "step": 2804 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.821283024667452e-07, | |
| "loss": 0.0919, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.806102535486966e-07, | |
| "loss": 0.1112, | |
| "step": 2806 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.790936294793593e-07, | |
| "loss": 0.1251, | |
| "step": 2807 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.775784314464717e-07, | |
| "loss": 0.0948, | |
| "step": 2808 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.76064660636655e-07, | |
| "loss": 0.0956, | |
| "step": 2809 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.745523182354147e-07, | |
| "loss": 0.0917, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.730414054271353e-07, | |
| "loss": 0.106, | |
| "step": 2811 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.715319233950846e-07, | |
| "loss": 0.0983, | |
| "step": 2812 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.700238733214068e-07, | |
| "loss": 0.1038, | |
| "step": 2813 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.685172563871256e-07, | |
| "loss": 0.1031, | |
| "step": 2814 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.670120737721444e-07, | |
| "loss": 0.1089, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.655083266552401e-07, | |
| "loss": 0.1145, | |
| "step": 2816 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.640060162140682e-07, | |
| "loss": 0.106, | |
| "step": 2817 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.625051436251576e-07, | |
| "loss": 0.0983, | |
| "step": 2818 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.610057100639103e-07, | |
| "loss": 0.1037, | |
| "step": 2819 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.595077167046041e-07, | |
| "loss": 0.0915, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.580111647203857e-07, | |
| "loss": 0.1104, | |
| "step": 2821 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.565160552832764e-07, | |
| "loss": 0.0948, | |
| "step": 2822 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.550223895641638e-07, | |
| "loss": 0.1088, | |
| "step": 2823 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.535301687328088e-07, | |
| "loss": 0.1066, | |
| "step": 2824 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.520393939578381e-07, | |
| "loss": 0.0969, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.505500664067463e-07, | |
| "loss": 0.1077, | |
| "step": 2826 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.490621872458963e-07, | |
| "loss": 0.1061, | |
| "step": 2827 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.475757576405137e-07, | |
| "loss": 0.094, | |
| "step": 2828 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.460907787546928e-07, | |
| "loss": 0.1054, | |
| "step": 2829 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.44607251751388e-07, | |
| "loss": 0.0977, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.431251777924183e-07, | |
| "loss": 0.1229, | |
| "step": 2831 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.416445580384659e-07, | |
| "loss": 0.0844, | |
| "step": 2832 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.401653936490712e-07, | |
| "loss": 0.1184, | |
| "step": 2833 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.386876857826388e-07, | |
| "loss": 0.1097, | |
| "step": 2834 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.372114355964292e-07, | |
| "loss": 0.1211, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.35736644246562e-07, | |
| "loss": 0.1156, | |
| "step": 2836 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 6.342633128880163e-07, | |
| "loss": 0.0894, | |
| "step": 2837 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.327914426746249e-07, | |
| "loss": 0.093, | |
| "step": 2838 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.313210347590792e-07, | |
| "loss": 0.1143, | |
| "step": 2839 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.298520902929234e-07, | |
| "loss": 0.1208, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.283846104265554e-07, | |
| "loss": 0.1045, | |
| "step": 2841 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.269185963092286e-07, | |
| "loss": 0.1147, | |
| "step": 2842 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.254540490890451e-07, | |
| "loss": 0.0853, | |
| "step": 2843 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.239909699129615e-07, | |
| "loss": 0.1128, | |
| "step": 2844 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.225293599267826e-07, | |
| "loss": 0.1041, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.21069220275162e-07, | |
| "loss": 0.0975, | |
| "step": 2846 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.196105521016047e-07, | |
| "loss": 0.0827, | |
| "step": 2847 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.181533565484601e-07, | |
| "loss": 0.1016, | |
| "step": 2848 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.16697634756927e-07, | |
| "loss": 0.0964, | |
| "step": 2849 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.152433878670485e-07, | |
| "loss": 0.1315, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.137906170177121e-07, | |
| "loss": 0.0906, | |
| "step": 2851 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.123393233466515e-07, | |
| "loss": 0.0943, | |
| "step": 2852 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.108895079904406e-07, | |
| "loss": 0.1051, | |
| "step": 2853 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.094411720844995e-07, | |
| "loss": 0.1125, | |
| "step": 2854 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.079943167630858e-07, | |
| "loss": 0.0943, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.065489431592989e-07, | |
| "loss": 0.111, | |
| "step": 2856 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.051050524050792e-07, | |
| "loss": 0.1071, | |
| "step": 2857 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.036626456312036e-07, | |
| "loss": 0.1033, | |
| "step": 2858 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.022217239672892e-07, | |
| "loss": 0.0932, | |
| "step": 2859 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 6.007822885417883e-07, | |
| "loss": 0.113, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.993443404819885e-07, | |
| "loss": 0.1005, | |
| "step": 2861 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.979078809140145e-07, | |
| "loss": 0.1123, | |
| "step": 2862 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.964729109628262e-07, | |
| "loss": 0.0992, | |
| "step": 2863 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.950394317522134e-07, | |
| "loss": 0.1087, | |
| "step": 2864 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.936074444048012e-07, | |
| "loss": 0.0978, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.92176950042044e-07, | |
| "loss": 0.1148, | |
| "step": 2866 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.9074794978423e-07, | |
| "loss": 0.1039, | |
| "step": 2867 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.89320444750476e-07, | |
| "loss": 0.101, | |
| "step": 2868 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.878944360587269e-07, | |
| "loss": 0.1152, | |
| "step": 2869 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.864699248257563e-07, | |
| "loss": 0.092, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.850469121671645e-07, | |
| "loss": 0.0874, | |
| "step": 2871 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.8362539919738e-07, | |
| "loss": 0.1127, | |
| "step": 2872 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 5.82205387029656e-07, | |
| "loss": 0.1057, | |
| "step": 2873 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.807868767760694e-07, | |
| "loss": 0.1088, | |
| "step": 2874 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.793698695475214e-07, | |
| "loss": 0.1101, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.77954366453736e-07, | |
| "loss": 0.0928, | |
| "step": 2876 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.7654036860326e-07, | |
| "loss": 0.0835, | |
| "step": 2877 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.751278771034613e-07, | |
| "loss": 0.1106, | |
| "step": 2878 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.737168930605272e-07, | |
| "loss": 0.106, | |
| "step": 2879 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.723074175794649e-07, | |
| "loss": 0.1111, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.708994517640998e-07, | |
| "loss": 0.1019, | |
| "step": 2881 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.694929967170753e-07, | |
| "loss": 0.1129, | |
| "step": 2882 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.68088053539853e-07, | |
| "loss": 0.1176, | |
| "step": 2883 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.666846233327089e-07, | |
| "loss": 0.0889, | |
| "step": 2884 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.652827071947337e-07, | |
| "loss": 0.1004, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.638823062238327e-07, | |
| "loss": 0.0965, | |
| "step": 2886 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.624834215167258e-07, | |
| "loss": 0.1004, | |
| "step": 2887 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.610860541689453e-07, | |
| "loss": 0.0953, | |
| "step": 2888 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.596902052748338e-07, | |
| "loss": 0.1177, | |
| "step": 2889 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.582958759275456e-07, | |
| "loss": 0.1078, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.56903067219044e-07, | |
| "loss": 0.1085, | |
| "step": 2891 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.55511780240103e-07, | |
| "loss": 0.1062, | |
| "step": 2892 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.541220160803048e-07, | |
| "loss": 0.1112, | |
| "step": 2893 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.52733775828038e-07, | |
| "loss": 0.1019, | |
| "step": 2894 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.513470605704976e-07, | |
| "loss": 0.1072, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.499618713936844e-07, | |
| "loss": 0.0951, | |
| "step": 2896 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.48578209382405e-07, | |
| "loss": 0.1035, | |
| "step": 2897 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.471960756202701e-07, | |
| "loss": 0.112, | |
| "step": 2898 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.458154711896923e-07, | |
| "loss": 0.0975, | |
| "step": 2899 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.444363971718875e-07, | |
| "loss": 0.0934, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.430588546468713e-07, | |
| "loss": 0.0887, | |
| "step": 2901 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.416828446934624e-07, | |
| "loss": 0.0979, | |
| "step": 2902 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.40308368389279e-07, | |
| "loss": 0.1061, | |
| "step": 2903 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.389354268107363e-07, | |
| "loss": 0.1079, | |
| "step": 2904 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.375640210330491e-07, | |
| "loss": 0.1176, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.361941521302283e-07, | |
| "loss": 0.1121, | |
| "step": 2906 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.348258211750823e-07, | |
| "loss": 0.1076, | |
| "step": 2907 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.334590292392155e-07, | |
| "loss": 0.1127, | |
| "step": 2908 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.320937773930257e-07, | |
| "loss": 0.0996, | |
| "step": 2909 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 5.307300667057049e-07, | |
| "loss": 0.0948, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.293678982452379e-07, | |
| "loss": 0.1066, | |
| "step": 2911 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.280072730784027e-07, | |
| "loss": 0.1005, | |
| "step": 2912 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.266481922707689e-07, | |
| "loss": 0.0959, | |
| "step": 2913 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.252906568866953e-07, | |
| "loss": 0.103, | |
| "step": 2914 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.239346679893306e-07, | |
| "loss": 0.1133, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.225802266406121e-07, | |
| "loss": 0.0976, | |
| "step": 2916 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.212273339012672e-07, | |
| "loss": 0.1079, | |
| "step": 2917 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.198759908308088e-07, | |
| "loss": 0.0966, | |
| "step": 2918 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.185261984875359e-07, | |
| "loss": 0.1012, | |
| "step": 2919 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.171779579285352e-07, | |
| "loss": 0.1009, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.158312702096744e-07, | |
| "loss": 0.1098, | |
| "step": 2921 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.144861363856079e-07, | |
| "loss": 0.1035, | |
| "step": 2922 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.131425575097737e-07, | |
| "loss": 0.1033, | |
| "step": 2923 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.118005346343893e-07, | |
| "loss": 0.1059, | |
| "step": 2924 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.104600688104571e-07, | |
| "loss": 0.1036, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.09121161087757e-07, | |
| "loss": 0.0975, | |
| "step": 2926 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.077838125148494e-07, | |
| "loss": 0.1016, | |
| "step": 2927 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.064480241390754e-07, | |
| "loss": 0.0963, | |
| "step": 2928 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.051137970065517e-07, | |
| "loss": 0.1026, | |
| "step": 2929 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.037811321621749e-07, | |
| "loss": 0.098, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.024500306496163e-07, | |
| "loss": 0.1088, | |
| "step": 2931 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 5.011204935113228e-07, | |
| "loss": 0.1079, | |
| "step": 2932 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.997925217885177e-07, | |
| "loss": 0.1149, | |
| "step": 2933 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.984661165211965e-07, | |
| "loss": 0.1019, | |
| "step": 2934 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.971412787481298e-07, | |
| "loss": 0.1008, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.958180095068593e-07, | |
| "loss": 0.1038, | |
| "step": 2936 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.944963098336977e-07, | |
| "loss": 0.114, | |
| "step": 2937 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.931761807637312e-07, | |
| "loss": 0.101, | |
| "step": 2938 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.918576233308125e-07, | |
| "loss": 0.1111, | |
| "step": 2939 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.905406385675667e-07, | |
| "loss": 0.1029, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.892252275053854e-07, | |
| "loss": 0.1197, | |
| "step": 2941 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.879113911744276e-07, | |
| "loss": 0.0958, | |
| "step": 2942 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.865991306036203e-07, | |
| "loss": 0.1126, | |
| "step": 2943 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.85288446820655e-07, | |
| "loss": 0.1239, | |
| "step": 2944 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.839793408519903e-07, | |
| "loss": 0.1155, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.82671813722847e-07, | |
| "loss": 0.1087, | |
| "step": 2946 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.813658664572102e-07, | |
| "loss": 0.1083, | |
| "step": 2947 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.800615000778288e-07, | |
| "loss": 0.0927, | |
| "step": 2948 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.787587156062118e-07, | |
| "loss": 0.0944, | |
| "step": 2949 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.774575140626317e-07, | |
| "loss": 0.1096, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.761578964661187e-07, | |
| "loss": 0.1104, | |
| "step": 2951 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.7485986383446397e-07, | |
| "loss": 0.0903, | |
| "step": 2952 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.7356341718421767e-07, | |
| "loss": 0.103, | |
| "step": 2953 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.722685575306868e-07, | |
| "loss": 0.0977, | |
| "step": 2954 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.7097528588793685e-07, | |
| "loss": 0.0953, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.69683603268789e-07, | |
| "loss": 0.0932, | |
| "step": 2956 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.6839351068481894e-07, | |
| "loss": 0.1127, | |
| "step": 2957 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.671050091463594e-07, | |
| "loss": 0.0873, | |
| "step": 2958 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.6581809966249476e-07, | |
| "loss": 0.1215, | |
| "step": 2959 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.645327832410648e-07, | |
| "loss": 0.1045, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.6324906088866e-07, | |
| "loss": 0.1078, | |
| "step": 2961 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.619669336106225e-07, | |
| "loss": 0.1093, | |
| "step": 2962 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.606864024110469e-07, | |
| "loss": 0.1079, | |
| "step": 2963 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5940746829277544e-07, | |
| "loss": 0.1123, | |
| "step": 2964 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.581301322574025e-07, | |
| "loss": 0.1125, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5685439530526816e-07, | |
| "loss": 0.0891, | |
| "step": 2966 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5558025843546127e-07, | |
| "loss": 0.1063, | |
| "step": 2967 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.543077226458184e-07, | |
| "loss": 0.1204, | |
| "step": 2968 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5303678893292043e-07, | |
| "loss": 0.1091, | |
| "step": 2969 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.517674582920961e-07, | |
| "loss": 0.1058, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.5049973171741673e-07, | |
| "loss": 0.1206, | |
| "step": 2971 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.492336102016967e-07, | |
| "loss": 0.0918, | |
| "step": 2972 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.479690947364965e-07, | |
| "loss": 0.0928, | |
| "step": 2973 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.4670618631211527e-07, | |
| "loss": 0.0983, | |
| "step": 2974 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.454448859175969e-07, | |
| "loss": 0.0995, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.4418519454072386e-07, | |
| "loss": 0.1109, | |
| "step": 2976 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.429271131680182e-07, | |
| "loss": 0.0911, | |
| "step": 2977 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.416706427847431e-07, | |
| "loss": 0.1109, | |
| "step": 2978 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.4041578437489766e-07, | |
| "loss": 0.0972, | |
| "step": 2979 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.3916253892122146e-07, | |
| "loss": 0.1211, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.3791090740518847e-07, | |
| "loss": 0.114, | |
| "step": 2981 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.3666089080700883e-07, | |
| "loss": 0.084, | |
| "step": 2982 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.354124901056295e-07, | |
| "loss": 0.1088, | |
| "step": 2983 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.3416570627873135e-07, | |
| "loss": 0.1002, | |
| "step": 2984 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.329205403027284e-07, | |
| "loss": 0.108, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.316769931527681e-07, | |
| "loss": 0.0955, | |
| "step": 2986 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.3043506580272955e-07, | |
| "loss": 0.1048, | |
| "step": 2987 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.291947592252238e-07, | |
| "loss": 0.0849, | |
| "step": 2988 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.2795607439159395e-07, | |
| "loss": 0.1171, | |
| "step": 2989 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.267190122719106e-07, | |
| "loss": 0.0979, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.2548357383497497e-07, | |
| "loss": 0.1059, | |
| "step": 2991 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.242497600483153e-07, | |
| "loss": 0.0941, | |
| "step": 2992 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.2301757187818953e-07, | |
| "loss": 0.0989, | |
| "step": 2993 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.217870102895821e-07, | |
| "loss": 0.1082, | |
| "step": 2994 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.205580762462022e-07, | |
| "loss": 0.0848, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.1933077071048517e-07, | |
| "loss": 0.0985, | |
| "step": 2996 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.181050946435908e-07, | |
| "loss": 0.1122, | |
| "step": 2997 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.168810490054032e-07, | |
| "loss": 0.1034, | |
| "step": 2998 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.156586347545302e-07, | |
| "loss": 0.0908, | |
| "step": 2999 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.144378528483009e-07, | |
| "loss": 0.0974, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.13218704242766e-07, | |
| "loss": 0.1066, | |
| "step": 3001 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.1200118989269677e-07, | |
| "loss": 0.0827, | |
| "step": 3002 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.107853107515866e-07, | |
| "loss": 0.1065, | |
| "step": 3003 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.095710677716469e-07, | |
| "loss": 0.1154, | |
| "step": 3004 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0835846190380725e-07, | |
| "loss": 0.0935, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.071474940977163e-07, | |
| "loss": 0.0969, | |
| "step": 3006 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0593816530173776e-07, | |
| "loss": 0.0922, | |
| "step": 3007 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.047304764629545e-07, | |
| "loss": 0.0989, | |
| "step": 3008 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.035244285271644e-07, | |
| "loss": 0.0963, | |
| "step": 3009 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0232002243887873e-07, | |
| "loss": 0.1071, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.01117259141324e-07, | |
| "loss": 0.0984, | |
| "step": 3011 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9991613957643977e-07, | |
| "loss": 0.1, | |
| "step": 3012 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9871666468487864e-07, | |
| "loss": 0.1099, | |
| "step": 3013 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.975188354060064e-07, | |
| "loss": 0.0961, | |
| "step": 3014 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.963226526778974e-07, | |
| "loss": 0.0864, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.951281174373395e-07, | |
| "loss": 0.1039, | |
| "step": 3016 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9393523061982684e-07, | |
| "loss": 0.1097, | |
| "step": 3017 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.927439931595656e-07, | |
| "loss": 0.1024, | |
| "step": 3018 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9155440598946935e-07, | |
| "loss": 0.0927, | |
| "step": 3019 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9036647004115884e-07, | |
| "loss": 0.0908, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.891801862449629e-07, | |
| "loss": 0.1088, | |
| "step": 3021 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.8799555552991366e-07, | |
| "loss": 0.1014, | |
| "step": 3022 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.8681257882375144e-07, | |
| "loss": 0.0972, | |
| "step": 3023 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.856312570529211e-07, | |
| "loss": 0.0987, | |
| "step": 3024 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.844515911425692e-07, | |
| "loss": 0.0917, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.8327358201654926e-07, | |
| "loss": 0.0951, | |
| "step": 3026 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.820972305974119e-07, | |
| "loss": 0.1085, | |
| "step": 3027 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.8092253780641437e-07, | |
| "loss": 0.097, | |
| "step": 3028 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7974950456351355e-07, | |
| "loss": 0.0844, | |
| "step": 3029 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.785781317873652e-07, | |
| "loss": 0.1127, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7740842039532767e-07, | |
| "loss": 0.1062, | |
| "step": 3031 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7624037130345374e-07, | |
| "loss": 0.1123, | |
| "step": 3032 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7507398542649844e-07, | |
| "loss": 0.085, | |
| "step": 3033 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.739092636779132e-07, | |
| "loss": 0.0931, | |
| "step": 3034 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.727462069698445e-07, | |
| "loss": 0.1037, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.715848162131383e-07, | |
| "loss": 0.0933, | |
| "step": 3036 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7042509231733083e-07, | |
| "loss": 0.1157, | |
| "step": 3037 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.692670361906575e-07, | |
| "loss": 0.1108, | |
| "step": 3038 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.6811064874004596e-07, | |
| "loss": 0.096, | |
| "step": 3039 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.6695593087111627e-07, | |
| "loss": 0.0993, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.658028834881827e-07, | |
| "loss": 0.1004, | |
| "step": 3041 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.6465150749424943e-07, | |
| "loss": 0.1067, | |
| "step": 3042 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.6350180379101233e-07, | |
| "loss": 0.0842, | |
| "step": 3043 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.62353773278859e-07, | |
| "loss": 0.1155, | |
| "step": 3044 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.6120741685686413e-07, | |
| "loss": 0.1111, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.60062735422794e-07, | |
| "loss": 0.1113, | |
| "step": 3046 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.589197298731012e-07, | |
| "loss": 0.1211, | |
| "step": 3047 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.5777840110292587e-07, | |
| "loss": 0.0964, | |
| "step": 3048 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.566387500060972e-07, | |
| "loss": 0.1089, | |
| "step": 3049 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.5550077747512734e-07, | |
| "loss": 0.1152, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.54364484401217e-07, | |
| "loss": 0.0947, | |
| "step": 3051 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.532298716742494e-07, | |
| "loss": 0.1077, | |
| "step": 3052 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.520969401827917e-07, | |
| "loss": 0.0927, | |
| "step": 3053 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.509656908140971e-07, | |
| "loss": 0.0998, | |
| "step": 3054 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.4983612445409763e-07, | |
| "loss": 0.0892, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.487082419874108e-07, | |
| "loss": 0.0952, | |
| "step": 3056 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.4758204429733297e-07, | |
| "loss": 0.1129, | |
| "step": 3057 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.464575322658417e-07, | |
| "loss": 0.1113, | |
| "step": 3058 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.4533470677359573e-07, | |
| "loss": 0.0979, | |
| "step": 3059 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.442135686999304e-07, | |
| "loss": 0.0944, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.430941189228626e-07, | |
| "loss": 0.0902, | |
| "step": 3061 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.4197635831908495e-07, | |
| "loss": 0.0989, | |
| "step": 3062 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.408602877639669e-07, | |
| "loss": 0.0986, | |
| "step": 3063 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.3974590813155654e-07, | |
| "loss": 0.0996, | |
| "step": 3064 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.3863322029457523e-07, | |
| "loss": 0.1037, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.3752222512442146e-07, | |
| "loss": 0.1014, | |
| "step": 3066 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.36412923491167e-07, | |
| "loss": 0.1004, | |
| "step": 3067 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.353053162635567e-07, | |
| "loss": 0.1202, | |
| "step": 3068 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.341994043090105e-07, | |
| "loss": 0.0828, | |
| "step": 3069 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.3309518849361814e-07, | |
| "loss": 0.0967, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.3199266968214405e-07, | |
| "loss": 0.11, | |
| "step": 3071 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.308918487380211e-07, | |
| "loss": 0.1272, | |
| "step": 3072 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.2979272652335247e-07, | |
| "loss": 0.1012, | |
| "step": 3073 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.2869530389891354e-07, | |
| "loss": 0.0966, | |
| "step": 3074 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.2759958172414563e-07, | |
| "loss": 0.0993, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.265055608571607e-07, | |
| "loss": 0.0908, | |
| "step": 3076 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.254132421547371e-07, | |
| "loss": 0.1215, | |
| "step": 3077 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.243226264723195e-07, | |
| "loss": 0.1038, | |
| "step": 3078 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.232337146640213e-07, | |
| "loss": 0.0995, | |
| "step": 3079 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.221465075826186e-07, | |
| "loss": 0.1054, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.210610060795549e-07, | |
| "loss": 0.1229, | |
| "step": 3081 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.199772110049362e-07, | |
| "loss": 0.0961, | |
| "step": 3082 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.188951232075327e-07, | |
| "loss": 0.097, | |
| "step": 3083 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.1781474353477866e-07, | |
| "loss": 0.0977, | |
| "step": 3084 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.167360728327681e-07, | |
| "loss": 0.0985, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.1565911194625985e-07, | |
| "loss": 0.0925, | |
| "step": 3086 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.145838617186711e-07, | |
| "loss": 0.0938, | |
| "step": 3087 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.1351032299208026e-07, | |
| "loss": 0.0965, | |
| "step": 3088 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.124384966072258e-07, | |
| "loss": 0.1141, | |
| "step": 3089 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.113683834035042e-07, | |
| "loss": 0.1327, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.102999842189719e-07, | |
| "loss": 0.1152, | |
| "step": 3091 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.092332998903416e-07, | |
| "loss": 0.099, | |
| "step": 3092 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.0816833125298234e-07, | |
| "loss": 0.1064, | |
| "step": 3093 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.071050791409222e-07, | |
| "loss": 0.1094, | |
| "step": 3094 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.06043544386842e-07, | |
| "loss": 0.1133, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.0498372782207993e-07, | |
| "loss": 0.1005, | |
| "step": 3096 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.039256302766275e-07, | |
| "loss": 0.105, | |
| "step": 3097 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.028692525791291e-07, | |
| "loss": 0.0916, | |
| "step": 3098 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.018145955568841e-07, | |
| "loss": 0.0969, | |
| "step": 3099 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.007616600358437e-07, | |
| "loss": 0.1027, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.997104468406103e-07, | |
| "loss": 0.0867, | |
| "step": 3101 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.986609567944379e-07, | |
| "loss": 0.1013, | |
| "step": 3102 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.9761319071923035e-07, | |
| "loss": 0.1099, | |
| "step": 3103 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.9656714943554264e-07, | |
| "loss": 0.1141, | |
| "step": 3104 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.9552283376257846e-07, | |
| "loss": 0.0873, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.944802445181899e-07, | |
| "loss": 0.0846, | |
| "step": 3106 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.934393825188769e-07, | |
| "loss": 0.0842, | |
| "step": 3107 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.924002485797864e-07, | |
| "loss": 0.1027, | |
| "step": 3108 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.913628435147131e-07, | |
| "loss": 0.1059, | |
| "step": 3109 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.9032716813609726e-07, | |
| "loss": 0.093, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.8929322325502384e-07, | |
| "loss": 0.0978, | |
| "step": 3111 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.882610096812247e-07, | |
| "loss": 0.0899, | |
| "step": 3112 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.8723052822307197e-07, | |
| "loss": 0.094, | |
| "step": 3113 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.862017796875846e-07, | |
| "loss": 0.0993, | |
| "step": 3114 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.851747648804243e-07, | |
| "loss": 0.1346, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.841494846058929e-07, | |
| "loss": 0.0903, | |
| "step": 3116 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.831259396669361e-07, | |
| "loss": 0.0988, | |
| "step": 3117 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.8210413086513845e-07, | |
| "loss": 0.1089, | |
| "step": 3118 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.8108405900072605e-07, | |
| "loss": 0.1007, | |
| "step": 3119 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.800657248725658e-07, | |
| "loss": 0.1054, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.790491292781611e-07, | |
| "loss": 0.1069, | |
| "step": 3121 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.7803427301365676e-07, | |
| "loss": 0.1043, | |
| "step": 3122 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.7702115687383205e-07, | |
| "loss": 0.1095, | |
| "step": 3123 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.760097816521065e-07, | |
| "loss": 0.1076, | |
| "step": 3124 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.750001481405351e-07, | |
| "loss": 0.0997, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.7399225712980813e-07, | |
| "loss": 0.0975, | |
| "step": 3126 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.729861094092534e-07, | |
| "loss": 0.1089, | |
| "step": 3127 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.7198170576682975e-07, | |
| "loss": 0.0993, | |
| "step": 3128 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 2.709790469891335e-07, | |
| "loss": 0.082, | |
| "step": 3129 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.699781338613938e-07, | |
| "loss": 0.1015, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.689789671674717e-07, | |
| "loss": 0.0949, | |
| "step": 3131 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.67981547689862e-07, | |
| "loss": 0.0955, | |
| "step": 3132 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.6698587620968857e-07, | |
| "loss": 0.0986, | |
| "step": 3133 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.6599195350670904e-07, | |
| "loss": 0.1, | |
| "step": 3134 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.64999780359311e-07, | |
| "loss": 0.105, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.640093575445102e-07, | |
| "loss": 0.0971, | |
| "step": 3136 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.6302068583795384e-07, | |
| "loss": 0.0883, | |
| "step": 3137 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.6203376601391635e-07, | |
| "loss": 0.1002, | |
| "step": 3138 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.6104859884530015e-07, | |
| "loss": 0.1066, | |
| "step": 3139 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.600651851036362e-07, | |
| "loss": 0.0873, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.590835255590807e-07, | |
| "loss": 0.1051, | |
| "step": 3141 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.5810362098041747e-07, | |
| "loss": 0.0979, | |
| "step": 3142 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.571254721350555e-07, | |
| "loss": 0.091, | |
| "step": 3143 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.5614907978902757e-07, | |
| "loss": 0.0994, | |
| "step": 3144 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.551744447069934e-07, | |
| "loss": 0.103, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.5420156765223385e-07, | |
| "loss": 0.1063, | |
| "step": 3146 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.5323044938665496e-07, | |
| "loss": 0.1079, | |
| "step": 3147 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.5226109067078423e-07, | |
| "loss": 0.0898, | |
| "step": 3148 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.512934922637714e-07, | |
| "loss": 0.0918, | |
| "step": 3149 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.503276549233882e-07, | |
| "loss": 0.1044, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.4936357940602616e-07, | |
| "loss": 0.1117, | |
| "step": 3151 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.484012664666985e-07, | |
| "loss": 0.1151, | |
| "step": 3152 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.474407168590368e-07, | |
| "loss": 0.0943, | |
| "step": 3153 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.4648193133529173e-07, | |
| "loss": 0.1104, | |
| "step": 3154 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.455249106463334e-07, | |
| "loss": 0.0838, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.445696555416485e-07, | |
| "loss": 0.1115, | |
| "step": 3156 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.436161667693429e-07, | |
| "loss": 0.1027, | |
| "step": 3157 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.42664445076137e-07, | |
| "loss": 0.1116, | |
| "step": 3158 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.417144912073682e-07, | |
| "loss": 0.1089, | |
| "step": 3159 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.4076630590699065e-07, | |
| "loss": 0.1084, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.3981988991757105e-07, | |
| "loss": 0.1184, | |
| "step": 3161 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.388752439802927e-07, | |
| "loss": 0.12, | |
| "step": 3162 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.3793236883495164e-07, | |
| "loss": 0.1189, | |
| "step": 3163 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.3699126521995631e-07, | |
| "loss": 0.1095, | |
| "step": 3164 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.3605193387232995e-07, | |
| "loss": 0.1082, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 2.3511437552770577e-07, | |
| "loss": 0.1109, | |
| "step": 3166 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.3417859092033006e-07, | |
| "loss": 0.1059, | |
| "step": 3167 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.3324458078305883e-07, | |
| "loss": 0.0933, | |
| "step": 3168 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.3231234584735834e-07, | |
| "loss": 0.0985, | |
| "step": 3169 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.3138188684330599e-07, | |
| "loss": 0.0837, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.3045320449958692e-07, | |
| "loss": 0.0881, | |
| "step": 3171 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2952629954349603e-07, | |
| "loss": 0.1018, | |
| "step": 3172 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2860117270093518e-07, | |
| "loss": 0.093, | |
| "step": 3173 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2767782469641398e-07, | |
| "loss": 0.1058, | |
| "step": 3174 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2675625625305009e-07, | |
| "loss": 0.1095, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2583646809256565e-07, | |
| "loss": 0.123, | |
| "step": 3176 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.249184609352903e-07, | |
| "loss": 0.1108, | |
| "step": 3177 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2400223550015782e-07, | |
| "loss": 0.0932, | |
| "step": 3178 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2308779250470647e-07, | |
| "loss": 0.1182, | |
| "step": 3179 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2217513266508007e-07, | |
| "loss": 0.1018, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.212642566960241e-07, | |
| "loss": 0.0997, | |
| "step": 3181 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.2035516531088873e-07, | |
| "loss": 0.1233, | |
| "step": 3182 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.194478592216251e-07, | |
| "loss": 0.0948, | |
| "step": 3183 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.185423391387864e-07, | |
| "loss": 0.117, | |
| "step": 3184 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1763860577152851e-07, | |
| "loss": 0.0948, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1673665982760606e-07, | |
| "loss": 0.1169, | |
| "step": 3186 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1583650201337597e-07, | |
| "loss": 0.0783, | |
| "step": 3187 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1493813303379296e-07, | |
| "loss": 0.0875, | |
| "step": 3188 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.140415535924112e-07, | |
| "loss": 0.1155, | |
| "step": 3189 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1314676439138482e-07, | |
| "loss": 0.1101, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1225376613146358e-07, | |
| "loss": 0.1104, | |
| "step": 3191 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1136255951199724e-07, | |
| "loss": 0.1169, | |
| "step": 3192 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.1047314523093028e-07, | |
| "loss": 0.1028, | |
| "step": 3193 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0958552398480436e-07, | |
| "loss": 0.1085, | |
| "step": 3194 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0869969646875766e-07, | |
| "loss": 0.1041, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0781566337652188e-07, | |
| "loss": 0.1056, | |
| "step": 3196 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0693342540042543e-07, | |
| "loss": 0.11, | |
| "step": 3197 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0605298323138957e-07, | |
| "loss": 0.0916, | |
| "step": 3198 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0517433755892879e-07, | |
| "loss": 0.0957, | |
| "step": 3199 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0429748907115243e-07, | |
| "loss": 0.0969, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0342243845476061e-07, | |
| "loss": 0.1147, | |
| "step": 3201 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.0254918639504657e-07, | |
| "loss": 0.109, | |
| "step": 3202 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.016777335758946e-07, | |
| "loss": 0.1032, | |
| "step": 3203 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.0080808067977936e-07, | |
| "loss": 0.0956, | |
| "step": 3204 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9994022838776717e-07, | |
| "loss": 0.1168, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9907417737951296e-07, | |
| "loss": 0.1111, | |
| "step": 3206 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.982099283332617e-07, | |
| "loss": 0.1069, | |
| "step": 3207 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.973474819258478e-07, | |
| "loss": 0.0829, | |
| "step": 3208 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.964868388326918e-07, | |
| "loss": 0.1137, | |
| "step": 3209 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9562799972780433e-07, | |
| "loss": 0.0869, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9477096528378147e-07, | |
| "loss": 0.0974, | |
| "step": 3211 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.939157361718072e-07, | |
| "loss": 0.1112, | |
| "step": 3212 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9306231306165186e-07, | |
| "loss": 0.1046, | |
| "step": 3213 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.922106966216694e-07, | |
| "loss": 0.1043, | |
| "step": 3214 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.9136088751880138e-07, | |
| "loss": 0.0939, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.905128864185718e-07, | |
| "loss": 0.0964, | |
| "step": 3216 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8966669398509024e-07, | |
| "loss": 0.1064, | |
| "step": 3217 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.888223108810505e-07, | |
| "loss": 0.0945, | |
| "step": 3218 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8797973776772614e-07, | |
| "loss": 0.1234, | |
| "step": 3219 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8713897530497655e-07, | |
| "loss": 0.0949, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8630002415124194e-07, | |
| "loss": 0.1196, | |
| "step": 3221 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8546288496354341e-07, | |
| "loss": 0.0904, | |
| "step": 3222 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8462755839748492e-07, | |
| "loss": 0.0978, | |
| "step": 3223 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8379404510724757e-07, | |
| "loss": 0.0979, | |
| "step": 3224 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8296234574559535e-07, | |
| "loss": 0.0986, | |
| "step": 3225 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8213246096387112e-07, | |
| "loss": 0.1144, | |
| "step": 3226 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.813043914119955e-07, | |
| "loss": 0.1025, | |
| "step": 3227 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.8047813773846956e-07, | |
| "loss": 0.1118, | |
| "step": 3228 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7965370059036897e-07, | |
| "loss": 0.0991, | |
| "step": 3229 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7883108061335002e-07, | |
| "loss": 0.0973, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7801027845164492e-07, | |
| "loss": 0.0993, | |
| "step": 3231 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7719129474806111e-07, | |
| "loss": 0.1103, | |
| "step": 3232 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7637413014398365e-07, | |
| "loss": 0.1013, | |
| "step": 3233 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7555878527937164e-07, | |
| "loss": 0.1036, | |
| "step": 3234 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7474526079275937e-07, | |
| "loss": 0.1006, | |
| "step": 3235 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7393355732125666e-07, | |
| "loss": 0.09, | |
| "step": 3236 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7312367550054498e-07, | |
| "loss": 0.1068, | |
| "step": 3237 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7231561596488143e-07, | |
| "loss": 0.109, | |
| "step": 3238 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 1.7150937934709454e-07, | |
| "loss": 0.0912, | |
| "step": 3239 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.7070496627858562e-07, | |
| "loss": 0.0966, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6990237738932824e-07, | |
| "loss": 0.0981, | |
| "step": 3241 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6910161330786683e-07, | |
| "loss": 0.0985, | |
| "step": 3242 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6830267466131727e-07, | |
| "loss": 0.1033, | |
| "step": 3243 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6750556207536544e-07, | |
| "loss": 0.1018, | |
| "step": 3244 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6671027617426698e-07, | |
| "loss": 0.0923, | |
| "step": 3245 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6591681758084788e-07, | |
| "loss": 0.1067, | |
| "step": 3246 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.651251869165016e-07, | |
| "loss": 0.1062, | |
| "step": 3247 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6433538480119164e-07, | |
| "loss": 0.0931, | |
| "step": 3248 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6354741185344886e-07, | |
| "loss": 0.0957, | |
| "step": 3249 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.627612686903704e-07, | |
| "loss": 0.1019, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6197695592762296e-07, | |
| "loss": 0.1152, | |
| "step": 3251 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.6119447417943745e-07, | |
| "loss": 0.1054, | |
| "step": 3252 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.60413824058612e-07, | |
| "loss": 0.0905, | |
| "step": 3253 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5963500617651005e-07, | |
| "loss": 0.1012, | |
| "step": 3254 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5885802114305987e-07, | |
| "loss": 0.1053, | |
| "step": 3255 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5808286956675496e-07, | |
| "loss": 0.1086, | |
| "step": 3256 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5730955205465164e-07, | |
| "loss": 0.104, | |
| "step": 3257 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5653806921237218e-07, | |
| "loss": 0.0942, | |
| "step": 3258 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5576842164409993e-07, | |
| "loss": 0.1138, | |
| "step": 3259 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5500060995258136e-07, | |
| "loss": 0.0983, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.542346347391263e-07, | |
| "loss": 0.1286, | |
| "step": 3261 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5347049660360463e-07, | |
| "loss": 0.0863, | |
| "step": 3262 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.527081961444493e-07, | |
| "loss": 0.1044, | |
| "step": 3263 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5194773395865309e-07, | |
| "loss": 0.0974, | |
| "step": 3264 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5118911064176872e-07, | |
| "loss": 0.1074, | |
| "step": 3265 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.5043232678791015e-07, | |
| "loss": 0.0874, | |
| "step": 3266 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4967738298974909e-07, | |
| "loss": 0.1157, | |
| "step": 3267 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.489242798385182e-07, | |
| "loss": 0.0738, | |
| "step": 3268 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4817301792400706e-07, | |
| "loss": 0.0897, | |
| "step": 3269 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4742359783456367e-07, | |
| "loss": 0.0863, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.466760201570938e-07, | |
| "loss": 0.1033, | |
| "step": 3271 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.459302854770603e-07, | |
| "loss": 0.1219, | |
| "step": 3272 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4518639437848313e-07, | |
| "loss": 0.0965, | |
| "step": 3273 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.444443474439375e-07, | |
| "loss": 0.0867, | |
| "step": 3274 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4370414525455457e-07, | |
| "loss": 0.0976, | |
| "step": 3275 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 1.4296578839002207e-07, | |
| "loss": 0.0939, | |
| "step": 3276 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.422292774285805e-07, | |
| "loss": 0.1054, | |
| "step": 3277 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.4149461294702683e-07, | |
| "loss": 0.1098, | |
| "step": 3278 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.407617955207105e-07, | |
| "loss": 0.1118, | |
| "step": 3279 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.4003082572353472e-07, | |
| "loss": 0.1042, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3930170412795653e-07, | |
| "loss": 0.1095, | |
| "step": 3281 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3857443130498388e-07, | |
| "loss": 0.0973, | |
| "step": 3282 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3784900782417926e-07, | |
| "loss": 0.0953, | |
| "step": 3283 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3712543425365489e-07, | |
| "loss": 0.1051, | |
| "step": 3284 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.364037111600744e-07, | |
| "loss": 0.0981, | |
| "step": 3285 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3568383910865345e-07, | |
| "loss": 0.1, | |
| "step": 3286 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3496581866315694e-07, | |
| "loss": 0.1142, | |
| "step": 3287 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3424965038590032e-07, | |
| "loss": 0.0956, | |
| "step": 3288 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3353533483774828e-07, | |
| "loss": 0.1108, | |
| "step": 3289 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3282287257811416e-07, | |
| "loss": 0.1109, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3211226416496053e-07, | |
| "loss": 0.1007, | |
| "step": 3291 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3140351015479778e-07, | |
| "loss": 0.0858, | |
| "step": 3292 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.3069661110268444e-07, | |
| "loss": 0.1123, | |
| "step": 3293 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2999156756222596e-07, | |
| "loss": 0.0997, | |
| "step": 3294 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2928838008557403e-07, | |
| "loss": 0.101, | |
| "step": 3295 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.285870492234287e-07, | |
| "loss": 0.1093, | |
| "step": 3296 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2788757552503333e-07, | |
| "loss": 0.0836, | |
| "step": 3297 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2718995953817954e-07, | |
| "loss": 0.11, | |
| "step": 3298 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2649420180920245e-07, | |
| "loss": 0.0984, | |
| "step": 3299 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2580030288298183e-07, | |
| "loss": 0.0999, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2510826330294267e-07, | |
| "loss": 0.1163, | |
| "step": 3301 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2441808361105312e-07, | |
| "loss": 0.111, | |
| "step": 3302 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2372976434782524e-07, | |
| "loss": 0.1034, | |
| "step": 3303 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2304330605231423e-07, | |
| "loss": 0.0866, | |
| "step": 3304 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.223587092621162e-07, | |
| "loss": 0.0971, | |
| "step": 3305 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2167597451337192e-07, | |
| "loss": 0.1077, | |
| "step": 3306 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2099510234076213e-07, | |
| "loss": 0.0995, | |
| "step": 3307 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.2031609327750938e-07, | |
| "loss": 0.0876, | |
| "step": 3308 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.1963894785537834e-07, | |
| "loss": 0.1043, | |
| "step": 3309 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.1896366660467174e-07, | |
| "loss": 0.1068, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.1829025005423461e-07, | |
| "loss": 0.1162, | |
| "step": 3311 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 1.1761869873144981e-07, | |
| "loss": 0.1119, | |
| "step": 3312 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.169490131622411e-07, | |
| "loss": 0.1104, | |
| "step": 3313 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1628119387107118e-07, | |
| "loss": 0.1032, | |
| "step": 3314 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1561524138093861e-07, | |
| "loss": 0.1053, | |
| "step": 3315 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1495115621338316e-07, | |
| "loss": 0.119, | |
| "step": 3316 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1428893888847992e-07, | |
| "loss": 0.1109, | |
| "step": 3317 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1362858992484238e-07, | |
| "loss": 0.0969, | |
| "step": 3318 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1297010983962131e-07, | |
| "loss": 0.0989, | |
| "step": 3319 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1231349914850142e-07, | |
| "loss": 0.0878, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1165875836570666e-07, | |
| "loss": 0.0961, | |
| "step": 3321 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1100588800399381e-07, | |
| "loss": 0.1005, | |
| "step": 3322 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.1035488857465665e-07, | |
| "loss": 0.1189, | |
| "step": 3323 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0970576058752375e-07, | |
| "loss": 0.1032, | |
| "step": 3324 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0905850455095595e-07, | |
| "loss": 0.1022, | |
| "step": 3325 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0841312097185059e-07, | |
| "loss": 0.1125, | |
| "step": 3326 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0776961035563665e-07, | |
| "loss": 0.1149, | |
| "step": 3327 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0712797320627799e-07, | |
| "loss": 0.0973, | |
| "step": 3328 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0648821002627069e-07, | |
| "loss": 0.0976, | |
| "step": 3329 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0585032131664258e-07, | |
| "loss": 0.1144, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0521430757695405e-07, | |
| "loss": 0.0849, | |
| "step": 3331 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0458016930529669e-07, | |
| "loss": 0.1199, | |
| "step": 3332 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0394790699829404e-07, | |
| "loss": 0.0945, | |
| "step": 3333 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0331752115110034e-07, | |
| "loss": 0.0985, | |
| "step": 3334 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.026890122573998e-07, | |
| "loss": 0.0962, | |
| "step": 3335 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0206238080940678e-07, | |
| "loss": 0.1074, | |
| "step": 3336 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0143762729786566e-07, | |
| "loss": 0.1134, | |
| "step": 3337 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0081475221205006e-07, | |
| "loss": 0.1034, | |
| "step": 3338 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 1.0019375603976255e-07, | |
| "loss": 0.1011, | |
| "step": 3339 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.957463926733385e-08, | |
| "loss": 0.0999, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.895740237962276e-08, | |
| "loss": 0.1009, | |
| "step": 3341 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.834204586001705e-08, | |
| "loss": 0.1052, | |
| "step": 3342 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.772857019043014e-08, | |
| "loss": 0.0892, | |
| "step": 3343 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.711697585130408e-08, | |
| "loss": 0.1035, | |
| "step": 3344 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.65072633216066e-08, | |
| "loss": 0.0962, | |
| "step": 3345 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.589943307883154e-08, | |
| "loss": 0.0989, | |
| "step": 3346 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.529348559899981e-08, | |
| "loss": 0.1025, | |
| "step": 3347 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.468942135665598e-08, | |
| "loss": 0.0908, | |
| "step": 3348 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 9.408724082487192e-08, | |
| "loss": 0.1032, | |
| "step": 3349 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.34869444752426e-08, | |
| "loss": 0.1045, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.288853277788806e-08, | |
| "loss": 0.0946, | |
| "step": 3351 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.229200620145262e-08, | |
| "loss": 0.1125, | |
| "step": 3352 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.169736521310341e-08, | |
| "loss": 0.0945, | |
| "step": 3353 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.110461027853206e-08, | |
| "loss": 0.0936, | |
| "step": 3354 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 9.051374186195256e-08, | |
| "loss": 0.1087, | |
| "step": 3355 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.992476042610054e-08, | |
| "loss": 0.0989, | |
| "step": 3356 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.933766643223568e-08, | |
| "loss": 0.0948, | |
| "step": 3357 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.875246034013823e-08, | |
| "loss": 0.1116, | |
| "step": 3358 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.816914260811022e-08, | |
| "loss": 0.0975, | |
| "step": 3359 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.758771369297536e-08, | |
| "loss": 0.1072, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.700817405007695e-08, | |
| "loss": 0.1079, | |
| "step": 3361 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.643052413327995e-08, | |
| "loss": 0.0994, | |
| "step": 3362 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.585476439496837e-08, | |
| "loss": 0.1025, | |
| "step": 3363 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.528089528604705e-08, | |
| "loss": 0.1059, | |
| "step": 3364 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.470891725593955e-08, | |
| "loss": 0.1164, | |
| "step": 3365 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.413883075258756e-08, | |
| "loss": 0.1018, | |
| "step": 3366 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.357063622245365e-08, | |
| "loss": 0.0899, | |
| "step": 3367 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.300433411051606e-08, | |
| "loss": 0.0955, | |
| "step": 3368 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.243992486027336e-08, | |
| "loss": 0.1028, | |
| "step": 3369 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.18774089137403e-08, | |
| "loss": 0.0968, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.131678671144893e-08, | |
| "loss": 0.1093, | |
| "step": 3371 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.075805869244918e-08, | |
| "loss": 0.1028, | |
| "step": 3372 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 8.020122529430657e-08, | |
| "loss": 0.098, | |
| "step": 3373 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.964628695310339e-08, | |
| "loss": 0.1066, | |
| "step": 3374 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.909324410343755e-08, | |
| "loss": 0.1195, | |
| "step": 3375 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.854209717842231e-08, | |
| "loss": 0.1013, | |
| "step": 3376 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.799284660968686e-08, | |
| "loss": 0.0889, | |
| "step": 3377 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.74454928273749e-08, | |
| "loss": 0.1037, | |
| "step": 3378 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.690003626014464e-08, | |
| "loss": 0.1064, | |
| "step": 3379 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.635647733516827e-08, | |
| "loss": 0.1083, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.581481647813194e-08, | |
| "loss": 0.1011, | |
| "step": 3381 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.527505411323605e-08, | |
| "loss": 0.0996, | |
| "step": 3382 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.473719066319273e-08, | |
| "loss": 0.1053, | |
| "step": 3383 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.420122654922867e-08, | |
| "loss": 0.0991, | |
| "step": 3384 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.3667162191082e-08, | |
| "loss": 0.0987, | |
| "step": 3385 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 7.313499800700285e-08, | |
| "loss": 0.0943, | |
| "step": 3386 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.260473441375482e-08, | |
| "loss": 0.1086, | |
| "step": 3387 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.20763718266107e-08, | |
| "loss": 0.0971, | |
| "step": 3388 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.154991065935702e-08, | |
| "loss": 0.109, | |
| "step": 3389 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.102535132428951e-08, | |
| "loss": 0.1082, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 7.050269423221429e-08, | |
| "loss": 0.1029, | |
| "step": 3391 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.99819397924495e-08, | |
| "loss": 0.097, | |
| "step": 3392 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.946308841282168e-08, | |
| "loss": 0.0939, | |
| "step": 3393 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.894614049966747e-08, | |
| "loss": 0.107, | |
| "step": 3394 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.843109645783302e-08, | |
| "loss": 0.1109, | |
| "step": 3395 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.791795669067264e-08, | |
| "loss": 0.1117, | |
| "step": 3396 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.740672160005068e-08, | |
| "loss": 0.0909, | |
| "step": 3397 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.689739158633885e-08, | |
| "loss": 0.0941, | |
| "step": 3398 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.638996704841693e-08, | |
| "loss": 0.1006, | |
| "step": 3399 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.588444838367347e-08, | |
| "loss": 0.1157, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.538083598800232e-08, | |
| "loss": 0.0904, | |
| "step": 3401 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.487913025580717e-08, | |
| "loss": 0.1004, | |
| "step": 3402 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.437933157999593e-08, | |
| "loss": 0.0911, | |
| "step": 3403 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.388144035198468e-08, | |
| "loss": 0.104, | |
| "step": 3404 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.338545696169595e-08, | |
| "loss": 0.0948, | |
| "step": 3405 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.289138179755599e-08, | |
| "loss": 0.0983, | |
| "step": 3406 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.239921524649888e-08, | |
| "loss": 0.0939, | |
| "step": 3407 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.190895769396272e-08, | |
| "loss": 0.1043, | |
| "step": 3408 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.142060952389146e-08, | |
| "loss": 0.0929, | |
| "step": 3409 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.093417111873306e-08, | |
| "loss": 0.0883, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 6.044964285944005e-08, | |
| "loss": 0.1071, | |
| "step": 3411 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.996702512546887e-08, | |
| "loss": 0.1238, | |
| "step": 3412 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.948631829477997e-08, | |
| "loss": 0.1083, | |
| "step": 3413 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.9007522743837475e-08, | |
| "loss": 0.1068, | |
| "step": 3414 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.85306388476084e-08, | |
| "loss": 0.1137, | |
| "step": 3415 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.805566697956233e-08, | |
| "loss": 0.0938, | |
| "step": 3416 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.758260751167227e-08, | |
| "loss": 0.1072, | |
| "step": 3417 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.711146081441271e-08, | |
| "loss": 0.1012, | |
| "step": 3418 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.664222725676044e-08, | |
| "loss": 0.1082, | |
| "step": 3419 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.6174907206195115e-08, | |
| "loss": 0.1065, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.5709501028695376e-08, | |
| "loss": 0.0978, | |
| "step": 3421 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 5.524600908874356e-08, | |
| "loss": 0.0914, | |
| "step": 3422 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.478443174932069e-08, | |
| "loss": 0.1009, | |
| "step": 3423 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.432476937191039e-08, | |
| "loss": 0.107, | |
| "step": 3424 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.3867022316495266e-08, | |
| "loss": 0.1161, | |
| "step": 3425 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.341119094155856e-08, | |
| "loss": 0.1037, | |
| "step": 3426 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.295727560408248e-08, | |
| "loss": 0.1035, | |
| "step": 3427 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.250527665954963e-08, | |
| "loss": 0.0976, | |
| "step": 3428 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.20551944619413e-08, | |
| "loss": 0.1227, | |
| "step": 3429 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.1607029363738016e-08, | |
| "loss": 0.1108, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.1160781715918484e-08, | |
| "loss": 0.0922, | |
| "step": 3431 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.071645186796009e-08, | |
| "loss": 0.108, | |
| "step": 3432 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 5.027404016783782e-08, | |
| "loss": 0.0973, | |
| "step": 3433 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.9833546962025084e-08, | |
| "loss": 0.1034, | |
| "step": 3434 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.9394972595492866e-08, | |
| "loss": 0.0812, | |
| "step": 3435 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.89583174117092e-08, | |
| "loss": 0.1164, | |
| "step": 3436 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.8523581752638603e-08, | |
| "loss": 0.0959, | |
| "step": 3437 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.809076595874262e-08, | |
| "loss": 0.1052, | |
| "step": 3438 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.765987036898012e-08, | |
| "loss": 0.0938, | |
| "step": 3439 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.723089532080505e-08, | |
| "loss": 0.1084, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.6803841150168114e-08, | |
| "loss": 0.0885, | |
| "step": 3441 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.637870819151485e-08, | |
| "loss": 0.0973, | |
| "step": 3442 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.595549677778643e-08, | |
| "loss": 0.0986, | |
| "step": 3443 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.553420724041996e-08, | |
| "loss": 0.1047, | |
| "step": 3444 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.511483990934679e-08, | |
| "loss": 0.1022, | |
| "step": 3445 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.4697395112992815e-08, | |
| "loss": 0.0927, | |
| "step": 3446 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.428187317827848e-08, | |
| "loss": 0.1053, | |
| "step": 3447 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.38682744306182e-08, | |
| "loss": 0.1018, | |
| "step": 3448 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.345659919392037e-08, | |
| "loss": 0.0999, | |
| "step": 3449 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.3046847790587653e-08, | |
| "loss": 0.1018, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.2639020541514756e-08, | |
| "loss": 0.1037, | |
| "step": 3451 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.2233117766090346e-08, | |
| "loss": 0.1116, | |
| "step": 3452 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.1829139782195717e-08, | |
| "loss": 0.1036, | |
| "step": 3453 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.142708690620473e-08, | |
| "loss": 0.0988, | |
| "step": 3454 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.1026959452984125e-08, | |
| "loss": 0.1011, | |
| "step": 3455 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.0628757735891866e-08, | |
| "loss": 0.0987, | |
| "step": 3456 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 4.023248206677821e-08, | |
| "loss": 0.1045, | |
| "step": 3457 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 3.983813275598547e-08, | |
| "loss": 0.0979, | |
| "step": 3458 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 3.944571011234632e-08, | |
| "loss": 0.0953, | |
| "step": 3459 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.905521444318605e-08, | |
| "loss": 0.0797, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.866664605431919e-08, | |
| "loss": 0.0802, | |
| "step": 3461 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.828000525005204e-08, | |
| "loss": 0.0946, | |
| "step": 3462 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.789529233318101e-08, | |
| "loss": 0.0904, | |
| "step": 3463 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.751250760499231e-08, | |
| "loss": 0.1008, | |
| "step": 3464 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.713165136526309e-08, | |
| "loss": 0.1174, | |
| "step": 3465 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.675272391225948e-08, | |
| "loss": 0.1094, | |
| "step": 3466 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.637572554273633e-08, | |
| "loss": 0.1156, | |
| "step": 3467 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.600065655193968e-08, | |
| "loss": 0.1031, | |
| "step": 3468 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.5627517233602913e-08, | |
| "loss": 0.0742, | |
| "step": 3469 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.5256307879948936e-08, | |
| "loss": 0.1015, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.488702878168909e-08, | |
| "loss": 0.1261, | |
| "step": 3471 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.45196802280226e-08, | |
| "loss": 0.0852, | |
| "step": 3472 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.415426250663795e-08, | |
| "loss": 0.0962, | |
| "step": 3473 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.3790775903710105e-08, | |
| "loss": 0.1038, | |
| "step": 3474 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.3429220703902476e-08, | |
| "loss": 0.1091, | |
| "step": 3475 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.306959719036607e-08, | |
| "loss": 0.093, | |
| "step": 3476 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.271190564473836e-08, | |
| "loss": 0.1074, | |
| "step": 3477 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.2356146347144736e-08, | |
| "loss": 0.0943, | |
| "step": 3478 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.2002319576196204e-08, | |
| "loss": 0.103, | |
| "step": 3479 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.16504256089914e-08, | |
| "loss": 0.0997, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.130046472111487e-08, | |
| "loss": 0.114, | |
| "step": 3481 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.0952437186637094e-08, | |
| "loss": 0.1208, | |
| "step": 3482 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.0606343278114516e-08, | |
| "loss": 0.147, | |
| "step": 3483 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 3.026218326658947e-08, | |
| "loss": 0.0858, | |
| "step": 3484 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.9919957421589985e-08, | |
| "loss": 0.0965, | |
| "step": 3485 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.9579666011128604e-08, | |
| "loss": 0.0847, | |
| "step": 3486 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.924130930170327e-08, | |
| "loss": 0.1004, | |
| "step": 3487 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.890488755829729e-08, | |
| "loss": 0.1056, | |
| "step": 3488 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.857040104437797e-08, | |
| "loss": 0.1181, | |
| "step": 3489 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.823785002189716e-08, | |
| "loss": 0.0969, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.7907234751291256e-08, | |
| "loss": 0.1111, | |
| "step": 3491 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.7578555491480087e-08, | |
| "loss": 0.1008, | |
| "step": 3492 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.7251812499868302e-08, | |
| "loss": 0.0861, | |
| "step": 3493 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.6927006032342605e-08, | |
| "loss": 0.0849, | |
| "step": 3494 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.660413634327508e-08, | |
| "loss": 0.0823, | |
| "step": 3495 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 2.628320368551929e-08, | |
| "loss": 0.1056, | |
| "step": 3496 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.596420831041252e-08, | |
| "loss": 0.0957, | |
| "step": 3497 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.564715046777522e-08, | |
| "loss": 0.0947, | |
| "step": 3498 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.533203040590959e-08, | |
| "loss": 0.1093, | |
| "step": 3499 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.5018848371601846e-08, | |
| "loss": 0.1075, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.4707604610118287e-08, | |
| "loss": 0.0863, | |
| "step": 3501 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.4398299365208933e-08, | |
| "loss": 0.0997, | |
| "step": 3502 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.409093287910502e-08, | |
| "loss": 0.1126, | |
| "step": 3503 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.3785505392519557e-08, | |
| "loss": 0.0919, | |
| "step": 3504 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.348201714464704e-08, | |
| "loss": 0.0887, | |
| "step": 3505 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.3180468373163177e-08, | |
| "loss": 0.0987, | |
| "step": 3506 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.2880859314224623e-08, | |
| "loss": 0.1334, | |
| "step": 3507 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.2583190202469517e-08, | |
| "loss": 0.1106, | |
| "step": 3508 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.2287461271015832e-08, | |
| "loss": 0.0856, | |
| "step": 3509 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.199367275146358e-08, | |
| "loss": 0.0955, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.1701824873891497e-08, | |
| "loss": 0.1121, | |
| "step": 3511 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.1411917866859256e-08, | |
| "loss": 0.0944, | |
| "step": 3512 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.1123951957406908e-08, | |
| "loss": 0.105, | |
| "step": 3513 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.0837927371053222e-08, | |
| "loss": 0.0995, | |
| "step": 3514 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.055384433179819e-08, | |
| "loss": 0.1082, | |
| "step": 3515 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 2.027170306212023e-08, | |
| "loss": 0.0944, | |
| "step": 3516 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.999150378297676e-08, | |
| "loss": 0.0997, | |
| "step": 3517 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.9713246713805588e-08, | |
| "loss": 0.0962, | |
| "step": 3518 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.943693207252212e-08, | |
| "loss": 0.0978, | |
| "step": 3519 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.9162560075521586e-08, | |
| "loss": 0.0908, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.8890130937677388e-08, | |
| "loss": 0.0976, | |
| "step": 3521 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.8619644872341636e-08, | |
| "loss": 0.1011, | |
| "step": 3522 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.835110209134433e-08, | |
| "loss": 0.0886, | |
| "step": 3523 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.8084502804993355e-08, | |
| "loss": 0.1156, | |
| "step": 3524 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.781984722207586e-08, | |
| "loss": 0.099, | |
| "step": 3525 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.7557135549855497e-08, | |
| "loss": 0.1073, | |
| "step": 3526 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.729636799407408e-08, | |
| "loss": 0.0999, | |
| "step": 3527 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.7037544758950752e-08, | |
| "loss": 0.0935, | |
| "step": 3528 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.678066604718198e-08, | |
| "loss": 0.1019, | |
| "step": 3529 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.652573205994157e-08, | |
| "loss": 0.1053, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.6272742996880365e-08, | |
| "loss": 0.1075, | |
| "step": 3531 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.602169905612516e-08, | |
| "loss": 0.0985, | |
| "step": 3532 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.5772600434280916e-08, | |
| "loss": 0.0988, | |
| "step": 3533 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.5525447326427968e-08, | |
| "loss": 0.1132, | |
| "step": 3534 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.5280239926123152e-08, | |
| "loss": 0.0868, | |
| "step": 3535 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.503697842540036e-08, | |
| "loss": 0.0959, | |
| "step": 3536 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.4795663014768869e-08, | |
| "loss": 0.0985, | |
| "step": 3537 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.4556293883213613e-08, | |
| "loss": 0.1035, | |
| "step": 3538 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.4318871218195751e-08, | |
| "loss": 0.1027, | |
| "step": 3539 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.4083395205652383e-08, | |
| "loss": 0.0926, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.3849866029995718e-08, | |
| "loss": 0.1079, | |
| "step": 3541 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.3618283874112792e-08, | |
| "loss": 0.1253, | |
| "step": 3542 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.3388648919366865e-08, | |
| "loss": 0.0915, | |
| "step": 3543 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.3160961345595468e-08, | |
| "loss": 0.0992, | |
| "step": 3544 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.2935221331111524e-08, | |
| "loss": 0.1076, | |
| "step": 3545 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.271142905270223e-08, | |
| "loss": 0.095, | |
| "step": 3546 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.2489584685630163e-08, | |
| "loss": 0.1032, | |
| "step": 3547 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.2269688403631907e-08, | |
| "loss": 0.0877, | |
| "step": 3548 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.2051740378917764e-08, | |
| "loss": 0.099, | |
| "step": 3549 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.18357407821737e-08, | |
| "loss": 0.1035, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.162168978255912e-08, | |
| "loss": 0.1003, | |
| "step": 3551 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.140958754770688e-08, | |
| "loss": 0.0892, | |
| "step": 3552 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.1199434243724105e-08, | |
| "loss": 0.112, | |
| "step": 3553 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0991230035191924e-08, | |
| "loss": 0.1123, | |
| "step": 3554 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0784975085164351e-08, | |
| "loss": 0.104, | |
| "step": 3555 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0580669555169676e-08, | |
| "loss": 0.0916, | |
| "step": 3556 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.03783136052088e-08, | |
| "loss": 0.0817, | |
| "step": 3557 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.017790739375607e-08, | |
| "loss": 0.0995, | |
| "step": 3558 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.979451077758995e-09, | |
| "loss": 0.1034, | |
| "step": 3559 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.782944812637973e-09, | |
| "loss": 0.0906, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.588388752286015e-09, | |
| "loss": 0.1083, | |
| "step": 3561 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.395783049069296e-09, | |
| "loss": 0.0961, | |
| "step": 3562 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.205127853826045e-09, | |
| "loss": 0.0944, | |
| "step": 3563 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.016423315867106e-09, | |
| "loss": 0.1201, | |
| "step": 3564 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 8.829669582976209e-09, | |
| "loss": 0.104, | |
| "step": 3565 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 8.64486680140858e-09, | |
| "loss": 0.0911, | |
| "step": 3566 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 8.462015115891785e-09, | |
| "loss": 0.1078, | |
| "step": 3567 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 8.281114669626e-09, | |
| "loss": 0.1145, | |
| "step": 3568 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 8.102165604282064e-09, | |
| "loss": 0.0938, | |
| "step": 3569 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.925168060003707e-09, | |
| "loss": 0.0921, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.750122175405883e-09, | |
| "loss": 0.1003, | |
| "step": 3571 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.577028087575322e-09, | |
| "loss": 0.098, | |
| "step": 3572 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.405885932069978e-09, | |
| "loss": 0.1014, | |
| "step": 3573 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.236695842919028e-09, | |
| "loss": 0.0843, | |
| "step": 3574 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.069457952623148e-09, | |
| "loss": 0.1055, | |
| "step": 3575 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.90417239215424e-09, | |
| "loss": 0.0959, | |
| "step": 3576 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.740839290955148e-09, | |
| "loss": 0.1064, | |
| "step": 3577 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.5794587769393845e-09, | |
| "loss": 0.1083, | |
| "step": 3578 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.420030976491687e-09, | |
| "loss": 0.0945, | |
| "step": 3579 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.262556014467181e-09, | |
| "loss": 0.1128, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 6.107034014192215e-09, | |
| "loss": 0.1123, | |
| "step": 3581 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.953465097463251e-09, | |
| "loss": 0.1028, | |
| "step": 3582 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.8018493845468645e-09, | |
| "loss": 0.0867, | |
| "step": 3583 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.652186994180853e-09, | |
| "loss": 0.0958, | |
| "step": 3584 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.504478043572292e-09, | |
| "loss": 0.0947, | |
| "step": 3585 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.3587226483992065e-09, | |
| "loss": 0.1016, | |
| "step": 3586 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.21492092280973e-09, | |
| "loss": 0.0945, | |
| "step": 3587 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 5.073072979421001e-09, | |
| "loss": 0.106, | |
| "step": 3588 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.933178929321103e-09, | |
| "loss": 0.0984, | |
| "step": 3589 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.7952388820676765e-09, | |
| "loss": 0.1075, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.65925294568792e-09, | |
| "loss": 0.1025, | |
| "step": 3591 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.525221226678589e-09, | |
| "loss": 0.0888, | |
| "step": 3592 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.393143830005997e-09, | |
| "loss": 0.1119, | |
| "step": 3593 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.263020859106848e-09, | |
| "loss": 0.1089, | |
| "step": 3594 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.134852415885737e-09, | |
| "loss": 0.0917, | |
| "step": 3595 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 4.0086386007176514e-09, | |
| "loss": 0.1039, | |
| "step": 3596 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.884379512446301e-09, | |
| "loss": 0.0905, | |
| "step": 3597 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.7620752483849555e-09, | |
| "loss": 0.1132, | |
| "step": 3598 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.6417259043158847e-09, | |
| "loss": 0.0901, | |
| "step": 3599 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.523331574489808e-09, | |
| "loss": 0.0987, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.406892351627278e-09, | |
| "loss": 0.1006, | |
| "step": 3601 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.292408326917018e-09, | |
| "loss": 0.098, | |
| "step": 3602 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.179879590017032e-09, | |
| "loss": 0.0907, | |
| "step": 3603 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 3.0693062290534924e-09, | |
| "loss": 0.0947, | |
| "step": 3604 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 2.960688330621575e-09, | |
| "loss": 0.111, | |
| "step": 3605 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.8540259797854577e-09, | |
| "loss": 0.1021, | |
| "step": 3606 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.749319260076655e-09, | |
| "loss": 0.1345, | |
| "step": 3607 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.6465682534965174e-09, | |
| "loss": 0.0891, | |
| "step": 3608 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.545773040513733e-09, | |
| "loss": 0.0802, | |
| "step": 3609 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.44693370006599e-09, | |
| "loss": 0.1012, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.3500503095588713e-09, | |
| "loss": 0.0964, | |
| "step": 3611 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.2551229448661284e-09, | |
| "loss": 0.1154, | |
| "step": 3612 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.1621516803299603e-09, | |
| "loss": 0.093, | |
| "step": 3613 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 2.0711365887604585e-09, | |
| "loss": 0.1065, | |
| "step": 3614 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.9820777414356064e-09, | |
| "loss": 0.1017, | |
| "step": 3615 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.8949752081018347e-09, | |
| "loss": 0.0967, | |
| "step": 3616 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.8098290569729116e-09, | |
| "loss": 0.1131, | |
| "step": 3617 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.7266393547307747e-09, | |
| "loss": 0.0948, | |
| "step": 3618 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.6454061665255316e-09, | |
| "loss": 0.1103, | |
| "step": 3619 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.566129555974072e-09, | |
| "loss": 0.096, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.48880958516201e-09, | |
| "loss": 0.0964, | |
| "step": 3621 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.4134463146420197e-09, | |
| "loss": 0.103, | |
| "step": 3622 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.3400398034346673e-09, | |
| "loss": 0.1072, | |
| "step": 3623 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.2685901090281338e-09, | |
| "loss": 0.1326, | |
| "step": 3624 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.1990972873779372e-09, | |
| "loss": 0.1156, | |
| "step": 3625 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.1315613929069325e-09, | |
| "loss": 0.1088, | |
| "step": 3626 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.0659824785055894e-09, | |
| "loss": 0.103, | |
| "step": 3627 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 1.0023605955319926e-09, | |
| "loss": 0.0966, | |
| "step": 3628 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 9.406957938115634e-10, | |
| "loss": 0.0965, | |
| "step": 3629 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 8.809881216365057e-10, | |
| "loss": 0.105, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 8.23237625766915e-10, | |
| "loss": 0.09, | |
| "step": 3631 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 7.674443514299467e-10, | |
| "loss": 0.0923, | |
| "step": 3632 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 7.136083423195383e-10, | |
| "loss": 0.1017, | |
| "step": 3633 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.617296405975193e-10, | |
| "loss": 0.0962, | |
| "step": 3634 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.118082868925012e-10, | |
| "loss": 0.0886, | |
| "step": 3635 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.638443202998778e-10, | |
| "loss": 0.0921, | |
| "step": 3636 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.178377783829347e-10, | |
| "loss": 0.1017, | |
| "step": 3637 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 4.737886971714623e-10, | |
| "loss": 0.1045, | |
| "step": 3638 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 4.316971111617552e-10, | |
| "loss": 0.1001, | |
| "step": 3639 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.915630533185555e-10, | |
| "loss": 0.0966, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.5338655507199947e-10, | |
| "loss": 0.1067, | |
| "step": 3641 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 3.1716764632011564e-10, | |
| "loss": 0.1039, | |
| "step": 3642 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.8290635542799204e-10, | |
| "loss": 0.0986, | |
| "step": 3643 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.5060270922666607e-10, | |
| "loss": 0.088, | |
| "step": 3644 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.2025673301478977e-10, | |
| "loss": 0.104, | |
| "step": 3645 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.9186845055779723e-10, | |
| "loss": 0.1028, | |
| "step": 3646 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.6543788408818207e-10, | |
| "loss": 0.1109, | |
| "step": 3647 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.4096505430438722e-10, | |
| "loss": 0.1096, | |
| "step": 3648 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.1844998037247036e-10, | |
| "loss": 0.1, | |
| "step": 3649 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 9.789267992499352e-11, | |
| "loss": 0.1006, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 7.92931690613008e-11, | |
| "loss": 0.1053, | |
| "step": 3651 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 6.265146234779585e-11, | |
| "loss": 0.1019, | |
| "step": 3652 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.7967572817109176e-11, | |
| "loss": 0.0839, | |
| "step": 3653 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.524151196893089e-11, | |
| "loss": 0.1087, | |
| "step": 3654 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 2.4473289769733067e-11, | |
| "loss": 0.0968, | |
| "step": 3655 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 1.56629146522147e-11, | |
| "loss": 0.0946, | |
| "step": 3656 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 8.810393516966997e-12, | |
| "loss": 0.1053, | |
| "step": 3657 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 3.915731729697836e-12, | |
| "loss": 0.0868, | |
| "step": 3658 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 9.789331240073196e-13, | |
| "loss": 0.1056, | |
| "step": 3659 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 0.0, | |
| "loss": 0.1444, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 3660, | |
| "total_flos": 1.0352562943295488e+18, | |
| "train_loss": 0.10560117050231806, | |
| "train_runtime": 9480.7961, | |
| "train_samples_per_second": 49.422, | |
| "train_steps_per_second": 0.386 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 3660, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "total_flos": 1.0352562943295488e+18, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |