Model save
This view is limited to 50 files because it contains too many changes.
- README.md +71 -0
- config.json +47 -0
- model.safetensors +3 -0
- training_args.bin +3 -0
- trial-0/checkpoint-3011/config.json +47 -0
- trial-0/checkpoint-3011/model.safetensors +3 -0
- trial-0/checkpoint-3011/optimizer.pt +3 -0
- trial-0/checkpoint-3011/rng_state.pth +3 -0
- trial-0/checkpoint-3011/scheduler.pt +3 -0
- trial-0/checkpoint-3011/trainer_state.json +465 -0
- trial-0/checkpoint-3011/training_args.bin +3 -0
- trial-1/checkpoint-15055/config.json +47 -0
- trial-1/checkpoint-15055/model.safetensors +3 -0
- trial-1/checkpoint-15055/optimizer.pt +3 -0
- trial-1/checkpoint-15055/rng_state.pth +3 -0
- trial-1/checkpoint-15055/scheduler.pt +3 -0
- trial-1/checkpoint-15055/trainer_state.json +2200 -0
- trial-1/checkpoint-15055/training_args.bin +3 -0
- trial-2/checkpoint-9033/config.json +47 -0
- trial-2/checkpoint-9033/model.safetensors +3 -0
- trial-2/checkpoint-9033/optimizer.pt +3 -0
- trial-2/checkpoint-9033/rng_state.pth +3 -0
- trial-2/checkpoint-9033/scheduler.pt +3 -0
- trial-2/checkpoint-9033/trainer_state.json +1329 -0
- trial-2/checkpoint-9033/training_args.bin +3 -0
- trial-3/checkpoint-1506/config.json +47 -0
- trial-3/checkpoint-1506/model.safetensors +3 -0
- trial-3/checkpoint-1506/optimizer.pt +3 -0
- trial-3/checkpoint-1506/rng_state.pth +3 -0
- trial-3/checkpoint-1506/scheduler.pt +3 -0
- trial-3/checkpoint-1506/trainer_state.json +255 -0
- trial-3/checkpoint-1506/training_args.bin +3 -0
- trial-4/checkpoint-3012/config.json +47 -0
- trial-4/checkpoint-3012/model.safetensors +3 -0
- trial-4/checkpoint-3012/optimizer.pt +3 -0
- trial-4/checkpoint-3012/rng_state.pth +3 -0
- trial-4/checkpoint-3012/scheduler.pt +3 -0
- trial-4/checkpoint-3012/trainer_state.json +477 -0
- trial-4/checkpoint-3012/training_args.bin +3 -0
- trial-5/checkpoint-1506/config.json +47 -0
- trial-5/checkpoint-1506/model.safetensors +3 -0
- trial-5/checkpoint-1506/optimizer.pt +3 -0
- trial-5/checkpoint-1506/rng_state.pth +3 -0
- trial-5/checkpoint-1506/scheduler.pt +3 -0
- trial-5/checkpoint-1506/trainer_state.json +255 -0
- trial-5/checkpoint-1506/training_args.bin +3 -0
- trial-6/checkpoint-1506/config.json +47 -0
- trial-6/checkpoint-1506/model.safetensors +3 -0
- trial-6/checkpoint-1506/optimizer.pt +3 -0
- trial-6/checkpoint-1506/rng_state.pth +3 -0
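
Each `trial-*/checkpoint-*` directory listed above is a full Trainer checkpoint (weights plus `optimizer.pt`, `scheduler.pt`, and `rng_state.pth`) stored through Git LFS. If only one checkpoint is needed it can be fetched selectively; a minimal sketch using `huggingface_hub`, where the repository id is a placeholder since the repo name is not shown in this view:

```python
from huggingface_hub import snapshot_download

# Placeholder repo id; substitute the actual repository this commit belongs to.
local_dir = snapshot_download(
    repo_id="your-username/answerdotai-ModernBERT-large-finetuned",
    allow_patterns=["trial-0/checkpoint-3011/*"],  # download a single checkpoint only
)
print(local_dir)
```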
README.md
ADDED
@@ -0,0 +1,71 @@
+---
+library_name: transformers
+license: apache-2.0
+base_model: answerdotai/ModernBERT-large
+tags:
+- generated_from_trainer
+metrics:
+- accuracy
+- precision
+- recall
+- f1
+model-index:
+- name: answerdotai-ModernBERT-large-finetuned
+  results: []
+---
+
+<!-- This model card has been generated automatically according to the information the Trainer had access to. You
+should probably proofread and complete it, then remove this comment. -->
+
+# answerdotai-ModernBERT-large-finetuned
+
+This model is a fine-tuned version of [answerdotai/ModernBERT-large](https://huggingface.co/answerdotai/ModernBERT-large) on the None dataset.
+It achieves the following results on the evaluation set:
+- Loss: 0.0153
+- Accuracy: 0.9980
+- Precision: 0.9980
+- Recall: 0.9980
+- F1: 0.9980
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 4.1905207188250686e-05
+- train_batch_size: 16
+- eval_batch_size: 16
+- seed: 42
+- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+- lr_scheduler_type: linear
+- num_epochs: 5
+
+### Training results
+
+| Training Loss | Epoch | Step  | Validation Loss | Accuracy | Precision | Recall | F1     |
+|:-------------:|:-----:|:-----:|:---------------:|:--------:|:---------:|:------:|:------:|
+| 0.0046        | 1.0   | 3011  | 0.0257          | 0.9962   | 0.9962    | 0.9962 | 0.9962 |
+| 0.021         | 2.0   | 6022  | 0.0234          | 0.9959   | 0.9960    | 0.9959 | 0.9960 |
+| 0.0001        | 3.0   | 9033  | 0.0194          | 0.9979   | 0.9978    | 0.9979 | 0.9978 |
+| 0.0002        | 4.0   | 12044 | 0.0181          | 0.9979   | 0.9978    | 0.9979 | 0.9978 |
+| 0.0           | 5.0   | 15055 | 0.0177          | 0.9980   | 0.9980    | 0.9980 | 0.9980 |
+
+
+### Framework versions
+
+- Transformers 4.48.0.dev0
+- Pytorch 2.5.1+cu124
+- Datasets 3.2.0
+- Tokenizers 0.21.0
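
The hyperparameters reported in the model card above map one-to-one onto `TrainingArguments`. A minimal sketch of an equivalent configuration, assuming the standard `transformers` Trainer API; `output_dir` is a placeholder and the model/dataset setup is omitted:

```python
from transformers import TrainingArguments

# Mirrors the values under "Training hyperparameters" in the README above.
training_args = TrainingArguments(
    output_dir="./results",                  # placeholder
    learning_rate=4.1905207188250686e-05,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    optim="adamw_torch",                     # AdamW; betas=(0.9, 0.999), eps=1e-8 are the defaults
    lr_scheduler_type="linear",
    num_train_epochs=5,
)
```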
config.json
ADDED
@@ -0,0 +1,47 @@
+{
+  "_name_or_path": "answerdotai/ModernBERT-large",
+  "architectures": [
+    "ModernBertForSequenceClassification"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 50281,
+  "classifier_activation": "gelu",
+  "classifier_bias": false,
+  "classifier_dropout": 0.0,
+  "classifier_pooling": "mean",
+  "cls_token_id": 50281,
+  "decoder_bias": true,
+  "deterministic_flash_attn": false,
+  "embedding_dropout": 0.0,
+  "eos_token_id": 50282,
+  "global_attn_every_n_layers": 3,
+  "global_rope_theta": 160000.0,
+  "gradient_checkpointing": false,
+  "hidden_activation": "gelu",
+  "hidden_size": 1024,
+  "initializer_cutoff_factor": 2.0,
+  "initializer_range": 0.02,
+  "intermediate_size": 2624,
+  "layer_norm_eps": 1e-05,
+  "local_attention": 128,
+  "local_rope_theta": 10000.0,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "mlp_dropout": 0.0,
+  "model_type": "modernbert",
+  "norm_bias": false,
+  "norm_eps": 1e-05,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 28,
+  "pad_token_id": 50283,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "reference_compile": true,
+  "sep_token_id": 50282,
+  "sparse_pred_ignore_index": -100,
+  "sparse_prediction": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.0.dev0",
+  "vocab_size": 50368
+}
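
Because `config.json` declares `ModernBertForSequenceClassification` with `problem_type: single_label_classification`, the exported model can be loaded through the auto classes in a Transformers version that includes ModernBERT (the config records 4.48.0.dev0). A minimal inference sketch; the repository id is a placeholder, and no `id2label` mapping is defined in this config, so only a class index is printed:

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

repo = "your-username/answerdotai-ModernBERT-large-finetuned"  # placeholder repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)

inputs = tokenizer("Example input text", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.argmax(dim=-1).item())  # predicted class index
```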
model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e73302dfe575e2fae3ec24034f72088b2adb6e6f5c4e4a35e949d0d8c4b78dbb
+size 1583351632
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d77dcef85d4aa9fa4696da036cb727ea773883db77c9535ae75875fc4a5a5c11
+size 5432
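
`training_args.bin` is the pickled `TrainingArguments` object that the Trainer writes next to the model. A short sketch for inspecting it after downloading, assuming a recent PyTorch where `weights_only=False` is needed to unpickle non-tensor objects:

```python
import torch

# The file holds a pickled TrainingArguments instance, not tensors.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type)
```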
trial-0/checkpoint-3011/config.json
ADDED
@@ -0,0 +1,47 @@
+{
+  "_name_or_path": "answerdotai/ModernBERT-large",
+  "architectures": [
+    "ModernBertForSequenceClassification"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 50281,
+  "classifier_activation": "gelu",
+  "classifier_bias": false,
+  "classifier_dropout": 0.0,
+  "classifier_pooling": "mean",
+  "cls_token_id": 50281,
+  "decoder_bias": true,
+  "deterministic_flash_attn": false,
+  "embedding_dropout": 0.0,
+  "eos_token_id": 50282,
+  "global_attn_every_n_layers": 3,
+  "global_rope_theta": 160000.0,
+  "gradient_checkpointing": false,
+  "hidden_activation": "gelu",
+  "hidden_size": 1024,
+  "initializer_cutoff_factor": 2.0,
+  "initializer_range": 0.02,
+  "intermediate_size": 2624,
+  "layer_norm_eps": 1e-05,
+  "local_attention": 128,
+  "local_rope_theta": 10000.0,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "mlp_dropout": 0.0,
+  "model_type": "modernbert",
+  "norm_bias": false,
+  "norm_eps": 1e-05,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 28,
+  "pad_token_id": 50283,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "reference_compile": true,
+  "sep_token_id": 50282,
+  "sparse_pred_ignore_index": -100,
+  "sparse_prediction": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.0.dev0",
+  "vocab_size": 50368
+}
trial-0/checkpoint-3011/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e79c24376368ad25cd3c338648c1e61fcbf3c81c96c917991d54a7bad08de0de
+size 1583351632
trial-0/checkpoint-3011/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:022244260a3503756427f87fda63355cfa09334e6a2699f150afdd10b1cc0cb3
+size 3166813178
trial-0/checkpoint-3011/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:568428d80a25211a390c359ca51b0b20b38ca0607fbc196f106c9841c02d3e59
+size 14244
trial-0/checkpoint-3011/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f8a6aef7194053a2f13849a184537c69f114fdea257f4aa4eef32c5d0218928
+size 1064
trial-0/checkpoint-3011/trainer_state.json
ADDED
|
@@ -0,0 +1,465 @@
| 1 |
+
{
|
| 2 |
+
"best_metric": 0.02747241035103798,
|
| 3 |
+
"best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-0/checkpoint-3011",
|
| 4 |
+
"epoch": 1.0,
|
| 5 |
+
"eval_steps": 500,
|
| 6 |
+
"global_step": 3011,
|
| 7 |
+
"is_hyper_param_search": false,
|
| 8 |
+
"is_local_process_zero": true,
|
| 9 |
+
"is_world_process_zero": true,
|
| 10 |
+
"log_history": [
|
| 11 |
+
{
|
| 12 |
+
"epoch": 0.016605778811026237,
|
| 13 |
+
"grad_norm": 8.580768585205078,
|
| 14 |
+
"learning_rate": 5.396357633246935e-06,
|
| 15 |
+
"loss": 0.4089,
|
| 16 |
+
"step": 50
|
| 17 |
+
},
|
| 18 |
+
{
|
| 19 |
+
"epoch": 0.033211557622052475,
|
| 20 |
+
"grad_norm": 4.984262466430664,
|
| 21 |
+
"learning_rate": 5.385132993856705e-06,
|
| 22 |
+
"loss": 0.1911,
|
| 23 |
+
"step": 100
|
| 24 |
+
},
|
| 25 |
+
{
|
| 26 |
+
"epoch": 0.04981733643307871,
|
| 27 |
+
"grad_norm": 145.00692749023438,
|
| 28 |
+
"learning_rate": 5.373908354466475e-06,
|
| 29 |
+
"loss": 0.1545,
|
| 30 |
+
"step": 150
|
| 31 |
+
},
|
| 32 |
+
{
|
| 33 |
+
"epoch": 0.06642311524410495,
|
| 34 |
+
"grad_norm": 2.051618814468384,
|
| 35 |
+
"learning_rate": 5.3626837150762455e-06,
|
| 36 |
+
"loss": 0.0941,
|
| 37 |
+
"step": 200
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"epoch": 0.08302889405513118,
|
| 41 |
+
"grad_norm": 0.19901786744594574,
|
| 42 |
+
"learning_rate": 5.351459075686016e-06,
|
| 43 |
+
"loss": 0.0327,
|
| 44 |
+
"step": 250
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"epoch": 0.09963467286615742,
|
| 48 |
+
"grad_norm": 0.003997017629444599,
|
| 49 |
+
"learning_rate": 5.340234436295786e-06,
|
| 50 |
+
"loss": 0.0585,
|
| 51 |
+
"step": 300
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"epoch": 0.11624045167718366,
|
| 55 |
+
"grad_norm": 0.016547370702028275,
|
| 56 |
+
"learning_rate": 5.329009796905555e-06,
|
| 57 |
+
"loss": 0.0461,
|
| 58 |
+
"step": 350
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"epoch": 0.1328462304882099,
|
| 62 |
+
"grad_norm": 5.060605049133301,
|
| 63 |
+
"learning_rate": 5.317785157515326e-06,
|
| 64 |
+
"loss": 0.0634,
|
| 65 |
+
"step": 400
|
| 66 |
+
},
|
| 67 |
+
{
|
| 68 |
+
"epoch": 0.14945200929923613,
|
| 69 |
+
"grad_norm": 0.013804232701659203,
|
| 70 |
+
"learning_rate": 5.306560518125096e-06,
|
| 71 |
+
"loss": 0.0538,
|
| 72 |
+
"step": 450
|
| 73 |
+
},
|
| 74 |
+
{
|
| 75 |
+
"epoch": 0.16605778811026237,
|
| 76 |
+
"grad_norm": 0.056022025644779205,
|
| 77 |
+
"learning_rate": 5.295335878734865e-06,
|
| 78 |
+
"loss": 0.0283,
|
| 79 |
+
"step": 500
|
| 80 |
+
},
|
| 81 |
+
{
|
| 82 |
+
"epoch": 0.1826635669212886,
|
| 83 |
+
"grad_norm": 0.0030180325265973806,
|
| 84 |
+
"learning_rate": 5.284111239344636e-06,
|
| 85 |
+
"loss": 0.0278,
|
| 86 |
+
"step": 550
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
"epoch": 0.19926934573231483,
|
| 90 |
+
"grad_norm": 0.14190103113651276,
|
| 91 |
+
"learning_rate": 5.272886599954406e-06,
|
| 92 |
+
"loss": 0.0486,
|
| 93 |
+
"step": 600
|
| 94 |
+
},
|
| 95 |
+
{
|
| 96 |
+
"epoch": 0.2158751245433411,
|
| 97 |
+
"grad_norm": 23.922273635864258,
|
| 98 |
+
"learning_rate": 5.261661960564176e-06,
|
| 99 |
+
"loss": 0.0757,
|
| 100 |
+
"step": 650
|
| 101 |
+
},
|
| 102 |
+
{
|
| 103 |
+
"epoch": 0.23248090335436733,
|
| 104 |
+
"grad_norm": 0.017755718901753426,
|
| 105 |
+
"learning_rate": 5.250437321173947e-06,
|
| 106 |
+
"loss": 0.0503,
|
| 107 |
+
"step": 700
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"epoch": 0.24908668216539356,
|
| 111 |
+
"grad_norm": 0.1306287795305252,
|
| 112 |
+
"learning_rate": 5.239212681783716e-06,
|
| 113 |
+
"loss": 0.0369,
|
| 114 |
+
"step": 750
|
| 115 |
+
},
|
| 116 |
+
{
|
| 117 |
+
"epoch": 0.2656924609764198,
|
| 118 |
+
"grad_norm": 0.0014153249794617295,
|
| 119 |
+
"learning_rate": 5.227988042393486e-06,
|
| 120 |
+
"loss": 0.0284,
|
| 121 |
+
"step": 800
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"epoch": 0.282298239787446,
|
| 125 |
+
"grad_norm": 13.222740173339844,
|
| 126 |
+
"learning_rate": 5.216763403003257e-06,
|
| 127 |
+
"loss": 0.0685,
|
| 128 |
+
"step": 850
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"epoch": 0.29890401859847227,
|
| 132 |
+
"grad_norm": 0.01663159392774105,
|
| 133 |
+
"learning_rate": 5.205538763613026e-06,
|
| 134 |
+
"loss": 0.0377,
|
| 135 |
+
"step": 900
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"epoch": 0.3155097974094985,
|
| 139 |
+
"grad_norm": 9.587738037109375,
|
| 140 |
+
"learning_rate": 5.194314124222796e-06,
|
| 141 |
+
"loss": 0.0284,
|
| 142 |
+
"step": 950
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"epoch": 0.33211557622052473,
|
| 146 |
+
"grad_norm": 0.0031755813397467136,
|
| 147 |
+
"learning_rate": 5.183089484832566e-06,
|
| 148 |
+
"loss": 0.0287,
|
| 149 |
+
"step": 1000
|
| 150 |
+
},
|
| 151 |
+
{
|
| 152 |
+
"epoch": 0.348721355031551,
|
| 153 |
+
"grad_norm": 0.002594751538708806,
|
| 154 |
+
"learning_rate": 5.1718648454423365e-06,
|
| 155 |
+
"loss": 0.0239,
|
| 156 |
+
"step": 1050
|
| 157 |
+
},
|
| 158 |
+
{
|
| 159 |
+
"epoch": 0.3653271338425772,
|
| 160 |
+
"grad_norm": 0.0008383135427720845,
|
| 161 |
+
"learning_rate": 5.160640206052107e-06,
|
| 162 |
+
"loss": 0.0163,
|
| 163 |
+
"step": 1100
|
| 164 |
+
},
|
| 165 |
+
{
|
| 166 |
+
"epoch": 0.38193291265360346,
|
| 167 |
+
"grad_norm": 0.0018693436868488789,
|
| 168 |
+
"learning_rate": 5.149415566661877e-06,
|
| 169 |
+
"loss": 0.0173,
|
| 170 |
+
"step": 1150
|
| 171 |
+
},
|
| 172 |
+
{
|
| 173 |
+
"epoch": 0.39853869146462967,
|
| 174 |
+
"grad_norm": 3.5105800634482875e-05,
|
| 175 |
+
"learning_rate": 5.138190927271647e-06,
|
| 176 |
+
"loss": 0.0144,
|
| 177 |
+
"step": 1200
|
| 178 |
+
},
|
| 179 |
+
{
|
| 180 |
+
"epoch": 0.41514447027565593,
|
| 181 |
+
"grad_norm": 0.04262986406683922,
|
| 182 |
+
"learning_rate": 5.126966287881418e-06,
|
| 183 |
+
"loss": 0.0225,
|
| 184 |
+
"step": 1250
|
| 185 |
+
},
|
| 186 |
+
{
|
| 187 |
+
"epoch": 0.4317502490866822,
|
| 188 |
+
"grad_norm": 0.257622092962265,
|
| 189 |
+
"learning_rate": 5.115741648491187e-06,
|
| 190 |
+
"loss": 0.0458,
|
| 191 |
+
"step": 1300
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"epoch": 0.4483560278977084,
|
| 195 |
+
"grad_norm": 0.057092875242233276,
|
| 196 |
+
"learning_rate": 5.104517009100957e-06,
|
| 197 |
+
"loss": 0.015,
|
| 198 |
+
"step": 1350
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"epoch": 0.46496180670873466,
|
| 202 |
+
"grad_norm": 0.009775767102837563,
|
| 203 |
+
"learning_rate": 5.093292369710727e-06,
|
| 204 |
+
"loss": 0.0002,
|
| 205 |
+
"step": 1400
|
| 206 |
+
},
|
| 207 |
+
{
|
| 208 |
+
"epoch": 0.48156758551976087,
|
| 209 |
+
"grad_norm": 0.16755907237529755,
|
| 210 |
+
"learning_rate": 5.082067730320497e-06,
|
| 211 |
+
"loss": 0.0119,
|
| 212 |
+
"step": 1450
|
| 213 |
+
},
|
| 214 |
+
{
|
| 215 |
+
"epoch": 0.4981733643307871,
|
| 216 |
+
"grad_norm": 0.0004505734541453421,
|
| 217 |
+
"learning_rate": 5.0708430909302675e-06,
|
| 218 |
+
"loss": 0.0107,
|
| 219 |
+
"step": 1500
|
| 220 |
+
},
|
| 221 |
+
{
|
| 222 |
+
"epoch": 0.5147791431418134,
|
| 223 |
+
"grad_norm": 0.00015653851733077317,
|
| 224 |
+
"learning_rate": 5.059618451540038e-06,
|
| 225 |
+
"loss": 0.0002,
|
| 226 |
+
"step": 1550
|
| 227 |
+
},
|
| 228 |
+
{
|
| 229 |
+
"epoch": 0.5313849219528396,
|
| 230 |
+
"grad_norm": 0.1482504904270172,
|
| 231 |
+
"learning_rate": 5.048393812149808e-06,
|
| 232 |
+
"loss": 0.0221,
|
| 233 |
+
"step": 1600
|
| 234 |
+
},
|
| 235 |
+
{
|
| 236 |
+
"epoch": 0.5479907007638658,
|
| 237 |
+
"grad_norm": 0.00016298597620334476,
|
| 238 |
+
"learning_rate": 5.037169172759578e-06,
|
| 239 |
+
"loss": 0.0281,
|
| 240 |
+
"step": 1650
|
| 241 |
+
},
|
| 242 |
+
{
|
| 243 |
+
"epoch": 0.564596479574892,
|
| 244 |
+
"grad_norm": 0.0018974934937432408,
|
| 245 |
+
"learning_rate": 5.025944533369348e-06,
|
| 246 |
+
"loss": 0.0152,
|
| 247 |
+
"step": 1700
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"epoch": 0.5812022583859183,
|
| 251 |
+
"grad_norm": 0.020836347714066505,
|
| 252 |
+
"learning_rate": 5.014719893979118e-06,
|
| 253 |
+
"loss": 0.0298,
|
| 254 |
+
"step": 1750
|
| 255 |
+
},
|
| 256 |
+
{
|
| 257 |
+
"epoch": 0.5978080371969445,
|
| 258 |
+
"grad_norm": 0.0017976267263293266,
|
| 259 |
+
"learning_rate": 5.003495254588888e-06,
|
| 260 |
+
"loss": 0.0163,
|
| 261 |
+
"step": 1800
|
| 262 |
+
},
|
| 263 |
+
{
|
| 264 |
+
"epoch": 0.6144138160079707,
|
| 265 |
+
"grad_norm": 0.007485950365662575,
|
| 266 |
+
"learning_rate": 4.992270615198658e-06,
|
| 267 |
+
"loss": 0.0196,
|
| 268 |
+
"step": 1850
|
| 269 |
+
},
|
| 270 |
+
{
|
| 271 |
+
"epoch": 0.631019594818997,
|
| 272 |
+
"grad_norm": 0.00043688452569767833,
|
| 273 |
+
"learning_rate": 4.981045975808428e-06,
|
| 274 |
+
"loss": 0.0118,
|
| 275 |
+
"step": 1900
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"epoch": 0.6476253736300233,
|
| 279 |
+
"grad_norm": 0.0006113427225500345,
|
| 280 |
+
"learning_rate": 4.9698213364181985e-06,
|
| 281 |
+
"loss": 0.0084,
|
| 282 |
+
"step": 1950
|
| 283 |
+
},
|
| 284 |
+
{
|
| 285 |
+
"epoch": 0.6642311524410495,
|
| 286 |
+
"grad_norm": 0.001146040391176939,
|
| 287 |
+
"learning_rate": 4.958596697027969e-06,
|
| 288 |
+
"loss": 0.0001,
|
| 289 |
+
"step": 2000
|
| 290 |
+
},
|
| 291 |
+
{
|
| 292 |
+
"epoch": 0.6808369312520757,
|
| 293 |
+
"grad_norm": 0.022079484537243843,
|
| 294 |
+
"learning_rate": 4.947372057637739e-06,
|
| 295 |
+
"loss": 0.0135,
|
| 296 |
+
"step": 2050
|
| 297 |
+
},
|
| 298 |
+
{
|
| 299 |
+
"epoch": 0.697442710063102,
|
| 300 |
+
"grad_norm": 0.000534936785697937,
|
| 301 |
+
"learning_rate": 4.936147418247508e-06,
|
| 302 |
+
"loss": 0.0013,
|
| 303 |
+
"step": 2100
|
| 304 |
+
},
|
| 305 |
+
{
|
| 306 |
+
"epoch": 0.7140484888741282,
|
| 307 |
+
"grad_norm": 0.19137096405029297,
|
| 308 |
+
"learning_rate": 4.924922778857279e-06,
|
| 309 |
+
"loss": 0.0242,
|
| 310 |
+
"step": 2150
|
| 311 |
+
},
|
| 312 |
+
{
|
| 313 |
+
"epoch": 0.7306542676851544,
|
| 314 |
+
"grad_norm": 2.9649052521563135e-05,
|
| 315 |
+
"learning_rate": 4.913698139467049e-06,
|
| 316 |
+
"loss": 0.0048,
|
| 317 |
+
"step": 2200
|
| 318 |
+
},
|
| 319 |
+
{
|
| 320 |
+
"epoch": 0.7472600464961807,
|
| 321 |
+
"grad_norm": 0.0003890593070536852,
|
| 322 |
+
"learning_rate": 4.902473500076818e-06,
|
| 323 |
+
"loss": 0.0055,
|
| 324 |
+
"step": 2250
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"epoch": 0.7638658253072069,
|
| 328 |
+
"grad_norm": 7.795329293003306e-05,
|
| 329 |
+
"learning_rate": 4.891248860686589e-06,
|
| 330 |
+
"loss": 0.0062,
|
| 331 |
+
"step": 2300
|
| 332 |
+
},
|
| 333 |
+
{
|
| 334 |
+
"epoch": 0.7804716041182331,
|
| 335 |
+
"grad_norm": 3.511108661768958e-05,
|
| 336 |
+
"learning_rate": 4.8800242212963586e-06,
|
| 337 |
+
"loss": 0.0032,
|
| 338 |
+
"step": 2350
|
| 339 |
+
},
|
| 340 |
+
{
|
| 341 |
+
"epoch": 0.7970773829292593,
|
| 342 |
+
"grad_norm": 0.0023988874163478613,
|
| 343 |
+
"learning_rate": 4.868799581906129e-06,
|
| 344 |
+
"loss": 0.0256,
|
| 345 |
+
"step": 2400
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"epoch": 0.8136831617402857,
|
| 349 |
+
"grad_norm": 2.8709350772260223e-06,
|
| 350 |
+
"learning_rate": 4.8575749425159e-06,
|
| 351 |
+
"loss": 0.0,
|
| 352 |
+
"step": 2450
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"epoch": 0.8302889405513119,
|
| 356 |
+
"grad_norm": 6.131248665042222e-05,
|
| 357 |
+
"learning_rate": 4.846350303125669e-06,
|
| 358 |
+
"loss": 0.0002,
|
| 359 |
+
"step": 2500
|
| 360 |
+
},
|
| 361 |
+
{
|
| 362 |
+
"epoch": 0.8468947193623381,
|
| 363 |
+
"grad_norm": 0.4060337245464325,
|
| 364 |
+
"learning_rate": 4.835125663735439e-06,
|
| 365 |
+
"loss": 0.0113,
|
| 366 |
+
"step": 2550
|
| 367 |
+
},
|
| 368 |
+
{
|
| 369 |
+
"epoch": 0.8635004981733644,
|
| 370 |
+
"grad_norm": 0.14530286192893982,
|
| 371 |
+
"learning_rate": 4.823901024345209e-06,
|
| 372 |
+
"loss": 0.0177,
|
| 373 |
+
"step": 2600
|
| 374 |
+
},
|
| 375 |
+
{
|
| 376 |
+
"epoch": 0.8801062769843906,
|
| 377 |
+
"grad_norm": 0.00015053209790494293,
|
| 378 |
+
"learning_rate": 4.812676384954979e-06,
|
| 379 |
+
"loss": 0.0083,
|
| 380 |
+
"step": 2650
|
| 381 |
+
},
|
| 382 |
+
{
|
| 383 |
+
"epoch": 0.8967120557954168,
|
| 384 |
+
"grad_norm": 2.9383112632785924e-05,
|
| 385 |
+
"learning_rate": 4.801451745564749e-06,
|
| 386 |
+
"loss": 0.0,
|
| 387 |
+
"step": 2700
|
| 388 |
+
},
|
| 389 |
+
{
|
| 390 |
+
"epoch": 0.913317834606443,
|
| 391 |
+
"grad_norm": 0.0002987831539940089,
|
| 392 |
+
"learning_rate": 4.7902271061745195e-06,
|
| 393 |
+
"loss": 0.0013,
|
| 394 |
+
"step": 2750
|
| 395 |
+
},
|
| 396 |
+
{
|
| 397 |
+
"epoch": 0.9299236134174693,
|
| 398 |
+
"grad_norm": 9.85327574198891e-07,
|
| 399 |
+
"learning_rate": 4.7790024667842896e-06,
|
| 400 |
+
"loss": 0.0009,
|
| 401 |
+
"step": 2800
|
| 402 |
+
},
|
| 403 |
+
{
|
| 404 |
+
"epoch": 0.9465293922284955,
|
| 405 |
+
"grad_norm": 0.0004204445576760918,
|
| 406 |
+
"learning_rate": 4.76777782739406e-06,
|
| 407 |
+
"loss": 0.0,
|
| 408 |
+
"step": 2850
|
| 409 |
+
},
|
| 410 |
+
{
|
| 411 |
+
"epoch": 0.9631351710395217,
|
| 412 |
+
"grad_norm": 0.02836577780544758,
|
| 413 |
+
"learning_rate": 4.75655318800383e-06,
|
| 414 |
+
"loss": 0.0017,
|
| 415 |
+
"step": 2900
|
| 416 |
+
},
|
| 417 |
+
{
|
| 418 |
+
"epoch": 0.9797409498505479,
|
| 419 |
+
"grad_norm": 12.173595428466797,
|
| 420 |
+
"learning_rate": 4.7453285486136e-06,
|
| 421 |
+
"loss": 0.0284,
|
| 422 |
+
"step": 2950
|
| 423 |
+
},
|
| 424 |
+
{
|
| 425 |
+
"epoch": 0.9963467286615743,
|
| 426 |
+
"grad_norm": 0.0006245356635190547,
|
| 427 |
+
"learning_rate": 4.73410390922337e-06,
|
| 428 |
+
"loss": 0.0003,
|
| 429 |
+
"step": 3000
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"epoch": 1.0,
|
| 433 |
+
"eval_accuracy": 0.9964217557251909,
|
| 434 |
+
"eval_f1": 0.9964322542520268,
|
| 435 |
+
"eval_loss": 0.02747241035103798,
|
| 436 |
+
"eval_precision": 0.9964456382767726,
|
| 437 |
+
"eval_recall": 0.9964217557251909,
|
| 438 |
+
"eval_runtime": 70.9146,
|
| 439 |
+
"eval_samples_per_second": 118.227,
|
| 440 |
+
"eval_steps_per_second": 7.389,
|
| 441 |
+
"step": 3011
|
| 442 |
+
}
|
| 443 |
+
],
|
| 444 |
+
"logging_steps": 50,
|
| 445 |
+
"max_steps": 24088,
|
| 446 |
+
"num_input_tokens_seen": 0,
|
| 447 |
+
"num_train_epochs": 8,
|
| 448 |
+
"save_steps": 500,
|
| 449 |
+
"stateful_callbacks": {
|
| 450 |
+
"TrainerControl": {
|
| 451 |
+
"args": {
|
| 452 |
+
"should_epoch_stop": false,
|
| 453 |
+
"should_evaluate": false,
|
| 454 |
+
"should_log": false,
|
| 455 |
+
"should_save": true,
|
| 456 |
+
"should_training_stop": false
|
| 457 |
+
},
|
| 458 |
+
"attributes": {}
|
| 459 |
+
}
|
| 460 |
+
},
|
| 461 |
+
"total_flos": 5.094247305025536e+16,
|
| 462 |
+
"train_batch_size": 16,
|
| 463 |
+
"trial_name": null,
|
| 464 |
+
"trial_params": null
|
| 465 |
+
}
|
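
The `trainer_state.json` shown above records `best_metric`, `best_model_checkpoint`, and a step-by-step `log_history`. A small sketch for extracting the training-loss curve from such a file once it is downloaded locally:

```python
import json

with open("trial-0/checkpoint-3011/trainer_state.json") as f:
    state = json.load(f)

print(state["best_metric"], state["best_model_checkpoint"])

# Training logs carry a "loss" key; evaluation entries carry "eval_loss" instead.
train_curve = [(entry["step"], entry["loss"]) for entry in state["log_history"] if "loss" in entry]
print(train_curve[:3])
```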
trial-0/checkpoint-3011/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2d43680079f69ace9b061a9cddc7889b3b2058a373dcb9cabe67e7a0b586646
+size 5368
trial-1/checkpoint-15055/config.json
ADDED
@@ -0,0 +1,47 @@
+{
+  "_name_or_path": "answerdotai/ModernBERT-large",
+  "architectures": [
+    "ModernBertForSequenceClassification"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 50281,
+  "classifier_activation": "gelu",
+  "classifier_bias": false,
+  "classifier_dropout": 0.0,
+  "classifier_pooling": "mean",
+  "cls_token_id": 50281,
+  "decoder_bias": true,
+  "deterministic_flash_attn": false,
+  "embedding_dropout": 0.0,
+  "eos_token_id": 50282,
+  "global_attn_every_n_layers": 3,
+  "global_rope_theta": 160000.0,
+  "gradient_checkpointing": false,
+  "hidden_activation": "gelu",
+  "hidden_size": 1024,
+  "initializer_cutoff_factor": 2.0,
+  "initializer_range": 0.02,
+  "intermediate_size": 2624,
+  "layer_norm_eps": 1e-05,
+  "local_attention": 128,
+  "local_rope_theta": 10000.0,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "mlp_dropout": 0.0,
+  "model_type": "modernbert",
+  "norm_bias": false,
+  "norm_eps": 1e-05,
+  "num_attention_heads": 16,
+  "num_hidden_layers": 28,
+  "pad_token_id": 50283,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "reference_compile": true,
+  "sep_token_id": 50282,
+  "sparse_pred_ignore_index": -100,
+  "sparse_prediction": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.48.0.dev0",
+  "vocab_size": 50368
+}
trial-1/checkpoint-15055/model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce6a1060ef93e1b5dc13c4ed9a3b6c77d2c792f107b20ff17933b9d4c15ecf0a
+size 1583351632
trial-1/checkpoint-15055/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92795446fb40fea40977d388996f597e156f884cc50faed369041fd42add878a
+size 3166813178
trial-1/checkpoint-15055/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeb45f9d32a7f047001ed27e329de64c1bf9ebcb398e7f4734ee49c0dcd24d49
+size 14244
trial-1/checkpoint-15055/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1f4201dceb29ec1f3f21db3295b8c8fbe60d56c450ea048b5457e0cc4bfa0108
+size 1064
trial-1/checkpoint-15055/trainer_state.json
ADDED
|
@@ -0,0 +1,2200 @@
| 1 |
+
{
|
| 2 |
+
"best_metric": 0.026163995265960693,
|
| 3 |
+
"best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-1/checkpoint-15055",
|
| 4 |
+
"epoch": 5.0,
|
| 5 |
+
"eval_steps": 500,
|
| 6 |
+
"global_step": 15055,
|
| 7 |
+
"is_hyper_param_search": false,
|
| 8 |
+
"is_local_process_zero": true,
|
| 9 |
+
"is_world_process_zero": true,
|
| 10 |
+
"log_history": [
|
| 11 |
+
{
|
| 12 |
+
"epoch": 0.016605778811026237,
|
| 13 |
+
"grad_norm": 97.8153076171875,
|
| 14 |
+
"learning_rate": 4.1822378752775576e-05,
|
| 15 |
+
"loss": 0.332,
|
| 16 |
+
"step": 50
|
| 17 |
+
},
|
| 18 |
+
{
|
| 19 |
+
"epoch": 0.033211557622052475,
|
| 20 |
+
"grad_norm": 0.13549020886421204,
|
| 21 |
+
"learning_rate": 4.1722929523801454e-05,
|
| 22 |
+
"loss": 0.1561,
|
| 23 |
+
"step": 100
|
| 24 |
+
},
|
| 25 |
+
{
|
| 26 |
+
"epoch": 0.04981733643307871,
|
| 27 |
+
"grad_norm": 30.323139190673828,
|
| 28 |
+
"learning_rate": 4.162348029482734e-05,
|
| 29 |
+
"loss": 0.1222,
|
| 30 |
+
"step": 150
|
| 31 |
+
},
|
| 32 |
+
{
|
| 33 |
+
"epoch": 0.06642311524410495,
|
| 34 |
+
"grad_norm": 0.0052522895857691765,
|
| 35 |
+
"learning_rate": 4.1524031065853216e-05,
|
| 36 |
+
"loss": 0.0938,
|
| 37 |
+
"step": 200
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"epoch": 0.08302889405513118,
|
| 41 |
+
"grad_norm": 8.804322242736816,
|
| 42 |
+
"learning_rate": 4.14245818368791e-05,
|
| 43 |
+
"loss": 0.0887,
|
| 44 |
+
"step": 250
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"epoch": 0.09963467286615742,
|
| 48 |
+
"grad_norm": 0.09965868294239044,
|
| 49 |
+
"learning_rate": 4.1325132607904985e-05,
|
| 50 |
+
"loss": 0.0998,
|
| 51 |
+
"step": 300
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"epoch": 0.11624045167718366,
|
| 55 |
+
"grad_norm": 0.03749964013695717,
|
| 56 |
+
"learning_rate": 4.122568337893087e-05,
|
| 57 |
+
"loss": 0.0552,
|
| 58 |
+
"step": 350
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"epoch": 0.1328462304882099,
|
| 62 |
+
"grad_norm": 0.2192620187997818,
|
| 63 |
+
"learning_rate": 4.112623414995675e-05,
|
| 64 |
+
"loss": 0.0717,
|
| 65 |
+
"step": 400
|
| 66 |
+
},
|
| 67 |
+
{
|
| 68 |
+
"epoch": 0.14945200929923613,
|
| 69 |
+
"grad_norm": 136.17337036132812,
|
| 70 |
+
"learning_rate": 4.102678492098263e-05,
|
| 71 |
+
"loss": 0.0343,
|
| 72 |
+
"step": 450
|
| 73 |
+
},
|
| 74 |
+
{
|
| 75 |
+
"epoch": 0.16605778811026237,
|
| 76 |
+
"grad_norm": 0.006200531497597694,
|
| 77 |
+
"learning_rate": 4.092733569200851e-05,
|
| 78 |
+
"loss": 0.0584,
|
| 79 |
+
"step": 500
|
| 80 |
+
},
|
| 81 |
+
{
|
| 82 |
+
"epoch": 0.1826635669212886,
|
| 83 |
+
"grad_norm": 0.032046183943748474,
|
| 84 |
+
"learning_rate": 4.0827886463034394e-05,
|
| 85 |
+
"loss": 0.0458,
|
| 86 |
+
"step": 550
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
"epoch": 0.19926934573231483,
|
| 90 |
+
"grad_norm": 3.6847121715545654,
|
| 91 |
+
"learning_rate": 4.072843723406027e-05,
|
| 92 |
+
"loss": 0.0463,
|
| 93 |
+
"step": 600
|
| 94 |
+
},
|
| 95 |
+
{
|
| 96 |
+
"epoch": 0.2158751245433411,
|
| 97 |
+
"grad_norm": 9.91704273223877,
|
| 98 |
+
"learning_rate": 4.0628988005086156e-05,
|
| 99 |
+
"loss": 0.044,
|
| 100 |
+
"step": 650
|
| 101 |
+
},
|
| 102 |
+
{
|
| 103 |
+
"epoch": 0.23248090335436733,
|
| 104 |
+
"grad_norm": 0.02639286033809185,
|
| 105 |
+
"learning_rate": 4.052953877611204e-05,
|
| 106 |
+
"loss": 0.0353,
|
| 107 |
+
"step": 700
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"epoch": 0.24908668216539356,
|
| 111 |
+
"grad_norm": 0.007340637035667896,
|
| 112 |
+
"learning_rate": 4.0430089547137925e-05,
|
| 113 |
+
"loss": 0.0519,
|
| 114 |
+
"step": 750
|
| 115 |
+
},
|
| 116 |
+
{
|
| 117 |
+
"epoch": 0.2656924609764198,
|
| 118 |
+
"grad_norm": 0.003980421461164951,
|
| 119 |
+
"learning_rate": 4.033064031816381e-05,
|
| 120 |
+
"loss": 0.0588,
|
| 121 |
+
"step": 800
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"epoch": 0.282298239787446,
|
| 125 |
+
"grad_norm": 1.6867443323135376,
|
| 126 |
+
"learning_rate": 4.023119108918969e-05,
|
| 127 |
+
"loss": 0.0766,
|
| 128 |
+
"step": 850
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"epoch": 0.29890401859847227,
|
| 132 |
+
"grad_norm": 0.07216636836528778,
|
| 133 |
+
"learning_rate": 4.013174186021557e-05,
|
| 134 |
+
"loss": 0.0646,
|
| 135 |
+
"step": 900
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"epoch": 0.3155097974094985,
|
| 139 |
+
"grad_norm": 0.0015164370415732265,
|
| 140 |
+
"learning_rate": 4.003229263124145e-05,
|
| 141 |
+
"loss": 0.0342,
|
| 142 |
+
"step": 950
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"epoch": 0.33211557622052473,
|
| 146 |
+
"grad_norm": 0.004687592852860689,
|
| 147 |
+
"learning_rate": 3.993284340226733e-05,
|
| 148 |
+
"loss": 0.0327,
|
| 149 |
+
"step": 1000
|
| 150 |
+
},
|
| 151 |
+
{
|
| 152 |
+
"epoch": 0.348721355031551,
|
| 153 |
+
"grad_norm": 0.0013525976100936532,
|
| 154 |
+
"learning_rate": 3.983339417329321e-05,
|
| 155 |
+
"loss": 0.0362,
|
| 156 |
+
"step": 1050
|
| 157 |
+
},
|
| 158 |
+
{
|
| 159 |
+
"epoch": 0.3653271338425772,
|
| 160 |
+
"grad_norm": 0.021217679604887962,
|
| 161 |
+
"learning_rate": 3.9733944944319096e-05,
|
| 162 |
+
"loss": 0.0251,
|
| 163 |
+
"step": 1100
|
| 164 |
+
},
|
| 165 |
+
{
|
| 166 |
+
"epoch": 0.38193291265360346,
|
| 167 |
+
"grad_norm": 0.0011278189485892653,
|
| 168 |
+
"learning_rate": 3.963449571534498e-05,
|
| 169 |
+
"loss": 0.0136,
|
| 170 |
+
"step": 1150
|
| 171 |
+
},
|
| 172 |
+
{
|
| 173 |
+
"epoch": 0.39853869146462967,
|
| 174 |
+
"grad_norm": 0.0034522530622780323,
|
| 175 |
+
"learning_rate": 3.9535046486370864e-05,
|
| 176 |
+
"loss": 0.021,
|
| 177 |
+
"step": 1200
|
| 178 |
+
},
|
| 179 |
+
{
|
| 180 |
+
"epoch": 0.41514447027565593,
|
| 181 |
+
"grad_norm": 0.20626066625118256,
|
| 182 |
+
"learning_rate": 3.943559725739674e-05,
|
| 183 |
+
"loss": 0.0188,
|
| 184 |
+
"step": 1250
|
| 185 |
+
},
|
| 186 |
+
{
|
| 187 |
+
"epoch": 0.4317502490866822,
|
| 188 |
+
"grad_norm": 0.11095874011516571,
|
| 189 |
+
"learning_rate": 3.9336148028422627e-05,
|
| 190 |
+
"loss": 0.0275,
|
| 191 |
+
"step": 1300
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"epoch": 0.4483560278977084,
|
| 195 |
+
"grad_norm": 0.09171418100595474,
|
| 196 |
+
"learning_rate": 3.9236698799448504e-05,
|
| 197 |
+
"loss": 0.0386,
|
| 198 |
+
"step": 1350
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"epoch": 0.46496180670873466,
|
| 202 |
+
"grad_norm": 0.004308766219764948,
|
| 203 |
+
"learning_rate": 3.913724957047439e-05,
|
| 204 |
+
"loss": 0.0199,
|
| 205 |
+
"step": 1400
|
| 206 |
+
},
|
| 207 |
+
{
|
| 208 |
+
"epoch": 0.48156758551976087,
|
| 209 |
+
"grad_norm": 0.099054716527462,
|
| 210 |
+
"learning_rate": 3.9037800341500266e-05,
|
| 211 |
+
"loss": 0.0349,
|
| 212 |
+
"step": 1450
|
| 213 |
+
},
|
| 214 |
+
{
|
| 215 |
+
"epoch": 0.4981733643307871,
|
| 216 |
+
"grad_norm": 0.000510412035509944,
|
| 217 |
+
"learning_rate": 3.893835111252615e-05,
|
| 218 |
+
"loss": 0.0405,
|
| 219 |
+
"step": 1500
|
| 220 |
+
},
|
| 221 |
+
{
|
| 222 |
+
"epoch": 0.5147791431418134,
|
| 223 |
+
"grad_norm": 0.00045745610259473324,
|
| 224 |
+
"learning_rate": 3.8838901883552035e-05,
|
| 225 |
+
"loss": 0.0081,
|
| 226 |
+
"step": 1550
|
| 227 |
+
},
|
| 228 |
+
{
|
| 229 |
+
"epoch": 0.5313849219528396,
|
| 230 |
+
"grad_norm": 2.640720844268799,
|
| 231 |
+
"learning_rate": 3.873945265457791e-05,
|
| 232 |
+
"loss": 0.0312,
|
| 233 |
+
"step": 1600
|
| 234 |
+
},
|
| 235 |
+
{
|
| 236 |
+
"epoch": 0.5479907007638658,
|
| 237 |
+
"grad_norm": 0.0317668542265892,
|
| 238 |
+
"learning_rate": 3.86400034256038e-05,
|
| 239 |
+
"loss": 0.0231,
|
| 240 |
+
"step": 1650
|
| 241 |
+
},
|
| 242 |
+
{
|
| 243 |
+
"epoch": 0.564596479574892,
|
| 244 |
+
"grad_norm": 0.0374116413295269,
|
| 245 |
+
"learning_rate": 3.854055419662968e-05,
|
| 246 |
+
"loss": 0.0062,
|
| 247 |
+
"step": 1700
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"epoch": 0.5812022583859183,
|
| 251 |
+
"grad_norm": 3.7858481407165527,
|
| 252 |
+
"learning_rate": 3.8441104967655566e-05,
|
| 253 |
+
"loss": 0.0172,
|
| 254 |
+
"step": 1750
|
| 255 |
+
},
|
| 256 |
+
{
|
| 257 |
+
"epoch": 0.5978080371969445,
|
| 258 |
+
"grad_norm": 0.006008578464388847,
|
| 259 |
+
"learning_rate": 3.8341655738681444e-05,
|
| 260 |
+
"loss": 0.0317,
|
| 261 |
+
"step": 1800
|
| 262 |
+
},
|
| 263 |
+
{
|
| 264 |
+
"epoch": 0.6144138160079707,
|
| 265 |
+
"grad_norm": 0.009135287255048752,
|
| 266 |
+
"learning_rate": 3.824220650970733e-05,
|
| 267 |
+
"loss": 0.0267,
|
| 268 |
+
"step": 1850
|
| 269 |
+
},
|
| 270 |
+
{
|
| 271 |
+
"epoch": 0.631019594818997,
|
| 272 |
+
"grad_norm": 0.004011407028883696,
|
| 273 |
+
"learning_rate": 3.8142757280733206e-05,
|
| 274 |
+
"loss": 0.0218,
|
| 275 |
+
"step": 1900
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"epoch": 0.6476253736300233,
|
| 279 |
+
"grad_norm": 1.2146693468093872,
|
| 280 |
+
"learning_rate": 3.804330805175909e-05,
|
| 281 |
+
"loss": 0.0284,
|
| 282 |
+
"step": 1950
|
| 283 |
+
},
|
| 284 |
+
{
|
| 285 |
+
"epoch": 0.6642311524410495,
|
| 286 |
+
"grad_norm": 0.0033848509192466736,
|
| 287 |
+
"learning_rate": 3.794385882278497e-05,
|
| 288 |
+
"loss": 0.0241,
|
| 289 |
+
"step": 2000
|
| 290 |
+
},
|
| 291 |
+
{
|
| 292 |
+
"epoch": 0.6808369312520757,
|
| 293 |
+
"grad_norm": 0.0062151020392775536,
|
| 294 |
+
"learning_rate": 3.784440959381085e-05,
|
| 295 |
+
"loss": 0.0126,
|
| 296 |
+
"step": 2050
|
| 297 |
+
},
|
| 298 |
+
{
|
| 299 |
+
"epoch": 0.697442710063102,
|
| 300 |
+
"grad_norm": 0.004486024379730225,
|
| 301 |
+
"learning_rate": 3.774496036483674e-05,
|
| 302 |
+
"loss": 0.01,
|
| 303 |
+
"step": 2100
|
| 304 |
+
},
|
| 305 |
+
{
|
| 306 |
+
"epoch": 0.7140484888741282,
|
| 307 |
+
"grad_norm": 0.2388181835412979,
|
| 308 |
+
"learning_rate": 3.764551113586262e-05,
|
| 309 |
+
"loss": 0.029,
|
| 310 |
+
"step": 2150
|
| 311 |
+
},
|
| 312 |
+
{
|
| 313 |
+
"epoch": 0.7306542676851544,
|
| 314 |
+
"grad_norm": 0.0008192298118956387,
|
| 315 |
+
"learning_rate": 3.75460619068885e-05,
|
| 316 |
+
"loss": 0.0042,
|
| 317 |
+
"step": 2200
|
| 318 |
+
},
|
| 319 |
+
{
|
| 320 |
+
"epoch": 0.7472600464961807,
|
| 321 |
+
"grad_norm": 5.3397710871649906e-05,
|
| 322 |
+
"learning_rate": 3.7446612677914384e-05,
|
| 323 |
+
"loss": 0.003,
|
| 324 |
+
"step": 2250
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"epoch": 0.7638658253072069,
|
| 328 |
+
"grad_norm": 0.020504744723439217,
|
| 329 |
+
"learning_rate": 3.734716344894026e-05,
|
| 330 |
+
"loss": 0.0262,
|
| 331 |
+
"step": 2300
|
| 332 |
+
},
|
| 333 |
+
{
|
| 334 |
+
"epoch": 0.7804716041182331,
|
| 335 |
+
"grad_norm": 0.0005639814771711826,
|
| 336 |
+
"learning_rate": 3.7247714219966146e-05,
|
| 337 |
+
"loss": 0.0043,
|
| 338 |
+
"step": 2350
|
| 339 |
+
},
|
| 340 |
+
{
|
| 341 |
+
"epoch": 0.7970773829292593,
|
| 342 |
+
"grad_norm": 0.0031704490538686514,
|
| 343 |
+
"learning_rate": 3.714826499099203e-05,
|
| 344 |
+
"loss": 0.0284,
|
| 345 |
+
"step": 2400
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"epoch": 0.8136831617402857,
|
| 349 |
+
"grad_norm": 0.001501227729022503,
|
| 350 |
+
"learning_rate": 3.704881576201791e-05,
|
| 351 |
+
"loss": 0.0139,
|
| 352 |
+
"step": 2450
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"epoch": 0.8302889405513119,
|
| 356 |
+
"grad_norm": 0.0010695902165025473,
|
| 357 |
+
"learning_rate": 3.694936653304379e-05,
|
| 358 |
+
"loss": 0.0002,
|
| 359 |
+
"step": 2500
|
| 360 |
+
},
|
| 361 |
+
{
|
| 362 |
+
"epoch": 0.8468947193623381,
|
| 363 |
+
"grad_norm": 0.0022703870199620724,
|
| 364 |
+
"learning_rate": 3.684991730406968e-05,
|
| 365 |
+
"loss": 0.0127,
|
| 366 |
+
"step": 2550
|
| 367 |
+
},
|
| 368 |
+
{
|
| 369 |
+
"epoch": 0.8635004981733644,
|
| 370 |
+
"grad_norm": 0.057379350066185,
|
| 371 |
+
"learning_rate": 3.675046807509556e-05,
|
| 372 |
+
"loss": 0.0185,
|
| 373 |
+
"step": 2600
|
| 374 |
+
},
|
| 375 |
+
{
|
| 376 |
+
"epoch": 0.8801062769843906,
|
| 377 |
+
"grad_norm": 0.00022332114167511463,
|
| 378 |
+
"learning_rate": 3.665101884612144e-05,
|
| 379 |
+
"loss": 0.0305,
|
| 380 |
+
"step": 2650
|
| 381 |
+
},
|
| 382 |
+
{
|
| 383 |
+
"epoch": 0.8967120557954168,
|
| 384 |
+
"grad_norm": 0.0001592856424394995,
|
| 385 |
+
"learning_rate": 3.6551569617147324e-05,
|
| 386 |
+
"loss": 0.0123,
|
| 387 |
+
"step": 2700
|
| 388 |
+
},
|
| 389 |
+
{
|
| 390 |
+
"epoch": 0.913317834606443,
|
| 391 |
+
"grad_norm": 17.401182174682617,
|
| 392 |
+
"learning_rate": 3.64521203881732e-05,
|
| 393 |
+
"loss": 0.0185,
|
| 394 |
+
"step": 2750
|
| 395 |
+
},
|
| 396 |
+
{
|
| 397 |
+
"epoch": 0.9299236134174693,
|
| 398 |
+
"grad_norm": 6.72700916766189e-05,
|
| 399 |
+
"learning_rate": 3.6352671159199086e-05,
|
| 400 |
+
"loss": 0.0069,
|
| 401 |
+
"step": 2800
|
| 402 |
+
},
|
| 403 |
+
{
|
| 404 |
+
"epoch": 0.9465293922284955,
|
| 405 |
+
"grad_norm": 0.004428895190358162,
|
| 406 |
+
"learning_rate": 3.6253221930224964e-05,
|
| 407 |
+
"loss": 0.0007,
|
| 408 |
+
"step": 2850
|
| 409 |
+
},
|
| 410 |
+
{
|
| 411 |
+
"epoch": 0.9631351710395217,
|
| 412 |
+
"grad_norm": 1.0974332094192505,
|
| 413 |
+
"learning_rate": 3.615377270125085e-05,
|
| 414 |
+
"loss": 0.0404,
|
| 415 |
+
"step": 2900
|
| 416 |
+
},
|
| 417 |
+
{
|
| 418 |
+
"epoch": 0.9797409498505479,
|
| 419 |
+
"grad_norm": 1.9213542938232422,
|
| 420 |
+
"learning_rate": 3.605432347227673e-05,
|
| 421 |
+
"loss": 0.0347,
|
| 422 |
+
"step": 2950
|
| 423 |
+
},
|
| 424 |
+
{
|
| 425 |
+
"epoch": 0.9963467286615743,
|
| 426 |
+
"grad_norm": 0.00922548957169056,
|
| 427 |
+
"learning_rate": 3.595487424330261e-05,
|
| 428 |
+
"loss": 0.0012,
|
| 429 |
+
"step": 3000
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"epoch": 1.0,
|
| 433 |
+
"eval_accuracy": 0.9964217557251909,
|
| 434 |
+
"eval_f1": 0.9964374533901769,
|
| 435 |
+
"eval_loss": 0.0292544886469841,
|
| 436 |
+
"eval_precision": 0.9964596067772543,
|
| 437 |
+
"eval_recall": 0.9964217557251909,
|
| 438 |
+
"eval_runtime": 66.9274,
|
| 439 |
+
"eval_samples_per_second": 125.27,
|
| 440 |
+
"eval_steps_per_second": 7.829,
|
| 441 |
+
"step": 3011
|
| 442 |
+
},
|
| 443 |
+
{
|
| 444 |
+
"epoch": 1.0129525074726005,
|
| 445 |
+
"grad_norm": 0.0020659081637859344,
|
| 446 |
+
"learning_rate": 3.5855425014328495e-05,
|
| 447 |
+
"loss": 0.006,
|
| 448 |
+
"step": 3050
|
| 449 |
+
},
|
| 450 |
+
{
|
| 451 |
+
"epoch": 1.0295582862836268,
|
| 452 |
+
"grad_norm": 0.0007023397483862936,
|
| 453 |
+
"learning_rate": 3.575597578535438e-05,
|
| 454 |
+
"loss": 0.0252,
|
| 455 |
+
"step": 3100
|
| 456 |
+
},
|
| 457 |
+
{
|
| 458 |
+
"epoch": 1.0461640650946529,
|
| 459 |
+
"grad_norm": 0.0009625868406146765,
|
| 460 |
+
"learning_rate": 3.565652655638026e-05,
|
| 461 |
+
"loss": 0.0009,
|
| 462 |
+
"step": 3150
|
| 463 |
+
},
|
| 464 |
+
{
|
| 465 |
+
"epoch": 1.0627698439056792,
|
| 466 |
+
"grad_norm": 0.003358361078426242,
|
| 467 |
+
"learning_rate": 3.555707732740614e-05,
|
| 468 |
+
"loss": 0.0023,
|
| 469 |
+
"step": 3200
|
| 470 |
+
},
|
| 471 |
+
{
|
| 472 |
+
"epoch": 1.0793756227167055,
|
| 473 |
+
"grad_norm": 0.0069133201614022255,
|
| 474 |
+
"learning_rate": 3.5457628098432026e-05,
|
| 475 |
+
"loss": 0.0173,
|
| 476 |
+
"step": 3250
|
| 477 |
+
},
|
| 478 |
+
{
|
| 479 |
+
"epoch": 1.0959814015277316,
|
| 480 |
+
"grad_norm": 0.002833959646522999,
|
| 481 |
+
"learning_rate": 3.5358178869457903e-05,
|
| 482 |
+
"loss": 0.0103,
|
| 483 |
+
"step": 3300
|
| 484 |
+
},
|
| 485 |
+
{
|
| 486 |
+
"epoch": 1.112587180338758,
|
| 487 |
+
"grad_norm": 0.3491974174976349,
|
| 488 |
+
"learning_rate": 3.525872964048379e-05,
|
| 489 |
+
"loss": 0.0106,
|
| 490 |
+
"step": 3350
|
| 491 |
+
},
|
| 492 |
+
{
|
| 493 |
+
"epoch": 1.1291929591497842,
|
| 494 |
+
"grad_norm": 0.002338677179068327,
|
| 495 |
+
"learning_rate": 3.5159280411509666e-05,
|
| 496 |
+
"loss": 0.0113,
|
| 497 |
+
"step": 3400
|
| 498 |
+
},
|
| 499 |
+
{
|
| 500 |
+
"epoch": 1.1457987379608103,
|
| 501 |
+
"grad_norm": 0.01391940750181675,
|
| 502 |
+
"learning_rate": 3.505983118253555e-05,
|
| 503 |
+
"loss": 0.0629,
|
| 504 |
+
"step": 3450
|
| 505 |
+
},
|
| 506 |
+
{
|
| 507 |
+
"epoch": 1.1624045167718366,
|
| 508 |
+
"grad_norm": 0.01091580931097269,
|
| 509 |
+
"learning_rate": 3.4960381953561435e-05,
|
| 510 |
+
"loss": 0.0124,
|
| 511 |
+
"step": 3500
|
| 512 |
+
},
|
| 513 |
+
{
|
| 514 |
+
"epoch": 1.1790102955828627,
|
| 515 |
+
"grad_norm": 0.08861105889081955,
|
| 516 |
+
"learning_rate": 3.486093272458732e-05,
|
| 517 |
+
"loss": 0.005,
|
| 518 |
+
"step": 3550
|
| 519 |
+
},
|
| 520 |
+
{
|
| 521 |
+
"epoch": 1.195616074393889,
|
| 522 |
+
"grad_norm": 13.801424980163574,
|
| 523 |
+
"learning_rate": 3.47614834956132e-05,
|
| 524 |
+
"loss": 0.0198,
|
| 525 |
+
"step": 3600
|
| 526 |
+
},
|
| 527 |
+
{
|
| 528 |
+
"epoch": 1.2122218532049154,
|
| 529 |
+
"grad_norm": 0.0011831964366137981,
|
| 530 |
+
"learning_rate": 3.466203426663908e-05,
|
| 531 |
+
"loss": 0.0199,
|
| 532 |
+
"step": 3650
|
| 533 |
+
},
|
| 534 |
+
{
|
| 535 |
+
"epoch": 1.2288276320159415,
|
| 536 |
+
"grad_norm": 0.0007713422528468072,
|
| 537 |
+
"learning_rate": 3.456258503766496e-05,
|
| 538 |
+
"loss": 0.0002,
|
| 539 |
+
"step": 3700
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"epoch": 1.2454334108269678,
|
| 543 |
+
"grad_norm": 0.012040040455758572,
|
| 544 |
+
"learning_rate": 3.446313580869084e-05,
|
| 545 |
+
"loss": 0.0145,
|
| 546 |
+
"step": 3750
|
| 547 |
+
},
|
| 548 |
+
{
|
| 549 |
+
"epoch": 1.2620391896379939,
|
| 550 |
+
"grad_norm": 0.002991499612107873,
|
| 551 |
+
"learning_rate": 3.436368657971672e-05,
|
| 552 |
+
"loss": 0.0019,
|
| 553 |
+
"step": 3800
|
| 554 |
+
},
|
| 555 |
+
{
|
| 556 |
+
"epoch": 1.2786449684490202,
|
| 557 |
+
"grad_norm": 0.0011237855069339275,
|
| 558 |
+
"learning_rate": 3.4264237350742605e-05,
|
| 559 |
+
"loss": 0.0121,
|
| 560 |
+
"step": 3850
|
| 561 |
+
},
|
| 562 |
+
{
|
| 563 |
+
"epoch": 1.2952507472600465,
|
| 564 |
+
"grad_norm": 0.0003940507594961673,
|
| 565 |
+
"learning_rate": 3.416478812176849e-05,
|
| 566 |
+
"loss": 0.0005,
|
| 567 |
+
"step": 3900
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"epoch": 1.3118565260710726,
|
| 571 |
+
"grad_norm": 0.000680712575558573,
|
| 572 |
+
"learning_rate": 3.4065338892794374e-05,
|
| 573 |
+
"loss": 0.0064,
|
| 574 |
+
"step": 3950
|
| 575 |
+
},
|
| 576 |
+
{
|
| 577 |
+
"epoch": 1.328462304882099,
|
| 578 |
+
"grad_norm": 0.0005092715146020055,
|
| 579 |
+
"learning_rate": 3.396588966382026e-05,
|
| 580 |
+
"loss": 0.0015,
|
| 581 |
+
"step": 4000
|
| 582 |
+
},
|
| 583 |
+
{
|
| 584 |
+
"epoch": 1.3450680836931252,
|
| 585 |
+
"grad_norm": 0.05994075909256935,
|
| 586 |
+
"learning_rate": 3.3866440434846137e-05,
|
| 587 |
+
"loss": 0.012,
|
| 588 |
+
"step": 4050
|
| 589 |
+
},
|
| 590 |
+
{
|
| 591 |
+
"epoch": 1.3616738625041513,
|
| 592 |
+
"grad_norm": 0.00185777444858104,
|
| 593 |
+
"learning_rate": 3.376699120587202e-05,
|
| 594 |
+
"loss": 0.011,
|
| 595 |
+
"step": 4100
|
| 596 |
+
},
|
| 597 |
+
{
|
| 598 |
+
"epoch": 1.3782796413151777,
|
| 599 |
+
"grad_norm": 0.000904878368601203,
|
| 600 |
+
"learning_rate": 3.36675419768979e-05,
|
| 601 |
+
"loss": 0.005,
|
| 602 |
+
"step": 4150
|
| 603 |
+
},
|
| 604 |
+
{
|
| 605 |
+
"epoch": 1.394885420126204,
|
| 606 |
+
"grad_norm": 0.0007558612269349396,
|
| 607 |
+
"learning_rate": 3.356809274792378e-05,
|
| 608 |
+
"loss": 0.0047,
|
| 609 |
+
"step": 4200
|
| 610 |
+
},
|
| 611 |
+
{
|
| 612 |
+
"epoch": 1.41149119893723,
|
| 613 |
+
"grad_norm": 0.000539736298378557,
|
| 614 |
+
"learning_rate": 3.346864351894966e-05,
|
| 615 |
+
"loss": 0.0129,
|
| 616 |
+
"step": 4250
|
| 617 |
+
},
|
| 618 |
+
{
|
| 619 |
+
"epoch": 1.4280969777482564,
|
| 620 |
+
"grad_norm": 0.009128957986831665,
|
| 621 |
+
"learning_rate": 3.3369194289975545e-05,
|
| 622 |
+
"loss": 0.0243,
|
| 623 |
+
"step": 4300
|
| 624 |
+
},
|
| 625 |
+
{
|
| 626 |
+
"epoch": 1.4447027565592827,
|
| 627 |
+
"grad_norm": 0.0029375357553362846,
|
| 628 |
+
"learning_rate": 3.326974506100143e-05,
|
| 629 |
+
"loss": 0.0265,
|
| 630 |
+
"step": 4350
|
| 631 |
+
},
|
| 632 |
+
{
|
| 633 |
+
"epoch": 1.4613085353703088,
|
| 634 |
+
"grad_norm": 0.005572469439357519,
|
| 635 |
+
"learning_rate": 3.317029583202731e-05,
|
| 636 |
+
"loss": 0.0208,
|
| 637 |
+
"step": 4400
|
| 638 |
+
},
|
| 639 |
+
{
|
| 640 |
+
"epoch": 1.4779143141813351,
|
| 641 |
+
"grad_norm": 0.022462476044893265,
|
| 642 |
+
"learning_rate": 3.307084660305319e-05,
|
| 643 |
+
"loss": 0.028,
|
| 644 |
+
"step": 4450
|
| 645 |
+
},
|
| 646 |
+
{
|
| 647 |
+
"epoch": 1.4945200929923614,
|
| 648 |
+
"grad_norm": 0.03258312866091728,
|
| 649 |
+
"learning_rate": 3.2971397374079076e-05,
|
| 650 |
+
"loss": 0.0039,
|
| 651 |
+
"step": 4500
|
| 652 |
+
},
|
| 653 |
+
{
|
| 654 |
+
"epoch": 1.5111258718033875,
|
| 655 |
+
"grad_norm": 0.0003238330245949328,
|
| 656 |
+
"learning_rate": 3.2871948145104954e-05,
|
| 657 |
+
"loss": 0.0004,
|
| 658 |
+
"step": 4550
|
| 659 |
+
},
|
| 660 |
+
{
|
| 661 |
+
"epoch": 1.5277316506144138,
|
| 662 |
+
"grad_norm": 0.0003890040097758174,
|
| 663 |
+
"learning_rate": 3.277249891613084e-05,
|
| 664 |
+
"loss": 0.0019,
|
| 665 |
+
"step": 4600
|
| 666 |
+
},
|
| 667 |
+
{
|
| 668 |
+
"epoch": 1.5443374294254402,
|
| 669 |
+
"grad_norm": 0.0009322063415311277,
|
| 670 |
+
"learning_rate": 3.2673049687156716e-05,
|
| 671 |
+
"loss": 0.0179,
|
| 672 |
+
"step": 4650
|
| 673 |
+
},
|
| 674 |
+
{
|
| 675 |
+
"epoch": 1.5609432082364663,
|
| 676 |
+
"grad_norm": 0.01131466869264841,
|
| 677 |
+
"learning_rate": 3.25736004581826e-05,
|
| 678 |
+
"loss": 0.0104,
|
| 679 |
+
"step": 4700
|
| 680 |
+
},
|
| 681 |
+
{
|
| 682 |
+
"epoch": 1.5775489870474926,
|
| 683 |
+
"grad_norm": 13.522896766662598,
|
| 684 |
+
"learning_rate": 3.247415122920848e-05,
|
| 685 |
+
"loss": 0.0223,
|
| 686 |
+
"step": 4750
|
| 687 |
+
},
|
| 688 |
+
{
|
| 689 |
+
"epoch": 1.594154765858519,
|
| 690 |
+
"grad_norm": 0.02276255562901497,
|
| 691 |
+
"learning_rate": 3.237470200023436e-05,
|
| 692 |
+
"loss": 0.0158,
|
| 693 |
+
"step": 4800
|
| 694 |
+
},
|
| 695 |
+
{
|
| 696 |
+
"epoch": 1.610760544669545,
|
| 697 |
+
"grad_norm": 0.002166257705539465,
|
| 698 |
+
"learning_rate": 3.227525277126025e-05,
|
| 699 |
+
"loss": 0.0078,
|
| 700 |
+
"step": 4850
|
| 701 |
+
},
|
| 702 |
+
{
|
| 703 |
+
"epoch": 1.627366323480571,
|
| 704 |
+
"grad_norm": 1.3065482378005981,
|
| 705 |
+
"learning_rate": 3.217580354228613e-05,
|
| 706 |
+
"loss": 0.0141,
|
| 707 |
+
"step": 4900
|
| 708 |
+
},
|
| 709 |
+
{
|
| 710 |
+
"epoch": 1.6439721022915976,
|
| 711 |
+
"grad_norm": 0.003547689877450466,
|
| 712 |
+
"learning_rate": 3.2076354313312016e-05,
|
| 713 |
+
"loss": 0.015,
|
| 714 |
+
"step": 4950
|
| 715 |
+
},
|
| 716 |
+
{
|
| 717 |
+
"epoch": 1.6605778811026237,
|
| 718 |
+
"grad_norm": 0.014956770464777946,
|
| 719 |
+
"learning_rate": 3.1976905084337894e-05,
|
| 720 |
+
"loss": 0.0152,
|
| 721 |
+
"step": 5000
|
| 722 |
+
},
|
| 723 |
+
{
|
| 724 |
+
"epoch": 1.6771836599136498,
|
| 725 |
+
"grad_norm": 0.04130621254444122,
|
| 726 |
+
"learning_rate": 3.187745585536378e-05,
|
| 727 |
+
"loss": 0.0125,
|
| 728 |
+
"step": 5050
|
| 729 |
+
},
|
| 730 |
+
{
|
| 731 |
+
"epoch": 1.6937894387246761,
|
| 732 |
+
"grad_norm": 0.0008221206953749061,
|
| 733 |
+
"learning_rate": 3.1778006626389656e-05,
|
| 734 |
+
"loss": 0.0135,
|
| 735 |
+
"step": 5100
|
| 736 |
+
},
|
| 737 |
+
{
|
| 738 |
+
"epoch": 1.7103952175357025,
|
| 739 |
+
"grad_norm": 12.82378101348877,
|
| 740 |
+
"learning_rate": 3.167855739741554e-05,
|
| 741 |
+
"loss": 0.0068,
|
| 742 |
+
"step": 5150
|
| 743 |
+
},
|
| 744 |
+
{
|
| 745 |
+
"epoch": 1.7270009963467285,
|
| 746 |
+
"grad_norm": 0.00033835775684565306,
|
| 747 |
+
"learning_rate": 3.157910816844142e-05,
|
| 748 |
+
"loss": 0.0162,
|
| 749 |
+
"step": 5200
|
| 750 |
+
},
|
| 751 |
+
{
|
| 752 |
+
"epoch": 1.7436067751577549,
|
| 753 |
+
"grad_norm": 0.019336150959134102,
|
| 754 |
+
"learning_rate": 3.14796589394673e-05,
|
| 755 |
+
"loss": 0.0233,
|
| 756 |
+
"step": 5250
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"epoch": 1.7602125539687812,
|
| 760 |
+
"grad_norm": 0.0025835856795310974,
|
| 761 |
+
"learning_rate": 3.138020971049319e-05,
|
| 762 |
+
"loss": 0.0049,
|
| 763 |
+
"step": 5300
|
| 764 |
+
},
|
| 765 |
+
{
|
| 766 |
+
"epoch": 1.7768183327798073,
|
| 767 |
+
"grad_norm": 0.001420403248630464,
|
| 768 |
+
"learning_rate": 3.128076048151907e-05,
|
| 769 |
+
"loss": 0.003,
|
| 770 |
+
"step": 5350
|
| 771 |
+
},
|
| 772 |
+
{
|
| 773 |
+
"epoch": 1.7934241115908336,
|
| 774 |
+
"grad_norm": 0.00038990622851997614,
|
| 775 |
+
"learning_rate": 3.118131125254495e-05,
|
| 776 |
+
"loss": 0.0006,
|
| 777 |
+
"step": 5400
|
| 778 |
+
},
|
| 779 |
+
{
|
| 780 |
+
"epoch": 1.81002989040186,
|
| 781 |
+
"grad_norm": 0.008520632050931454,
|
| 782 |
+
"learning_rate": 3.1081862023570834e-05,
|
| 783 |
+
"loss": 0.0017,
|
| 784 |
+
"step": 5450
|
| 785 |
+
},
|
| 786 |
+
{
|
| 787 |
+
"epoch": 1.826635669212886,
|
| 788 |
+
"grad_norm": 6.68273787596263e-05,
|
| 789 |
+
"learning_rate": 3.098241279459671e-05,
|
| 790 |
+
"loss": 0.0036,
|
| 791 |
+
"step": 5500
|
| 792 |
+
},
|
| 793 |
+
{
|
| 794 |
+
"epoch": 1.8432414480239123,
|
| 795 |
+
"grad_norm": 0.0003700813394971192,
|
| 796 |
+
"learning_rate": 3.0882963565622596e-05,
|
| 797 |
+
"loss": 0.0165,
|
| 798 |
+
"step": 5550
|
| 799 |
+
},
|
| 800 |
+
{
|
| 801 |
+
"epoch": 1.8598472268349386,
|
| 802 |
+
"grad_norm": 0.0006010610377416015,
|
| 803 |
+
"learning_rate": 3.078351433664848e-05,
|
| 804 |
+
"loss": 0.0087,
|
| 805 |
+
"step": 5600
|
| 806 |
+
},
|
| 807 |
+
{
|
| 808 |
+
"epoch": 1.8764530056459647,
|
| 809 |
+
"grad_norm": 0.003066062228754163,
|
| 810 |
+
"learning_rate": 3.068406510767436e-05,
|
| 811 |
+
"loss": 0.0146,
|
| 812 |
+
"step": 5650
|
| 813 |
+
},
|
| 814 |
+
{
|
| 815 |
+
"epoch": 1.893058784456991,
|
| 816 |
+
"grad_norm": 0.005507585126906633,
|
| 817 |
+
"learning_rate": 3.058461587870024e-05,
|
| 818 |
+
"loss": 0.0143,
|
| 819 |
+
"step": 5700
|
| 820 |
+
},
|
| 821 |
+
{
|
| 822 |
+
"epoch": 1.9096645632680174,
|
| 823 |
+
"grad_norm": 0.0004035944875795394,
|
| 824 |
+
"learning_rate": 3.0485166649726124e-05,
|
| 825 |
+
"loss": 0.0005,
|
| 826 |
+
"step": 5750
|
| 827 |
+
},
|
| 828 |
+
{
|
| 829 |
+
"epoch": 1.9262703420790435,
|
| 830 |
+
"grad_norm": 0.001009216532111168,
|
| 831 |
+
"learning_rate": 3.0385717420752008e-05,
|
| 832 |
+
"loss": 0.0001,
|
| 833 |
+
"step": 5800
|
| 834 |
+
},
|
| 835 |
+
{
|
| 836 |
+
"epoch": 1.9428761208900698,
|
| 837 |
+
"grad_norm": 0.000999059877358377,
|
| 838 |
+
"learning_rate": 3.0286268191777886e-05,
|
| 839 |
+
"loss": 0.0035,
|
| 840 |
+
"step": 5850
|
| 841 |
+
},
|
| 842 |
+
{
|
| 843 |
+
"epoch": 1.959481899701096,
|
| 844 |
+
"grad_norm": 0.009907165542244911,
|
| 845 |
+
"learning_rate": 3.018681896280377e-05,
|
| 846 |
+
"loss": 0.0169,
|
| 847 |
+
"step": 5900
|
| 848 |
+
},
|
| 849 |
+
{
|
| 850 |
+
"epoch": 1.9760876785121222,
|
| 851 |
+
"grad_norm": 0.0009444186580367386,
|
| 852 |
+
"learning_rate": 3.008736973382965e-05,
|
| 853 |
+
"loss": 0.0002,
|
| 854 |
+
"step": 5950
|
| 855 |
+
},
|
| 856 |
+
{
|
| 857 |
+
"epoch": 1.9926934573231485,
|
| 858 |
+
"grad_norm": 0.004821607377380133,
|
| 859 |
+
"learning_rate": 2.9987920504855536e-05,
|
| 860 |
+
"loss": 0.0279,
|
| 861 |
+
"step": 6000
|
| 862 |
+
},
|
| 863 |
+
{
|
| 864 |
+
"epoch": 2.0,
|
| 865 |
+
"eval_accuracy": 0.9960639312977099,
|
| 866 |
+
"eval_f1": 0.9960726064300177,
|
| 867 |
+
"eval_loss": 0.026200218126177788,
|
| 868 |
+
"eval_precision": 0.9960829017470937,
|
| 869 |
+
"eval_recall": 0.9960639312977099,
|
| 870 |
+
"eval_runtime": 66.8019,
|
| 871 |
+
"eval_samples_per_second": 125.505,
|
| 872 |
+
"eval_steps_per_second": 7.844,
|
| 873 |
+
"step": 6022
|
| 874 |
+
},
|
| 875 |
+
{
|
| 876 |
+
"epoch": 2.009299236134175,
|
| 877 |
+
"grad_norm": 0.005393081344664097,
|
| 878 |
+
"learning_rate": 2.9888471275881413e-05,
|
| 879 |
+
"loss": 0.0005,
|
| 880 |
+
"step": 6050
|
| 881 |
+
},
|
| 882 |
+
{
|
| 883 |
+
"epoch": 2.025905014945201,
|
| 884 |
+
"grad_norm": 0.0005983946030028164,
|
| 885 |
+
"learning_rate": 2.9789022046907298e-05,
|
| 886 |
+
"loss": 0.007,
|
| 887 |
+
"step": 6100
|
| 888 |
+
},
|
| 889 |
+
{
|
| 890 |
+
"epoch": 2.042510793756227,
|
| 891 |
+
"grad_norm": 0.009896568953990936,
|
| 892 |
+
"learning_rate": 2.968957281793318e-05,
|
| 893 |
+
"loss": 0.0126,
|
| 894 |
+
"step": 6150
|
| 895 |
+
},
|
| 896 |
+
{
|
| 897 |
+
"epoch": 2.0591165725672536,
|
| 898 |
+
"grad_norm": 0.0005078279646113515,
|
| 899 |
+
"learning_rate": 2.9590123588959063e-05,
|
| 900 |
+
"loss": 0.0036,
|
| 901 |
+
"step": 6200
|
| 902 |
+
},
|
| 903 |
+
{
|
| 904 |
+
"epoch": 2.0757223513782797,
|
| 905 |
+
"grad_norm": 0.003000972094014287,
|
| 906 |
+
"learning_rate": 2.949067435998494e-05,
|
| 907 |
+
"loss": 0.0084,
|
| 908 |
+
"step": 6250
|
| 909 |
+
},
|
| 910 |
+
{
|
| 911 |
+
"epoch": 2.0923281301893057,
|
| 912 |
+
"grad_norm": 0.000705482205376029,
|
| 913 |
+
"learning_rate": 2.9391225131010826e-05,
|
| 914 |
+
"loss": 0.0114,
|
| 915 |
+
"step": 6300
|
| 916 |
+
},
|
| 917 |
+
{
|
| 918 |
+
"epoch": 2.1089339090003323,
|
| 919 |
+
"grad_norm": 0.0010040552588179708,
|
| 920 |
+
"learning_rate": 2.929177590203671e-05,
|
| 921 |
+
"loss": 0.0001,
|
| 922 |
+
"step": 6350
|
| 923 |
+
},
|
| 924 |
+
{
|
| 925 |
+
"epoch": 2.1255396878113584,
|
| 926 |
+
"grad_norm": 0.00919084157794714,
|
| 927 |
+
"learning_rate": 2.919232667306259e-05,
|
| 928 |
+
"loss": 0.0132,
|
| 929 |
+
"step": 6400
|
| 930 |
+
},
|
| 931 |
+
{
|
| 932 |
+
"epoch": 2.1421454666223845,
|
| 933 |
+
"grad_norm": 0.0013779608998447657,
|
| 934 |
+
"learning_rate": 2.9092877444088476e-05,
|
| 935 |
+
"loss": 0.0005,
|
| 936 |
+
"step": 6450
|
| 937 |
+
},
|
| 938 |
+
{
|
| 939 |
+
"epoch": 2.158751245433411,
|
| 940 |
+
"grad_norm": 0.0005553108640015125,
|
| 941 |
+
"learning_rate": 2.8993428215114353e-05,
|
| 942 |
+
"loss": 0.0024,
|
| 943 |
+
"step": 6500
|
| 944 |
+
},
|
| 945 |
+
{
|
| 946 |
+
"epoch": 2.175357024244437,
|
| 947 |
+
"grad_norm": 0.0021922625601291656,
|
| 948 |
+
"learning_rate": 2.8893978986140238e-05,
|
| 949 |
+
"loss": 0.0001,
|
| 950 |
+
"step": 6550
|
| 951 |
+
},
|
| 952 |
+
{
|
| 953 |
+
"epoch": 2.191962803055463,
|
| 954 |
+
"grad_norm": 0.0002919725957326591,
|
| 955 |
+
"learning_rate": 2.879452975716612e-05,
|
| 956 |
+
"loss": 0.0016,
|
| 957 |
+
"step": 6600
|
| 958 |
+
},
|
| 959 |
+
{
|
| 960 |
+
"epoch": 2.2085685818664897,
|
| 961 |
+
"grad_norm": 0.0009692521998658776,
|
| 962 |
+
"learning_rate": 2.8695080528192003e-05,
|
| 963 |
+
"loss": 0.0,
|
| 964 |
+
"step": 6650
|
| 965 |
+
},
|
| 966 |
+
{
|
| 967 |
+
"epoch": 2.225174360677516,
|
| 968 |
+
"grad_norm": 0.0002649214584380388,
|
| 969 |
+
"learning_rate": 2.859563129921788e-05,
|
| 970 |
+
"loss": 0.0065,
|
| 971 |
+
"step": 6700
|
| 972 |
+
},
|
| 973 |
+
{
|
| 974 |
+
"epoch": 2.241780139488542,
|
| 975 |
+
"grad_norm": 0.00046473185648210347,
|
| 976 |
+
"learning_rate": 2.8496182070243765e-05,
|
| 977 |
+
"loss": 0.0045,
|
| 978 |
+
"step": 6750
|
| 979 |
+
},
|
| 980 |
+
{
|
| 981 |
+
"epoch": 2.2583859182995685,
|
| 982 |
+
"grad_norm": 0.00027421273989602923,
|
| 983 |
+
"learning_rate": 2.8396732841269647e-05,
|
| 984 |
+
"loss": 0.0001,
|
| 985 |
+
"step": 6800
|
| 986 |
+
},
|
| 987 |
+
{
|
| 988 |
+
"epoch": 2.2749916971105946,
|
| 989 |
+
"grad_norm": 0.0005349958664737642,
|
| 990 |
+
"learning_rate": 2.829728361229553e-05,
|
| 991 |
+
"loss": 0.0064,
|
| 992 |
+
"step": 6850
|
| 993 |
+
},
|
| 994 |
+
{
|
| 995 |
+
"epoch": 2.2915974759216207,
|
| 996 |
+
"grad_norm": 0.013357802294194698,
|
| 997 |
+
"learning_rate": 2.819783438332141e-05,
|
| 998 |
+
"loss": 0.0166,
|
| 999 |
+
"step": 6900
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"epoch": 2.308203254732647,
|
| 1003 |
+
"grad_norm": 0.00018077288405038416,
|
| 1004 |
+
"learning_rate": 2.8098385154347293e-05,
|
| 1005 |
+
"loss": 0.0003,
|
| 1006 |
+
"step": 6950
|
| 1007 |
+
},
|
| 1008 |
+
{
|
| 1009 |
+
"epoch": 2.3248090335436733,
|
| 1010 |
+
"grad_norm": 0.00022416921274270862,
|
| 1011 |
+
"learning_rate": 2.7998935925373174e-05,
|
| 1012 |
+
"loss": 0.0258,
|
| 1013 |
+
"step": 7000
|
| 1014 |
+
},
|
| 1015 |
+
{
|
| 1016 |
+
"epoch": 2.3414148123546994,
|
| 1017 |
+
"grad_norm": 0.0035074173938483,
|
| 1018 |
+
"learning_rate": 2.789948669639906e-05,
|
| 1019 |
+
"loss": 0.0235,
|
| 1020 |
+
"step": 7050
|
| 1021 |
+
},
|
| 1022 |
+
{
|
| 1023 |
+
"epoch": 2.3580205911657255,
|
| 1024 |
+
"grad_norm": 0.03189694508910179,
|
| 1025 |
+
"learning_rate": 2.7800037467424936e-05,
|
| 1026 |
+
"loss": 0.0092,
|
| 1027 |
+
"step": 7100
|
| 1028 |
+
},
|
| 1029 |
+
{
|
| 1030 |
+
"epoch": 2.374626369976752,
|
| 1031 |
+
"grad_norm": 0.003983413800597191,
|
| 1032 |
+
"learning_rate": 2.770058823845082e-05,
|
| 1033 |
+
"loss": 0.0178,
|
| 1034 |
+
"step": 7150
|
| 1035 |
+
},
|
| 1036 |
+
{
|
| 1037 |
+
"epoch": 2.391232148787778,
|
| 1038 |
+
"grad_norm": 0.002308150054886937,
|
| 1039 |
+
"learning_rate": 2.7601139009476705e-05,
|
| 1040 |
+
"loss": 0.0038,
|
| 1041 |
+
"step": 7200
|
| 1042 |
+
},
|
| 1043 |
+
{
|
| 1044 |
+
"epoch": 2.407837927598804,
|
| 1045 |
+
"grad_norm": 0.0013349172659218311,
|
| 1046 |
+
"learning_rate": 2.7501689780502583e-05,
|
| 1047 |
+
"loss": 0.001,
|
| 1048 |
+
"step": 7250
|
| 1049 |
+
},
|
| 1050 |
+
{
|
| 1051 |
+
"epoch": 2.4244437064098308,
|
| 1052 |
+
"grad_norm": 0.000329138885717839,
|
| 1053 |
+
"learning_rate": 2.7402240551528467e-05,
|
| 1054 |
+
"loss": 0.0,
|
| 1055 |
+
"step": 7300
|
| 1056 |
+
},
|
| 1057 |
+
{
|
| 1058 |
+
"epoch": 2.441049485220857,
|
| 1059 |
+
"grad_norm": 0.0008932758355513215,
|
| 1060 |
+
"learning_rate": 2.730279132255435e-05,
|
| 1061 |
+
"loss": 0.0,
|
| 1062 |
+
"step": 7350
|
| 1063 |
+
},
|
| 1064 |
+
{
|
| 1065 |
+
"epoch": 2.457655264031883,
|
| 1066 |
+
"grad_norm": 0.0005681042093783617,
|
| 1067 |
+
"learning_rate": 2.7203342093580233e-05,
|
| 1068 |
+
"loss": 0.0,
|
| 1069 |
+
"step": 7400
|
| 1070 |
+
},
|
| 1071 |
+
{
|
| 1072 |
+
"epoch": 2.4742610428429095,
|
| 1073 |
+
"grad_norm": 0.00035528288572095335,
|
| 1074 |
+
"learning_rate": 2.710389286460611e-05,
|
| 1075 |
+
"loss": 0.0006,
|
| 1076 |
+
"step": 7450
|
| 1077 |
+
},
|
| 1078 |
+
{
|
| 1079 |
+
"epoch": 2.4908668216539356,
|
| 1080 |
+
"grad_norm": 0.0064411167986691,
|
| 1081 |
+
"learning_rate": 2.7004443635631995e-05,
|
| 1082 |
+
"loss": 0.0057,
|
| 1083 |
+
"step": 7500
|
| 1084 |
+
},
|
| 1085 |
+
{
|
| 1086 |
+
"epoch": 2.5074726004649617,
|
| 1087 |
+
"grad_norm": 0.004782046191394329,
|
| 1088 |
+
"learning_rate": 2.6904994406657876e-05,
|
| 1089 |
+
"loss": 0.0104,
|
| 1090 |
+
"step": 7550
|
| 1091 |
+
},
|
| 1092 |
+
{
|
| 1093 |
+
"epoch": 2.5240783792759878,
|
| 1094 |
+
"grad_norm": 0.007697090040892363,
|
| 1095 |
+
"learning_rate": 2.680554517768376e-05,
|
| 1096 |
+
"loss": 0.0194,
|
| 1097 |
+
"step": 7600
|
| 1098 |
+
},
|
| 1099 |
+
{
|
| 1100 |
+
"epoch": 2.5406841580870143,
|
| 1101 |
+
"grad_norm": 0.0027768309228122234,
|
| 1102 |
+
"learning_rate": 2.670609594870964e-05,
|
| 1103 |
+
"loss": 0.0003,
|
| 1104 |
+
"step": 7650
|
| 1105 |
+
},
|
| 1106 |
+
{
|
| 1107 |
+
"epoch": 2.5572899368980404,
|
| 1108 |
+
"grad_norm": 0.004415275063365698,
|
| 1109 |
+
"learning_rate": 2.6606646719735523e-05,
|
| 1110 |
+
"loss": 0.0008,
|
| 1111 |
+
"step": 7700
|
| 1112 |
+
},
|
| 1113 |
+
{
|
| 1114 |
+
"epoch": 2.5738957157090665,
|
| 1115 |
+
"grad_norm": 0.0020732246339321136,
|
| 1116 |
+
"learning_rate": 2.6507197490761404e-05,
|
| 1117 |
+
"loss": 0.0099,
|
| 1118 |
+
"step": 7750
|
| 1119 |
+
},
|
| 1120 |
+
{
|
| 1121 |
+
"epoch": 2.590501494520093,
|
| 1122 |
+
"grad_norm": 0.01230535376816988,
|
| 1123 |
+
"learning_rate": 2.640774826178729e-05,
|
| 1124 |
+
"loss": 0.0013,
|
| 1125 |
+
"step": 7800
|
| 1126 |
+
},
|
| 1127 |
+
{
|
| 1128 |
+
"epoch": 2.607107273331119,
|
| 1129 |
+
"grad_norm": 0.00023796973982825875,
|
| 1130 |
+
"learning_rate": 2.6308299032813166e-05,
|
| 1131 |
+
"loss": 0.0001,
|
| 1132 |
+
"step": 7850
|
| 1133 |
+
},
|
| 1134 |
+
{
|
| 1135 |
+
"epoch": 2.6237130521421452,
|
| 1136 |
+
"grad_norm": 0.0007237173849716783,
|
| 1137 |
+
"learning_rate": 2.620884980383905e-05,
|
| 1138 |
+
"loss": 0.0,
|
| 1139 |
+
"step": 7900
|
| 1140 |
+
},
|
| 1141 |
+
{
|
| 1142 |
+
"epoch": 2.6403188309531718,
|
| 1143 |
+
"grad_norm": 0.012582485564053059,
|
| 1144 |
+
"learning_rate": 2.6109400574864935e-05,
|
| 1145 |
+
"loss": 0.0137,
|
| 1146 |
+
"step": 7950
|
| 1147 |
+
},
|
| 1148 |
+
{
|
| 1149 |
+
"epoch": 2.656924609764198,
|
| 1150 |
+
"grad_norm": 0.004291553515940905,
|
| 1151 |
+
"learning_rate": 2.6009951345890816e-05,
|
| 1152 |
+
"loss": 0.0004,
|
| 1153 |
+
"step": 8000
|
| 1154 |
+
},
|
| 1155 |
+
{
|
| 1156 |
+
"epoch": 2.673530388575224,
|
| 1157 |
+
"grad_norm": 0.001662308000959456,
|
| 1158 |
+
"learning_rate": 2.59105021169167e-05,
|
| 1159 |
+
"loss": 0.0001,
|
| 1160 |
+
"step": 8050
|
| 1161 |
+
},
|
| 1162 |
+
{
|
| 1163 |
+
"epoch": 2.6901361673862505,
|
| 1164 |
+
"grad_norm": 0.0007901078206487,
|
| 1165 |
+
"learning_rate": 2.5811052887942578e-05,
|
| 1166 |
+
"loss": 0.0001,
|
| 1167 |
+
"step": 8100
|
| 1168 |
+
},
|
| 1169 |
+
{
|
| 1170 |
+
"epoch": 2.7067419461972766,
|
| 1171 |
+
"grad_norm": 0.0007069796556606889,
|
| 1172 |
+
"learning_rate": 2.5711603658968463e-05,
|
| 1173 |
+
"loss": 0.0001,
|
| 1174 |
+
"step": 8150
|
| 1175 |
+
},
|
| 1176 |
+
{
|
| 1177 |
+
"epoch": 2.7233477250083027,
|
| 1178 |
+
"grad_norm": 0.0006272114696912467,
|
| 1179 |
+
"learning_rate": 2.5612154429994344e-05,
|
| 1180 |
+
"loss": 0.0,
|
| 1181 |
+
"step": 8200
|
| 1182 |
+
},
|
| 1183 |
+
{
|
| 1184 |
+
"epoch": 2.7399535038193292,
|
| 1185 |
+
"grad_norm": 0.0008849167497828603,
|
| 1186 |
+
"learning_rate": 2.5512705201020228e-05,
|
| 1187 |
+
"loss": 0.0108,
|
| 1188 |
+
"step": 8250
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"epoch": 2.7565592826303553,
|
| 1192 |
+
"grad_norm": 0.0023204211611300707,
|
| 1193 |
+
"learning_rate": 2.5413255972046106e-05,
|
| 1194 |
+
"loss": 0.0015,
|
| 1195 |
+
"step": 8300
|
| 1196 |
+
},
|
| 1197 |
+
{
|
| 1198 |
+
"epoch": 2.7731650614413814,
|
| 1199 |
+
"grad_norm": 0.0006785548175685108,
|
| 1200 |
+
"learning_rate": 2.531380674307199e-05,
|
| 1201 |
+
"loss": 0.0001,
|
| 1202 |
+
"step": 8350
|
| 1203 |
+
},
|
| 1204 |
+
{
|
| 1205 |
+
"epoch": 2.789770840252408,
|
| 1206 |
+
"grad_norm": 0.0012301064562052488,
|
| 1207 |
+
"learning_rate": 2.521435751409787e-05,
|
| 1208 |
+
"loss": 0.0001,
|
| 1209 |
+
"step": 8400
|
| 1210 |
+
},
|
| 1211 |
+
{
|
| 1212 |
+
"epoch": 2.806376619063434,
|
| 1213 |
+
"grad_norm": 0.0012826485326513648,
|
| 1214 |
+
"learning_rate": 2.5114908285123756e-05,
|
| 1215 |
+
"loss": 0.0005,
|
| 1216 |
+
"step": 8450
|
| 1217 |
+
},
|
| 1218 |
+
{
|
| 1219 |
+
"epoch": 2.82298239787446,
|
| 1220 |
+
"grad_norm": 0.00045186831266619265,
|
| 1221 |
+
"learning_rate": 2.5015459056149634e-05,
|
| 1222 |
+
"loss": 0.0,
|
| 1223 |
+
"step": 8500
|
| 1224 |
+
},
|
| 1225 |
+
{
|
| 1226 |
+
"epoch": 2.8395881766854867,
|
| 1227 |
+
"grad_norm": 0.004168716724961996,
|
| 1228 |
+
"learning_rate": 2.4916009827175518e-05,
|
| 1229 |
+
"loss": 0.0113,
|
| 1230 |
+
"step": 8550
|
| 1231 |
+
},
|
| 1232 |
+
{
|
| 1233 |
+
"epoch": 2.856193955496513,
|
| 1234 |
+
"grad_norm": 0.0053441463969647884,
|
| 1235 |
+
"learning_rate": 2.48165605982014e-05,
|
| 1236 |
+
"loss": 0.0148,
|
| 1237 |
+
"step": 8600
|
| 1238 |
+
},
|
| 1239 |
+
{
|
| 1240 |
+
"epoch": 2.872799734307539,
|
| 1241 |
+
"grad_norm": 0.002283324720337987,
|
| 1242 |
+
"learning_rate": 2.471711136922728e-05,
|
| 1243 |
+
"loss": 0.0001,
|
| 1244 |
+
"step": 8650
|
| 1245 |
+
},
|
| 1246 |
+
{
|
| 1247 |
+
"epoch": 2.8894055131185654,
|
| 1248 |
+
"grad_norm": 0.0046396502293646336,
|
| 1249 |
+
"learning_rate": 2.461766214025316e-05,
|
| 1250 |
+
"loss": 0.009,
|
| 1251 |
+
"step": 8700
|
| 1252 |
+
},
|
| 1253 |
+
{
|
| 1254 |
+
"epoch": 2.9060112919295915,
|
| 1255 |
+
"grad_norm": 0.01257924735546112,
|
| 1256 |
+
"learning_rate": 2.4518212911279046e-05,
|
| 1257 |
+
"loss": 0.015,
|
| 1258 |
+
"step": 8750
|
| 1259 |
+
},
|
| 1260 |
+
{
|
| 1261 |
+
"epoch": 2.9226170707406176,
|
| 1262 |
+
"grad_norm": 0.00245088548399508,
|
| 1263 |
+
"learning_rate": 2.441876368230493e-05,
|
| 1264 |
+
"loss": 0.0136,
|
| 1265 |
+
"step": 8800
|
| 1266 |
+
},
|
| 1267 |
+
{
|
| 1268 |
+
"epoch": 2.939222849551644,
|
| 1269 |
+
"grad_norm": 0.005676699336618185,
|
| 1270 |
+
"learning_rate": 2.4319314453330808e-05,
|
| 1271 |
+
"loss": 0.005,
|
| 1272 |
+
"step": 8850
|
| 1273 |
+
},
|
| 1274 |
+
{
|
| 1275 |
+
"epoch": 2.9558286283626702,
|
| 1276 |
+
"grad_norm": 0.0012465333566069603,
|
| 1277 |
+
"learning_rate": 2.4219865224356692e-05,
|
| 1278 |
+
"loss": 0.0002,
|
| 1279 |
+
"step": 8900
|
| 1280 |
+
},
|
| 1281 |
+
{
|
| 1282 |
+
"epoch": 2.9724344071736963,
|
| 1283 |
+
"grad_norm": 0.0007043919176794589,
|
| 1284 |
+
"learning_rate": 2.4120415995382573e-05,
|
| 1285 |
+
"loss": 0.0001,
|
| 1286 |
+
"step": 8950
|
| 1287 |
+
},
|
| 1288 |
+
{
|
| 1289 |
+
"epoch": 2.989040185984723,
|
| 1290 |
+
"grad_norm": 0.007725800387561321,
|
| 1291 |
+
"learning_rate": 2.4020966766408458e-05,
|
| 1292 |
+
"loss": 0.0,
|
| 1293 |
+
"step": 9000
|
| 1294 |
+
},
|
| 1295 |
+
{
|
| 1296 |
+
"epoch": 3.0,
|
| 1297 |
+
"eval_accuracy": 0.9971374045801527,
|
| 1298 |
+
"eval_f1": 0.9971159280148129,
|
| 1299 |
+
"eval_loss": 0.030433131381869316,
|
| 1300 |
+
"eval_precision": 0.9971134119828018,
|
| 1301 |
+
"eval_recall": 0.9971374045801527,
|
| 1302 |
+
"eval_runtime": 66.6186,
|
| 1303 |
+
"eval_samples_per_second": 125.851,
|
| 1304 |
+
"eval_steps_per_second": 7.866,
|
| 1305 |
+
"step": 9033
|
| 1306 |
+
},
|
| 1307 |
+
{
|
| 1308 |
+
"epoch": 3.005645964795749,
|
| 1309 |
+
"grad_norm": 0.015563803724944592,
|
| 1310 |
+
"learning_rate": 2.3921517537434336e-05,
|
| 1311 |
+
"loss": 0.0121,
|
| 1312 |
+
"step": 9050
|
| 1313 |
+
},
|
| 1314 |
+
{
|
| 1315 |
+
"epoch": 3.022251743606775,
|
| 1316 |
+
"grad_norm": 0.0009268106077797711,
|
| 1317 |
+
"learning_rate": 2.382206830846022e-05,
|
| 1318 |
+
"loss": 0.002,
|
| 1319 |
+
"step": 9100
|
| 1320 |
+
},
|
| 1321 |
+
{
|
| 1322 |
+
"epoch": 3.0388575224178016,
|
| 1323 |
+
"grad_norm": 0.003995839972048998,
|
| 1324 |
+
"learning_rate": 2.37226190794861e-05,
|
| 1325 |
+
"loss": 0.0001,
|
| 1326 |
+
"step": 9150
|
| 1327 |
+
},
|
| 1328 |
+
{
|
| 1329 |
+
"epoch": 3.0554633012288277,
|
| 1330 |
+
"grad_norm": 0.0004984468687325716,
|
| 1331 |
+
"learning_rate": 2.3623169850511986e-05,
|
| 1332 |
+
"loss": 0.0001,
|
| 1333 |
+
"step": 9200
|
| 1334 |
+
},
|
| 1335 |
+
{
|
| 1336 |
+
"epoch": 3.072069080039854,
|
| 1337 |
+
"grad_norm": 0.0007679828559048474,
|
| 1338 |
+
"learning_rate": 2.3523720621537863e-05,
|
| 1339 |
+
"loss": 0.0,
|
| 1340 |
+
"step": 9250
|
| 1341 |
+
},
|
| 1342 |
+
{
|
| 1343 |
+
"epoch": 3.0886748588508803,
|
| 1344 |
+
"grad_norm": 0.0004945154651068151,
|
| 1345 |
+
"learning_rate": 2.3424271392563748e-05,
|
| 1346 |
+
"loss": 0.0,
|
| 1347 |
+
"step": 9300
|
| 1348 |
+
},
|
| 1349 |
+
{
|
| 1350 |
+
"epoch": 3.1052806376619064,
|
| 1351 |
+
"grad_norm": 0.00045879624667577446,
|
| 1352 |
+
"learning_rate": 2.332482216358963e-05,
|
| 1353 |
+
"loss": 0.0,
|
| 1354 |
+
"step": 9350
|
| 1355 |
+
},
|
| 1356 |
+
{
|
| 1357 |
+
"epoch": 3.1218864164729325,
|
| 1358 |
+
"grad_norm": 0.0004470401909202337,
|
| 1359 |
+
"learning_rate": 2.3225372934615513e-05,
|
| 1360 |
+
"loss": 0.0,
|
| 1361 |
+
"step": 9400
|
| 1362 |
+
},
|
| 1363 |
+
{
|
| 1364 |
+
"epoch": 3.1384921952839586,
|
| 1365 |
+
"grad_norm": 0.00023802775831427425,
|
| 1366 |
+
"learning_rate": 2.312592370564139e-05,
|
| 1367 |
+
"loss": 0.0,
|
| 1368 |
+
"step": 9450
|
| 1369 |
+
},
|
| 1370 |
+
{
|
| 1371 |
+
"epoch": 3.155097974094985,
|
| 1372 |
+
"grad_norm": 0.00043214907054789364,
|
| 1373 |
+
"learning_rate": 2.3026474476667275e-05,
|
| 1374 |
+
"loss": 0.0,
|
| 1375 |
+
"step": 9500
|
| 1376 |
+
},
|
| 1377 |
+
{
|
| 1378 |
+
"epoch": 3.1717037529060113,
|
| 1379 |
+
"grad_norm": 0.00024678235058672726,
|
| 1380 |
+
"learning_rate": 2.292702524769316e-05,
|
| 1381 |
+
"loss": 0.0007,
|
| 1382 |
+
"step": 9550
|
| 1383 |
+
},
|
| 1384 |
+
{
|
| 1385 |
+
"epoch": 3.1883095317170373,
|
| 1386 |
+
"grad_norm": 0.0003429341595619917,
|
| 1387 |
+
"learning_rate": 2.282757601871904e-05,
|
| 1388 |
+
"loss": 0.0,
|
| 1389 |
+
"step": 9600
|
| 1390 |
+
},
|
| 1391 |
+
{
|
| 1392 |
+
"epoch": 3.204915310528064,
|
| 1393 |
+
"grad_norm": 0.00019195489585399628,
|
| 1394 |
+
"learning_rate": 2.2728126789744925e-05,
|
| 1395 |
+
"loss": 0.0001,
|
| 1396 |
+
"step": 9650
|
| 1397 |
+
},
|
| 1398 |
+
{
|
| 1399 |
+
"epoch": 3.22152108933909,
|
| 1400 |
+
"grad_norm": 0.00021821403061039746,
|
| 1401 |
+
"learning_rate": 2.2628677560770803e-05,
|
| 1402 |
+
"loss": 0.0,
|
| 1403 |
+
"step": 9700
|
| 1404 |
+
},
|
| 1405 |
+
{
|
| 1406 |
+
"epoch": 3.238126868150116,
|
| 1407 |
+
"grad_norm": 0.0004384716448839754,
|
| 1408 |
+
"learning_rate": 2.2529228331796688e-05,
|
| 1409 |
+
"loss": 0.0,
|
| 1410 |
+
"step": 9750
|
| 1411 |
+
},
|
| 1412 |
+
{
|
| 1413 |
+
"epoch": 3.2547326469611426,
|
| 1414 |
+
"grad_norm": 0.00017058267258107662,
|
| 1415 |
+
"learning_rate": 2.242977910282257e-05,
|
| 1416 |
+
"loss": 0.0,
|
| 1417 |
+
"step": 9800
|
| 1418 |
+
},
|
| 1419 |
+
{
|
| 1420 |
+
"epoch": 3.2713384257721687,
|
| 1421 |
+
"grad_norm": 0.0007424860959872603,
|
| 1422 |
+
"learning_rate": 2.2330329873848453e-05,
|
| 1423 |
+
"loss": 0.0162,
|
| 1424 |
+
"step": 9850
|
| 1425 |
+
},
|
| 1426 |
+
{
|
| 1427 |
+
"epoch": 3.287944204583195,
|
| 1428 |
+
"grad_norm": 0.0007187157752923667,
|
| 1429 |
+
"learning_rate": 2.223088064487433e-05,
|
| 1430 |
+
"loss": 0.0114,
|
| 1431 |
+
"step": 9900
|
| 1432 |
+
},
|
| 1433 |
+
{
|
| 1434 |
+
"epoch": 3.3045499833942213,
|
| 1435 |
+
"grad_norm": 0.0006962522747926414,
|
| 1436 |
+
"learning_rate": 2.2131431415900215e-05,
|
| 1437 |
+
"loss": 0.003,
|
| 1438 |
+
"step": 9950
|
| 1439 |
+
},
|
| 1440 |
+
{
|
| 1441 |
+
"epoch": 3.3211557622052474,
|
| 1442 |
+
"grad_norm": 0.0007819280726835132,
|
| 1443 |
+
"learning_rate": 2.2031982186926096e-05,
|
| 1444 |
+
"loss": 0.005,
|
| 1445 |
+
"step": 10000
|
| 1446 |
+
},
|
| 1447 |
+
{
|
| 1448 |
+
"epoch": 3.3377615410162735,
|
| 1449 |
+
"grad_norm": 0.00020613332162611187,
|
| 1450 |
+
"learning_rate": 2.1932532957951977e-05,
|
| 1451 |
+
"loss": 0.0002,
|
| 1452 |
+
"step": 10050
|
| 1453 |
+
},
|
| 1454 |
+
{
|
| 1455 |
+
"epoch": 3.3543673198273,
|
| 1456 |
+
"grad_norm": 0.000297825870802626,
|
| 1457 |
+
"learning_rate": 2.183308372897786e-05,
|
| 1458 |
+
"loss": 0.0,
|
| 1459 |
+
"step": 10100
|
| 1460 |
+
},
|
| 1461 |
+
{
|
| 1462 |
+
"epoch": 3.370973098638326,
|
| 1463 |
+
"grad_norm": 7.085188553901389e-05,
|
| 1464 |
+
"learning_rate": 2.1733634500003743e-05,
|
| 1465 |
+
"loss": 0.0002,
|
| 1466 |
+
"step": 10150
|
| 1467 |
+
},
|
| 1468 |
+
{
|
| 1469 |
+
"epoch": 3.3875788774493523,
|
| 1470 |
+
"grad_norm": 0.11117817461490631,
|
| 1471 |
+
"learning_rate": 2.1634185271029624e-05,
|
| 1472 |
+
"loss": 0.0196,
|
| 1473 |
+
"step": 10200
|
| 1474 |
+
},
|
| 1475 |
+
{
|
| 1476 |
+
"epoch": 3.404184656260379,
|
| 1477 |
+
"grad_norm": 0.0007934737950563431,
|
| 1478 |
+
"learning_rate": 2.1534736042055505e-05,
|
| 1479 |
+
"loss": 0.0063,
|
| 1480 |
+
"step": 10250
|
| 1481 |
+
},
|
| 1482 |
+
{
|
| 1483 |
+
"epoch": 3.420790435071405,
|
| 1484 |
+
"grad_norm": 0.0008382880478166044,
|
| 1485 |
+
"learning_rate": 2.1435286813081386e-05,
|
| 1486 |
+
"loss": 0.0003,
|
| 1487 |
+
"step": 10300
|
| 1488 |
+
},
|
| 1489 |
+
{
|
| 1490 |
+
"epoch": 3.437396213882431,
|
| 1491 |
+
"grad_norm": 0.00943814031779766,
|
| 1492 |
+
"learning_rate": 2.133583758410727e-05,
|
| 1493 |
+
"loss": 0.0229,
|
| 1494 |
+
"step": 10350
|
| 1495 |
+
},
|
| 1496 |
+
{
|
| 1497 |
+
"epoch": 3.454001992693457,
|
| 1498 |
+
"grad_norm": 0.0025139478966593742,
|
| 1499 |
+
"learning_rate": 2.1236388355133155e-05,
|
| 1500 |
+
"loss": 0.0135,
|
| 1501 |
+
"step": 10400
|
| 1502 |
+
},
|
| 1503 |
+
{
|
| 1504 |
+
"epoch": 3.4706077715044836,
|
| 1505 |
+
"grad_norm": 0.0012301671085879207,
|
| 1506 |
+
"learning_rate": 2.1136939126159033e-05,
|
| 1507 |
+
"loss": 0.0112,
|
| 1508 |
+
"step": 10450
|
| 1509 |
+
},
|
| 1510 |
+
{
|
| 1511 |
+
"epoch": 3.4872135503155097,
|
| 1512 |
+
"grad_norm": 0.0006923259934410453,
|
| 1513 |
+
"learning_rate": 2.1037489897184917e-05,
|
| 1514 |
+
"loss": 0.0023,
|
| 1515 |
+
"step": 10500
|
| 1516 |
+
},
|
| 1517 |
+
{
|
| 1518 |
+
"epoch": 3.503819329126536,
|
| 1519 |
+
"grad_norm": 0.00143510103225708,
|
| 1520 |
+
"learning_rate": 2.09380406682108e-05,
|
| 1521 |
+
"loss": 0.0,
|
| 1522 |
+
"step": 10550
|
| 1523 |
+
},
|
| 1524 |
+
{
|
| 1525 |
+
"epoch": 3.5204251079375624,
|
| 1526 |
+
"grad_norm": 0.0003266233834438026,
|
| 1527 |
+
"learning_rate": 2.083859143923668e-05,
|
| 1528 |
+
"loss": 0.0001,
|
| 1529 |
+
"step": 10600
|
| 1530 |
+
},
|
| 1531 |
+
{
|
| 1532 |
+
"epoch": 3.5370308867485885,
|
| 1533 |
+
"grad_norm": 0.0002165662735933438,
|
| 1534 |
+
"learning_rate": 2.073914221026256e-05,
|
| 1535 |
+
"loss": 0.0001,
|
| 1536 |
+
"step": 10650
|
| 1537 |
+
},
|
| 1538 |
+
{
|
| 1539 |
+
"epoch": 3.5536366655596145,
|
| 1540 |
+
"grad_norm": 0.000500050897244364,
|
| 1541 |
+
"learning_rate": 2.0639692981288445e-05,
|
| 1542 |
+
"loss": 0.0053,
|
| 1543 |
+
"step": 10700
|
| 1544 |
+
},
|
| 1545 |
+
{
|
| 1546 |
+
"epoch": 3.570242444370641,
|
| 1547 |
+
"grad_norm": 0.00033526355400681496,
|
| 1548 |
+
"learning_rate": 2.0540243752314326e-05,
|
| 1549 |
+
"loss": 0.0001,
|
| 1550 |
+
"step": 10750
|
| 1551 |
+
},
|
| 1552 |
+
{
|
| 1553 |
+
"epoch": 3.586848223181667,
|
| 1554 |
+
"grad_norm": 0.000240692708757706,
|
| 1555 |
+
"learning_rate": 2.044079452334021e-05,
|
| 1556 |
+
"loss": 0.0,
|
| 1557 |
+
"step": 10800
|
| 1558 |
+
},
|
| 1559 |
+
{
|
| 1560 |
+
"epoch": 3.6034540019926933,
|
| 1561 |
+
"grad_norm": 0.0011404575780034065,
|
| 1562 |
+
"learning_rate": 2.034134529436609e-05,
|
| 1563 |
+
"loss": 0.0065,
|
| 1564 |
+
"step": 10850
|
| 1565 |
+
},
|
| 1566 |
+
{
|
| 1567 |
+
"epoch": 3.62005978080372,
|
| 1568 |
+
"grad_norm": 0.0008779458003118634,
|
| 1569 |
+
"learning_rate": 2.0241896065391973e-05,
|
| 1570 |
+
"loss": 0.0,
|
| 1571 |
+
"step": 10900
|
| 1572 |
+
},
|
| 1573 |
+
{
|
| 1574 |
+
"epoch": 3.636665559614746,
|
| 1575 |
+
"grad_norm": 0.000214042782317847,
|
| 1576 |
+
"learning_rate": 2.0142446836417854e-05,
|
| 1577 |
+
"loss": 0.0001,
|
| 1578 |
+
"step": 10950
|
| 1579 |
+
},
|
| 1580 |
+
{
|
| 1581 |
+
"epoch": 3.653271338425772,
|
| 1582 |
+
"grad_norm": 0.001132681965827942,
|
| 1583 |
+
"learning_rate": 2.0042997607443738e-05,
|
| 1584 |
+
"loss": 0.0001,
|
| 1585 |
+
"step": 11000
|
| 1586 |
+
},
|
| 1587 |
+
{
|
| 1588 |
+
"epoch": 3.6698771172367985,
|
| 1589 |
+
"grad_norm": 9.505786874797195e-05,
|
| 1590 |
+
"learning_rate": 1.994354837846962e-05,
|
| 1591 |
+
"loss": 0.0,
|
| 1592 |
+
"step": 11050
|
| 1593 |
+
},
|
| 1594 |
+
{
|
| 1595 |
+
"epoch": 3.6864828960478246,
|
| 1596 |
+
"grad_norm": 0.0014238933799788356,
|
| 1597 |
+
"learning_rate": 1.98440991494955e-05,
|
| 1598 |
+
"loss": 0.0108,
|
| 1599 |
+
"step": 11100
|
| 1600 |
+
},
|
| 1601 |
+
{
|
| 1602 |
+
"epoch": 3.7030886748588507,
|
| 1603 |
+
"grad_norm": 0.0021179679315537214,
|
| 1604 |
+
"learning_rate": 1.974464992052138e-05,
|
| 1605 |
+
"loss": 0.0004,
|
| 1606 |
+
"step": 11150
|
| 1607 |
+
},
|
| 1608 |
+
{
|
| 1609 |
+
"epoch": 3.7196944536698773,
|
| 1610 |
+
"grad_norm": 0.0012716053752228618,
|
| 1611 |
+
"learning_rate": 1.9645200691547266e-05,
|
| 1612 |
+
"loss": 0.0001,
|
| 1613 |
+
"step": 11200
|
| 1614 |
+
},
|
| 1615 |
+
{
|
| 1616 |
+
"epoch": 3.7363002324809034,
|
| 1617 |
+
"grad_norm": 0.0003722730907611549,
|
| 1618 |
+
"learning_rate": 1.9545751462573147e-05,
|
| 1619 |
+
"loss": 0.0001,
|
| 1620 |
+
"step": 11250
|
| 1621 |
+
},
|
| 1622 |
+
{
|
| 1623 |
+
"epoch": 3.7529060112919295,
|
| 1624 |
+
"grad_norm": 0.0004279191780369729,
|
| 1625 |
+
"learning_rate": 1.9446302233599028e-05,
|
| 1626 |
+
"loss": 0.0,
|
| 1627 |
+
"step": 11300
|
| 1628 |
+
},
|
| 1629 |
+
{
|
| 1630 |
+
"epoch": 3.769511790102956,
|
| 1631 |
+
"grad_norm": 0.00044420413905754685,
|
| 1632 |
+
"learning_rate": 1.934685300462491e-05,
|
| 1633 |
+
"loss": 0.0,
|
| 1634 |
+
"step": 11350
|
| 1635 |
+
},
|
| 1636 |
+
{
|
| 1637 |
+
"epoch": 3.786117568913982,
|
| 1638 |
+
"grad_norm": 0.0006720417295582592,
|
| 1639 |
+
"learning_rate": 1.9247403775650794e-05,
|
| 1640 |
+
"loss": 0.0001,
|
| 1641 |
+
"step": 11400
|
| 1642 |
+
},
|
| 1643 |
+
{
|
| 1644 |
+
"epoch": 3.802723347725008,
|
| 1645 |
+
"grad_norm": 0.00018277636263519526,
|
| 1646 |
+
"learning_rate": 1.9147954546676675e-05,
|
| 1647 |
+
"loss": 0.0,
|
| 1648 |
+
"step": 11450
|
| 1649 |
+
},
|
| 1650 |
+
{
|
| 1651 |
+
"epoch": 3.8193291265360347,
|
| 1652 |
+
"grad_norm": 0.0033536076080054045,
|
| 1653 |
+
"learning_rate": 1.904850531770256e-05,
|
| 1654 |
+
"loss": 0.0,
|
| 1655 |
+
"step": 11500
|
| 1656 |
+
},
|
| 1657 |
+
{
|
| 1658 |
+
"epoch": 3.835934905347061,
|
| 1659 |
+
"grad_norm": 0.00032338136225007474,
|
| 1660 |
+
"learning_rate": 1.894905608872844e-05,
|
| 1661 |
+
"loss": 0.0,
|
| 1662 |
+
"step": 11550
|
| 1663 |
+
},
|
| 1664 |
+
{
|
| 1665 |
+
"epoch": 3.852540684158087,
|
| 1666 |
+
"grad_norm": 8.492634515278041e-05,
|
| 1667 |
+
"learning_rate": 1.884960685975432e-05,
|
| 1668 |
+
"loss": 0.0,
|
| 1669 |
+
"step": 11600
|
| 1670 |
+
},
|
| 1671 |
+
{
|
| 1672 |
+
"epoch": 3.8691464629691135,
|
| 1673 |
+
"grad_norm": 0.00010078576451633126,
|
| 1674 |
+
"learning_rate": 1.8750157630780202e-05,
|
| 1675 |
+
"loss": 0.0,
|
| 1676 |
+
"step": 11650
|
| 1677 |
+
},
|
| 1678 |
+
{
|
| 1679 |
+
"epoch": 3.8857522417801396,
|
| 1680 |
+
"grad_norm": 0.0001655027299420908,
|
| 1681 |
+
"learning_rate": 1.8650708401806087e-05,
|
| 1682 |
+
"loss": 0.0,
|
| 1683 |
+
"step": 11700
|
| 1684 |
+
},
|
| 1685 |
+
{
|
| 1686 |
+
"epoch": 3.9023580205911657,
|
| 1687 |
+
"grad_norm": 0.00012061335291946307,
|
| 1688 |
+
"learning_rate": 1.8551259172831968e-05,
|
| 1689 |
+
"loss": 0.0,
|
| 1690 |
+
"step": 11750
|
| 1691 |
+
},
|
| 1692 |
+
{
|
| 1693 |
+
"epoch": 3.918963799402192,
|
| 1694 |
+
"grad_norm": 0.00044828641694039106,
|
| 1695 |
+
"learning_rate": 1.845180994385785e-05,
|
| 1696 |
+
"loss": 0.0,
|
| 1697 |
+
"step": 11800
|
| 1698 |
+
},
|
| 1699 |
+
{
|
| 1700 |
+
"epoch": 3.9355695782132183,
|
| 1701 |
+
"grad_norm": 5.701217378373258e-05,
|
| 1702 |
+
"learning_rate": 1.835236071488373e-05,
|
| 1703 |
+
"loss": 0.0,
|
| 1704 |
+
"step": 11850
|
| 1705 |
+
},
|
| 1706 |
+
{
|
| 1707 |
+
"epoch": 3.9521753570242444,
|
| 1708 |
+
"grad_norm": 5.7118391850963235e-05,
|
| 1709 |
+
"learning_rate": 1.8252911485909615e-05,
|
| 1710 |
+
"loss": 0.0,
|
| 1711 |
+
"step": 11900
|
| 1712 |
+
},
|
| 1713 |
+
{
|
| 1714 |
+
"epoch": 3.968781135835271,
|
| 1715 |
+
"grad_norm": 0.0007707001641392708,
|
| 1716 |
+
"learning_rate": 1.8153462256935496e-05,
|
| 1717 |
+
"loss": 0.0001,
|
| 1718 |
+
"step": 11950
|
| 1719 |
+
},
|
| 1720 |
+
{
|
| 1721 |
+
"epoch": 3.985386914646297,
|
| 1722 |
+
"grad_norm": 6.149195542093366e-05,
|
| 1723 |
+
"learning_rate": 1.8054013027961377e-05,
|
| 1724 |
+
"loss": 0.0,
|
| 1725 |
+
"step": 12000
|
| 1726 |
+
},
|
| 1727 |
+
{
|
| 1728 |
+
"epoch": 4.0,
|
| 1729 |
+
"eval_accuracy": 0.9973759541984732,
|
| 1730 |
+
"eval_f1": 0.9973482118175944,
|
| 1731 |
+
"eval_loss": 0.03656009957194328,
|
| 1732 |
+
"eval_precision": 0.9973582002448674,
|
| 1733 |
+
"eval_recall": 0.9973759541984732,
|
| 1734 |
+
"eval_runtime": 67.5118,
|
| 1735 |
+
"eval_samples_per_second": 124.186,
|
| 1736 |
+
"eval_steps_per_second": 7.762,
|
| 1737 |
+
"step": 12044
|
| 1738 |
+
},
|
| 1739 |
+
{
|
| 1740 |
+
"epoch": 4.001992693457323,
|
| 1741 |
+
"grad_norm": 5.8200610510539263e-05,
|
| 1742 |
+
"learning_rate": 1.7954563798987258e-05,
|
| 1743 |
+
"loss": 0.0,
|
| 1744 |
+
"step": 12050
|
| 1745 |
+
},
|
| 1746 |
+
{
|
| 1747 |
+
"epoch": 4.01859847226835,
|
| 1748 |
+
"grad_norm": 2.329714880033862e-05,
|
| 1749 |
+
"learning_rate": 1.7855114570013142e-05,
|
| 1750 |
+
"loss": 0.0,
|
| 1751 |
+
"step": 12100
|
| 1752 |
+
},
|
| 1753 |
+
{
|
| 1754 |
+
"epoch": 4.035204251079375,
|
| 1755 |
+
"grad_norm": 0.00018428650218993425,
|
| 1756 |
+
"learning_rate": 1.7755665341039023e-05,
|
| 1757 |
+
"loss": 0.0,
|
| 1758 |
+
"step": 12150
|
| 1759 |
+
},
|
| 1760 |
+
{
|
| 1761 |
+
"epoch": 4.051810029890402,
|
| 1762 |
+
"grad_norm": 3.634762470028363e-05,
|
| 1763 |
+
"learning_rate": 1.7656216112064904e-05,
|
| 1764 |
+
"loss": 0.0047,
|
| 1765 |
+
"step": 12200
|
| 1766 |
+
},
|
| 1767 |
+
{
|
| 1768 |
+
"epoch": 4.068415808701428,
|
| 1769 |
+
"grad_norm": 0.016488956287503242,
|
| 1770 |
+
"learning_rate": 1.7556766883090785e-05,
|
| 1771 |
+
"loss": 0.0185,
|
| 1772 |
+
"step": 12250
|
| 1773 |
+
},
|
| 1774 |
+
{
|
| 1775 |
+
"epoch": 4.085021587512454,
|
| 1776 |
+
"grad_norm": 0.0005068861646577716,
|
| 1777 |
+
"learning_rate": 1.745731765411667e-05,
|
| 1778 |
+
"loss": 0.0002,
|
| 1779 |
+
"step": 12300
|
| 1780 |
+
},
|
| 1781 |
+
{
|
| 1782 |
+
"epoch": 4.101627366323481,
|
| 1783 |
+
"grad_norm": 0.0002266648516524583,
|
| 1784 |
+
"learning_rate": 1.735786842514255e-05,
|
| 1785 |
+
"loss": 0.0001,
|
| 1786 |
+
"step": 12350
|
| 1787 |
+
},
|
| 1788 |
+
{
|
| 1789 |
+
"epoch": 4.118233145134507,
|
| 1790 |
+
"grad_norm": 0.000158867915160954,
|
| 1791 |
+
"learning_rate": 1.7258419196168435e-05,
|
| 1792 |
+
"loss": 0.0001,
|
| 1793 |
+
"step": 12400
|
| 1794 |
+
},
|
| 1795 |
+
{
|
| 1796 |
+
"epoch": 4.134838923945533,
|
| 1797 |
+
"grad_norm": 0.00020392602891661227,
|
| 1798 |
+
"learning_rate": 1.7158969967194317e-05,
|
| 1799 |
+
"loss": 0.0001,
|
| 1800 |
+
"step": 12450
|
| 1801 |
+
},
|
| 1802 |
+
{
|
| 1803 |
+
"epoch": 4.151444702756559,
|
| 1804 |
+
"grad_norm": 7.127954449970275e-05,
|
| 1805 |
+
"learning_rate": 1.7059520738220198e-05,
|
| 1806 |
+
"loss": 0.0,
|
| 1807 |
+
"step": 12500
|
| 1808 |
+
},
|
| 1809 |
+
{
|
| 1810 |
+
"epoch": 4.168050481567586,
|
| 1811 |
+
"grad_norm": 0.0027118742000311613,
|
| 1812 |
+
"learning_rate": 1.696007150924608e-05,
|
| 1813 |
+
"loss": 0.0005,
|
| 1814 |
+
"step": 12550
|
| 1815 |
+
},
|
| 1816 |
+
{
|
| 1817 |
+
"epoch": 4.1846562603786115,
|
| 1818 |
+
"grad_norm": 0.00021424326405394822,
|
| 1819 |
+
"learning_rate": 1.6860622280271963e-05,
|
| 1820 |
+
"loss": 0.0001,
|
| 1821 |
+
"step": 12600
|
| 1822 |
+
},
|
| 1823 |
+
{
|
| 1824 |
+
"epoch": 4.201262039189638,
|
| 1825 |
+
"grad_norm": 0.003089814679697156,
|
| 1826 |
+
"learning_rate": 1.6761173051297844e-05,
|
| 1827 |
+
"loss": 0.0136,
|
| 1828 |
+
"step": 12650
|
| 1829 |
+
},
|
| 1830 |
+
{
|
| 1831 |
+
"epoch": 4.217867818000665,
|
| 1832 |
+
"grad_norm": 0.0032255896367132664,
|
| 1833 |
+
"learning_rate": 1.6661723822323725e-05,
|
| 1834 |
+
"loss": 0.0001,
|
| 1835 |
+
"step": 12700
|
| 1836 |
+
},
|
| 1837 |
+
{
|
| 1838 |
+
"epoch": 4.23447359681169,
|
| 1839 |
+
"grad_norm": 0.00042820069938898087,
|
| 1840 |
+
"learning_rate": 1.6562274593349606e-05,
|
| 1841 |
+
"loss": 0.0,
|
| 1842 |
+
"step": 12750
|
| 1843 |
+
},
|
| 1844 |
+
{
|
| 1845 |
+
"epoch": 4.251079375622717,
|
| 1846 |
+
"grad_norm": 0.0002473096828907728,
|
| 1847 |
+
"learning_rate": 1.646282536437549e-05,
|
| 1848 |
+
"loss": 0.0001,
|
| 1849 |
+
"step": 12800
|
| 1850 |
+
},
|
| 1851 |
+
{
|
| 1852 |
+
"epoch": 4.267685154433743,
|
| 1853 |
+
"grad_norm": 0.00025876634754240513,
|
| 1854 |
+
"learning_rate": 1.6363376135401372e-05,
|
| 1855 |
+
"loss": 0.0069,
|
| 1856 |
+
"step": 12850
|
| 1857 |
+
},
|
| 1858 |
+
{
|
| 1859 |
+
"epoch": 4.284290933244769,
|
| 1860 |
+
"grad_norm": 0.00011665420606732368,
|
| 1861 |
+
"learning_rate": 1.6263926906427253e-05,
|
| 1862 |
+
"loss": 0.0,
|
| 1863 |
+
"step": 12900
|
| 1864 |
+
},
|
| 1865 |
+
{
|
| 1866 |
+
"epoch": 4.3008967120557955,
|
| 1867 |
+
"grad_norm": 9.87951279967092e-05,
|
| 1868 |
+
"learning_rate": 1.6164477677453134e-05,
|
| 1869 |
+
"loss": 0.0002,
|
| 1870 |
+
"step": 12950
|
| 1871 |
+
},
|
| 1872 |
+
{
|
| 1873 |
+
"epoch": 4.317502490866822,
|
| 1874 |
+
"grad_norm": 0.0003025582409463823,
|
| 1875 |
+
"learning_rate": 1.606502844847902e-05,
|
| 1876 |
+
"loss": 0.0204,
|
| 1877 |
+
"step": 13000
|
| 1878 |
+
},
|
| 1879 |
+
{
|
| 1880 |
+
"epoch": 4.334108269677848,
|
| 1881 |
+
"grad_norm": 0.002073385054245591,
|
| 1882 |
+
"learning_rate": 1.59655792195049e-05,
|
| 1883 |
+
"loss": 0.0209,
|
| 1884 |
+
"step": 13050
|
| 1885 |
+
},
|
| 1886 |
+
{
|
| 1887 |
+
"epoch": 4.350714048488874,
|
| 1888 |
+
"grad_norm": 0.001544980681501329,
|
| 1889 |
+
"learning_rate": 1.5866129990530784e-05,
|
| 1890 |
+
"loss": 0.0014,
|
| 1891 |
+
"step": 13100
|
| 1892 |
+
},
|
| 1893 |
+
{
|
| 1894 |
+
"epoch": 4.367319827299901,
|
| 1895 |
+
"grad_norm": 0.0011374803725630045,
|
| 1896 |
+
"learning_rate": 1.5766680761556665e-05,
|
| 1897 |
+
"loss": 0.0108,
|
| 1898 |
+
"step": 13150
|
| 1899 |
+
},
|
| 1900 |
+
{
|
| 1901 |
+
"epoch": 4.383925606110926,
|
| 1902 |
+
"grad_norm": 0.0015389297623187304,
|
| 1903 |
+
"learning_rate": 1.5667231532582546e-05,
|
| 1904 |
+
"loss": 0.0001,
|
| 1905 |
+
"step": 13200
|
| 1906 |
+
},
|
| 1907 |
+
{
|
| 1908 |
+
"epoch": 4.400531384921953,
|
| 1909 |
+
"grad_norm": 0.0011413008905947208,
|
| 1910 |
+
"learning_rate": 1.5567782303608427e-05,
|
| 1911 |
+
"loss": 0.0001,
|
| 1912 |
+
"step": 13250
|
| 1913 |
+
},
|
| 1914 |
+
{
|
| 1915 |
+
"epoch": 4.4171371637329795,
|
| 1916 |
+
"grad_norm": 0.001274819835089147,
|
| 1917 |
+
"learning_rate": 1.5468333074634312e-05,
|
| 1918 |
+
"loss": 0.0001,
|
| 1919 |
+
"step": 13300
|
| 1920 |
+
},
|
| 1921 |
+
{
|
| 1922 |
+
"epoch": 4.433742942544005,
|
| 1923 |
+
"grad_norm": 0.0003733636694960296,
|
| 1924 |
+
"learning_rate": 1.5368883845660193e-05,
|
| 1925 |
+
"loss": 0.0001,
|
| 1926 |
+
"step": 13350
|
| 1927 |
+
},
|
| 1928 |
+
{
|
| 1929 |
+
"epoch": 4.450348721355032,
|
| 1930 |
+
"grad_norm": 0.00023891785531304777,
|
| 1931 |
+
"learning_rate": 1.5269434616686074e-05,
|
| 1932 |
+
"loss": 0.0,
|
| 1933 |
+
"step": 13400
|
| 1934 |
+
},
|
| 1935 |
+
{
|
| 1936 |
+
"epoch": 4.466954500166057,
|
| 1937 |
+
"grad_norm": 0.0003116075531579554,
|
| 1938 |
+
"learning_rate": 1.5169985387711957e-05,
|
| 1939 |
+
"loss": 0.0,
|
| 1940 |
+
"step": 13450
|
| 1941 |
+
},
|
| 1942 |
+
{
|
| 1943 |
+
"epoch": 4.483560278977084,
|
| 1944 |
+
"grad_norm": 0.0002987791958730668,
|
| 1945 |
+
"learning_rate": 1.5070536158737838e-05,
|
| 1946 |
+
"loss": 0.0007,
|
| 1947 |
+
"step": 13500
|
| 1948 |
+
},
|
| 1949 |
+
{
|
| 1950 |
+
"epoch": 4.50016605778811,
|
| 1951 |
+
"grad_norm": 0.0006362134590744972,
|
| 1952 |
+
"learning_rate": 1.497108692976372e-05,
|
| 1953 |
+
"loss": 0.0115,
|
| 1954 |
+
"step": 13550
|
| 1955 |
+
},
|
| 1956 |
+
{
|
| 1957 |
+
"epoch": 4.516771836599137,
|
| 1958 |
+
"grad_norm": 0.0006330082542262971,
|
| 1959 |
+
"learning_rate": 1.4871637700789602e-05,
|
| 1960 |
+
"loss": 0.0093,
|
| 1961 |
+
"step": 13600
|
| 1962 |
+
},
|
| 1963 |
+
{
|
| 1964 |
+
"epoch": 4.533377615410163,
|
| 1965 |
+
"grad_norm": 0.0014981752028688788,
|
| 1966 |
+
"learning_rate": 1.4772188471815484e-05,
|
| 1967 |
+
"loss": 0.0076,
|
| 1968 |
+
"step": 13650
|
| 1969 |
+
},
|
| 1970 |
+
{
|
| 1971 |
+
"epoch": 4.549983394221189,
|
| 1972 |
+
"grad_norm": 0.0010505125392228365,
|
| 1973 |
+
"learning_rate": 1.4672739242841365e-05,
|
| 1974 |
+
"loss": 0.0001,
|
| 1975 |
+
"step": 13700
|
| 1976 |
+
},
|
| 1977 |
+
{
|
| 1978 |
+
"epoch": 4.566589173032215,
|
| 1979 |
+
"grad_norm": 0.0011327213142067194,
|
| 1980 |
+
"learning_rate": 1.4573290013867248e-05,
|
| 1981 |
+
"loss": 0.0,
|
| 1982 |
+
"step": 13750
|
| 1983 |
+
},
|
| 1984 |
+
{
|
| 1985 |
+
"epoch": 4.583194951843241,
|
| 1986 |
+
"grad_norm": 0.0005059707909822464,
|
| 1987 |
+
"learning_rate": 1.447384078489313e-05,
|
| 1988 |
+
"loss": 0.0,
|
| 1989 |
+
"step": 13800
|
| 1990 |
+
},
|
| 1991 |
+
{
|
| 1992 |
+
"epoch": 4.599800730654268,
|
| 1993 |
+
"grad_norm": 0.0011543643195182085,
|
| 1994 |
+
"learning_rate": 1.437439155591901e-05,
|
| 1995 |
+
"loss": 0.0,
|
| 1996 |
+
"step": 13850
|
| 1997 |
+
},
|
| 1998 |
+
{
|
| 1999 |
+
"epoch": 4.616406509465294,
|
| 2000 |
+
"grad_norm": 0.00020546128507703543,
|
| 2001 |
+
"learning_rate": 1.4274942326944895e-05,
|
| 2002 |
+
"loss": 0.0,
|
| 2003 |
+
"step": 13900
|
| 2004 |
+
},
|
| 2005 |
+
{
|
| 2006 |
+
"epoch": 4.63301228827632,
|
| 2007 |
+
"grad_norm": 0.0001845878578023985,
|
| 2008 |
+
"learning_rate": 1.4175493097970778e-05,
|
| 2009 |
+
"loss": 0.0,
|
| 2010 |
+
"step": 13950
|
| 2011 |
+
},
|
| 2012 |
+
{
|
| 2013 |
+
"epoch": 4.649618067087347,
|
| 2014 |
+
"grad_norm": 0.00035237689735367894,
|
| 2015 |
+
"learning_rate": 1.4076043868996659e-05,
|
| 2016 |
+
"loss": 0.0,
|
| 2017 |
+
"step": 14000
|
| 2018 |
+
},
|
| 2019 |
+
{
|
| 2020 |
+
"epoch": 4.666223845898372,
|
| 2021 |
+
"grad_norm": 0.00028683870914392173,
|
| 2022 |
+
"learning_rate": 1.3976594640022541e-05,
|
| 2023 |
+
"loss": 0.0,
|
| 2024 |
+
"step": 14050
|
| 2025 |
+
},
|
| 2026 |
+
{
|
| 2027 |
+
"epoch": 4.682829624709399,
|
| 2028 |
+
"grad_norm": 0.0003122540074400604,
|
| 2029 |
+
"learning_rate": 1.3877145411048423e-05,
|
| 2030 |
+
"loss": 0.0,
|
| 2031 |
+
"step": 14100
|
| 2032 |
+
},
|
| 2033 |
+
{
|
| 2034 |
+
"epoch": 4.699435403520425,
|
| 2035 |
+
"grad_norm": 0.00045101705472916365,
|
| 2036 |
+
"learning_rate": 1.3777696182074305e-05,
|
| 2037 |
+
"loss": 0.0002,
|
| 2038 |
+
"step": 14150
|
| 2039 |
+
},
|
| 2040 |
+
{
|
| 2041 |
+
"epoch": 4.716041182331451,
|
| 2042 |
+
"grad_norm": 0.0011838871287181973,
|
| 2043 |
+
"learning_rate": 1.3678246953100186e-05,
|
| 2044 |
+
"loss": 0.0,
|
| 2045 |
+
"step": 14200
|
| 2046 |
+
},
|
| 2047 |
+
{
|
| 2048 |
+
"epoch": 4.7326469611424775,
|
| 2049 |
+
"grad_norm": 0.00016272020002361387,
|
| 2050 |
+
"learning_rate": 1.3578797724126069e-05,
|
| 2051 |
+
"loss": 0.003,
|
| 2052 |
+
"step": 14250
|
| 2053 |
+
},
|
| 2054 |
+
{
|
| 2055 |
+
"epoch": 4.749252739953504,
|
| 2056 |
+
"grad_norm": 0.00011718720634235069,
|
| 2057 |
+
"learning_rate": 1.347934849515195e-05,
|
| 2058 |
+
"loss": 0.0,
|
| 2059 |
+
"step": 14300
|
| 2060 |
+
},
|
| 2061 |
+
{
|
| 2062 |
+
"epoch": 4.76585851876453,
|
| 2063 |
+
"grad_norm": 8.695230644661933e-05,
|
| 2064 |
+
"learning_rate": 1.3379899266177833e-05,
|
| 2065 |
+
"loss": 0.0,
|
| 2066 |
+
"step": 14350
|
| 2067 |
+
},
|
| 2068 |
+
{
|
| 2069 |
+
"epoch": 4.782464297575556,
|
| 2070 |
+
"grad_norm": 6.504805787699297e-05,
|
| 2071 |
+
"learning_rate": 1.3280450037203714e-05,
|
| 2072 |
+
"loss": 0.0,
|
| 2073 |
+
"step": 14400
|
| 2074 |
+
},
|
| 2075 |
+
{
|
| 2076 |
+
"epoch": 4.799070076386583,
|
| 2077 |
+
"grad_norm": 0.0013819790910929441,
|
| 2078 |
+
"learning_rate": 1.3181000808229597e-05,
|
| 2079 |
+
"loss": 0.0,
|
| 2080 |
+
"step": 14450
|
| 2081 |
+
},
|
| 2082 |
+
{
|
| 2083 |
+
"epoch": 4.815675855197608,
|
| 2084 |
+
"grad_norm": 8.097992395050824e-05,
|
| 2085 |
+
"learning_rate": 1.3081551579255478e-05,
|
| 2086 |
+
"loss": 0.0,
|
| 2087 |
+
"step": 14500
|
| 2088 |
+
},
|
| 2089 |
+
{
|
| 2090 |
+
"epoch": 4.832281634008635,
|
| 2091 |
+
"grad_norm": 6.052408934920095e-05,
|
| 2092 |
+
"learning_rate": 1.2982102350281359e-05,
|
| 2093 |
+
"loss": 0.0,
|
| 2094 |
+
"step": 14550
|
| 2095 |
+
},
|
| 2096 |
+
{
|
| 2097 |
+
"epoch": 4.8488874128196615,
|
| 2098 |
+
"grad_norm": 6.93105612299405e-05,
|
| 2099 |
+
"learning_rate": 1.2882653121307242e-05,
|
| 2100 |
+
"loss": 0.0,
|
| 2101 |
+
"step": 14600
|
| 2102 |
+
},
|
| 2103 |
+
{
|
| 2104 |
+
"epoch": 4.865493191630687,
|
| 2105 |
+
"grad_norm": 6.091671093599871e-05,
|
| 2106 |
+
"learning_rate": 1.2783203892333123e-05,
|
| 2107 |
+
"loss": 0.0,
|
| 2108 |
+
"step": 14650
|
| 2109 |
+
},
|
| 2110 |
+
{
|
| 2111 |
+
"epoch": 4.882098970441714,
|
| 2112 |
+
"grad_norm": 0.00023341408814303577,
|
| 2113 |
+
"learning_rate": 1.2683754663359007e-05,
|
| 2114 |
+
"loss": 0.0,
|
| 2115 |
+
"step": 14700
|
| 2116 |
+
},
|
| 2117 |
+
{
|
| 2118 |
+
"epoch": 4.89870474925274,
|
| 2119 |
+
"grad_norm": 4.871335477218963e-05,
|
| 2120 |
+
"learning_rate": 1.258430543438489e-05,
|
| 2121 |
+
"loss": 0.0,
|
| 2122 |
+
"step": 14750
|
| 2123 |
+
},
|
| 2124 |
+
{
|
| 2125 |
+
"epoch": 4.915310528063766,
|
| 2126 |
+
"grad_norm": 0.0001195693839690648,
|
| 2127 |
+
"learning_rate": 1.2484856205410771e-05,
|
| 2128 |
+
"loss": 0.0007,
|
| 2129 |
+
"step": 14800
|
| 2130 |
+
},
|
| 2131 |
+
{
|
| 2132 |
+
"epoch": 4.931916306874792,
|
| 2133 |
+
"grad_norm": 0.0006585444789379835,
|
| 2134 |
+
"learning_rate": 1.2385406976436654e-05,
|
| 2135 |
+
"loss": 0.0001,
|
| 2136 |
+
"step": 14850
|
| 2137 |
+
},
|
| 2138 |
+
{
|
| 2139 |
+
"epoch": 4.948522085685819,
|
| 2140 |
+
"grad_norm": 0.0009381878189742565,
|
| 2141 |
+
"learning_rate": 1.2285957747462535e-05,
|
| 2142 |
+
"loss": 0.0136,
|
| 2143 |
+
"step": 14900
|
| 2144 |
+
},
|
| 2145 |
+
{
|
| 2146 |
+
"epoch": 4.965127864496845,
|
| 2147 |
+
"grad_norm": 0.0007571703754365444,
|
| 2148 |
+
"learning_rate": 1.2186508518488418e-05,
|
| 2149 |
+
"loss": 0.0001,
|
| 2150 |
+
"step": 14950
|
| 2151 |
+
},
|
| 2152 |
+
{
|
| 2153 |
+
"epoch": 4.981733643307871,
|
| 2154 |
+
"grad_norm": 0.0002012668555835262,
|
| 2155 |
+
"learning_rate": 1.2087059289514299e-05,
|
| 2156 |
+
"loss": 0.0,
|
| 2157 |
+
"step": 15000
|
| 2158 |
+
},
|
| 2159 |
+
{
|
| 2160 |
+
"epoch": 4.998339422118898,
|
| 2161 |
+
"grad_norm": 0.0004453076981008053,
|
| 2162 |
+
"learning_rate": 1.1987610060540182e-05,
|
| 2163 |
+
"loss": 0.0011,
|
| 2164 |
+
"step": 15050
|
| 2165 |
+
},
|
{
  "epoch": 5.0,
  "eval_accuracy": 0.997256679389313,
  "eval_f1": 0.9972255532951301,
  "eval_loss": 0.026163995265960693,
  "eval_precision": 0.997237804162915,
  "eval_recall": 0.997256679389313,
  "eval_runtime": 67.7679,
  "eval_samples_per_second": 123.716,
  "eval_steps_per_second": 7.732,
  "step": 15055
}
],
"logging_steps": 50,
"max_steps": 21077,
"num_input_tokens_seen": 0,
"num_train_epochs": 7,
"save_steps": 500,
"stateful_callbacks": {
  "TrainerControl": {
    "args": {
      "should_epoch_stop": false,
      "should_evaluate": false,
      "should_log": false,
      "should_save": true,
      "should_training_stop": false
    },
    "attributes": {}
  }
},
"total_flos": 2.547123652512768e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}
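
The trial-1 log above ends with the epoch-5 evaluation (accuracy ≈ 0.9973, eval loss ≈ 0.0262 at step 15055). As an illustration only, not a file in this repository, here is a minimal Python sketch for reading such a trainer_state.json and listing its evaluation entries; the local path is a placeholder:

```python
import json
from pathlib import Path

# Placeholder path to a checkpoint like the one shown above.
state_path = Path("trial-1/checkpoint-15055/trainer_state.json")

with state_path.open() as f:
    state = json.load(f)

# log_history mixes per-step training logs (loss / learning_rate) with
# evaluation entries; the eval entries are the ones carrying "eval_loss".
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]
print("best metric:", state.get("best_metric"))
for e in eval_logs:
    print(f"epoch {e['epoch']}: loss={e['eval_loss']:.4f} f1={e['eval_f1']:.4f}")
```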
trial-1/checkpoint-15055/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:535f8963c9933a98cbda6a7a60fb42b0daa9affc10889961579027cb42dfd7b9
size 5368
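
As with the other binary files in this commit, training_args.bin is stored as a Git LFS pointer (the version/oid/size triplet above) rather than as the binary itself. A minimal sketch of fetching the resolved files with huggingface_hub, assuming the repository has been pushed to the Hub; the repo id below is a placeholder:

```python
from huggingface_hub import snapshot_download

# Placeholder repo id; replace with the actual model repository name.
local_dir = snapshot_download(repo_id="user/answerdotai-ModernBERT-large-finetuned")
print("files downloaded to", local_dir)
```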
trial-2/checkpoint-9033/config.json
ADDED
@@ -0,0 +1,47 @@
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "reference_compile": true,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vocab_size": 50368
}
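
This config.json describes a standard ModernBertForSequenceClassification head on ModernBERT-large (28 layers, hidden size 1024, 8192-token context). A minimal sketch of loading such a checkpoint directory with transformers, assuming a local copy and a transformers build recent enough for ModernBERT (the file was written by 4.48.0.dev0); the path is a placeholder:

```python
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

ckpt = "trial-2/checkpoint-9033"  # placeholder local path to the checkpoint directory

config = AutoConfig.from_pretrained(ckpt)
model = AutoModelForSequenceClassification.from_pretrained(ckpt)
# The checkpoint itself does not ship a tokenizer, so fall back to the base model's.
tokenizer = AutoTokenizer.from_pretrained("answerdotai/ModernBERT-large")

print(config.model_type, config.num_hidden_layers, model.num_labels)
```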
trial-2/checkpoint-9033/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ceb5aea83d31becf51f33bad0101af19aa72e30825571029e2058839c2598942
size 1583351632
trial-2/checkpoint-9033/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d58ab59719b06a08e5012236adc8ead9857b2a6b939a9df2e7ad1fd2ea7f3856
size 3166813178
trial-2/checkpoint-9033/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:000f96e69b5b01f915aefc67b0e71455a9632e90db9f36791eb61370a67ffd58
size 14244
trial-2/checkpoint-9033/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ef3fd896aed775d6359ff0434e2911a0c84bd73dffb2a8c635b32b0f9e87d938
size 1064
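
optimizer.pt, scheduler.pt and rng_state.pth hold the Trainer's resumption state for this checkpoint, so a run can be continued with trainer.train(resume_from_checkpoint=...). A minimal inspection sketch, assuming the files have been pulled locally and that the pickles follow the usual PyTorch optimizer/scheduler state_dict layout:

```python
import torch

ckpt = "trial-2/checkpoint-9033"  # placeholder local path

# Plain torch pickles saved by Trainer; together with trainer_state.json they
# allow training to resume exactly where it stopped.
opt_state = torch.load(f"{ckpt}/optimizer.pt", map_location="cpu")
sched_state = torch.load(f"{ckpt}/scheduler.pt", map_location="cpu")

print(opt_state.keys())   # typically 'state' and 'param_groups'
print(sched_state)        # e.g. last_epoch and _last_lr for the LR scheduler
```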
trial-2/checkpoint-9033/trainer_state.json
ADDED
|
@@ -0,0 +1,1329 @@
{
  "best_metric": 0.017316868528723717,
  "best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-2/checkpoint-9033",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 9033,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
| 11 |
+
{
|
| 12 |
+
"epoch": 0.016605778811026237,
|
| 13 |
+
"grad_norm": 21.98246192932129,
|
| 14 |
+
"learning_rate": 4.176603346793102e-05,
|
| 15 |
+
"loss": 0.3038,
|
| 16 |
+
"step": 50
|
| 17 |
+
},
|
| 18 |
+
{
|
| 19 |
+
"epoch": 0.033211557622052475,
|
| 20 |
+
"grad_norm": 0.019962424412369728,
|
| 21 |
+
"learning_rate": 4.162685974761136e-05,
|
| 22 |
+
"loss": 0.1339,
|
| 23 |
+
"step": 100
|
| 24 |
+
},
|
| 25 |
+
{
|
| 26 |
+
"epoch": 0.04981733643307871,
|
| 27 |
+
"grad_norm": 4.160129070281982,
|
| 28 |
+
"learning_rate": 4.1487686027291694e-05,
|
| 29 |
+
"loss": 0.1214,
|
| 30 |
+
"step": 150
|
| 31 |
+
},
|
| 32 |
+
{
|
| 33 |
+
"epoch": 0.06642311524410495,
|
| 34 |
+
"grad_norm": 0.058199040591716766,
|
| 35 |
+
"learning_rate": 4.1348512306972037e-05,
|
| 36 |
+
"loss": 0.0991,
|
| 37 |
+
"step": 200
|
| 38 |
+
},
|
| 39 |
+
{
|
| 40 |
+
"epoch": 0.08302889405513118,
|
| 41 |
+
"grad_norm": 4.1636199951171875,
|
| 42 |
+
"learning_rate": 4.120933858665237e-05,
|
| 43 |
+
"loss": 0.0483,
|
| 44 |
+
"step": 250
|
| 45 |
+
},
|
| 46 |
+
{
|
| 47 |
+
"epoch": 0.09963467286615742,
|
| 48 |
+
"grad_norm": 0.009575674310326576,
|
| 49 |
+
"learning_rate": 4.107016486633271e-05,
|
| 50 |
+
"loss": 0.0655,
|
| 51 |
+
"step": 300
|
| 52 |
+
},
|
| 53 |
+
{
|
| 54 |
+
"epoch": 0.11624045167718366,
|
| 55 |
+
"grad_norm": 0.005853955168277025,
|
| 56 |
+
"learning_rate": 4.0930991146013044e-05,
|
| 57 |
+
"loss": 0.0417,
|
| 58 |
+
"step": 350
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
"epoch": 0.1328462304882099,
|
| 62 |
+
"grad_norm": 0.22517672181129456,
|
| 63 |
+
"learning_rate": 4.079181742569338e-05,
|
| 64 |
+
"loss": 0.0559,
|
| 65 |
+
"step": 400
|
| 66 |
+
},
|
| 67 |
+
{
|
| 68 |
+
"epoch": 0.14945200929923613,
|
| 69 |
+
"grad_norm": 2.013570785522461,
|
| 70 |
+
"learning_rate": 4.0652643705373716e-05,
|
| 71 |
+
"loss": 0.0295,
|
| 72 |
+
"step": 450
|
| 73 |
+
},
|
| 74 |
+
{
|
| 75 |
+
"epoch": 0.16605778811026237,
|
| 76 |
+
"grad_norm": 0.06311897933483124,
|
| 77 |
+
"learning_rate": 4.051346998505405e-05,
|
| 78 |
+
"loss": 0.0683,
|
| 79 |
+
"step": 500
|
| 80 |
+
},
|
| 81 |
+
{
|
| 82 |
+
"epoch": 0.1826635669212886,
|
| 83 |
+
"grad_norm": 0.5382233262062073,
|
| 84 |
+
"learning_rate": 4.037429626473439e-05,
|
| 85 |
+
"loss": 0.03,
|
| 86 |
+
"step": 550
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
"epoch": 0.19926934573231483,
|
| 90 |
+
"grad_norm": 1.0302859544754028,
|
| 91 |
+
"learning_rate": 4.0235122544414723e-05,
|
| 92 |
+
"loss": 0.0502,
|
| 93 |
+
"step": 600
|
| 94 |
+
},
|
| 95 |
+
{
|
| 96 |
+
"epoch": 0.2158751245433411,
|
| 97 |
+
"grad_norm": 5.7028913497924805,
|
| 98 |
+
"learning_rate": 4.009594882409506e-05,
|
| 99 |
+
"loss": 0.0429,
|
| 100 |
+
"step": 650
|
| 101 |
+
},
|
| 102 |
+
{
|
| 103 |
+
"epoch": 0.23248090335436733,
|
| 104 |
+
"grad_norm": 0.05518786981701851,
|
| 105 |
+
"learning_rate": 3.9956775103775395e-05,
|
| 106 |
+
"loss": 0.0384,
|
| 107 |
+
"step": 700
|
| 108 |
+
},
|
| 109 |
+
{
|
| 110 |
+
"epoch": 0.24908668216539356,
|
| 111 |
+
"grad_norm": 0.0032245127949863672,
|
| 112 |
+
"learning_rate": 3.981760138345573e-05,
|
| 113 |
+
"loss": 0.048,
|
| 114 |
+
"step": 750
|
| 115 |
+
},
|
| 116 |
+
{
|
| 117 |
+
"epoch": 0.2656924609764198,
|
| 118 |
+
"grad_norm": 0.12157031893730164,
|
| 119 |
+
"learning_rate": 3.967842766313607e-05,
|
| 120 |
+
"loss": 0.0739,
|
| 121 |
+
"step": 800
|
| 122 |
+
},
|
| 123 |
+
{
|
| 124 |
+
"epoch": 0.282298239787446,
|
| 125 |
+
"grad_norm": 1.955265998840332,
|
| 126 |
+
"learning_rate": 3.95392539428164e-05,
|
| 127 |
+
"loss": 0.0607,
|
| 128 |
+
"step": 850
|
| 129 |
+
},
|
| 130 |
+
{
|
| 131 |
+
"epoch": 0.29890401859847227,
|
| 132 |
+
"grad_norm": 0.03681463748216629,
|
| 133 |
+
"learning_rate": 3.9400080222496745e-05,
|
| 134 |
+
"loss": 0.0469,
|
| 135 |
+
"step": 900
|
| 136 |
+
},
|
| 137 |
+
{
|
| 138 |
+
"epoch": 0.3155097974094985,
|
| 139 |
+
"grad_norm": 0.00883490964770317,
|
| 140 |
+
"learning_rate": 3.926090650217708e-05,
|
| 141 |
+
"loss": 0.0251,
|
| 142 |
+
"step": 950
|
| 143 |
+
},
|
| 144 |
+
{
|
| 145 |
+
"epoch": 0.33211557622052473,
|
| 146 |
+
"grad_norm": 0.017260171473026276,
|
| 147 |
+
"learning_rate": 3.912173278185742e-05,
|
| 148 |
+
"loss": 0.0274,
|
| 149 |
+
"step": 1000
|
| 150 |
+
},
|
| 151 |
+
{
|
| 152 |
+
"epoch": 0.348721355031551,
|
| 153 |
+
"grad_norm": 0.0017717696027830243,
|
| 154 |
+
"learning_rate": 3.898255906153775e-05,
|
| 155 |
+
"loss": 0.0603,
|
| 156 |
+
"step": 1050
|
| 157 |
+
},
|
| 158 |
+
{
|
| 159 |
+
"epoch": 0.3653271338425772,
|
| 160 |
+
"grad_norm": 0.03903853893280029,
|
| 161 |
+
"learning_rate": 3.884338534121809e-05,
|
| 162 |
+
"loss": 0.0378,
|
| 163 |
+
"step": 1100
|
| 164 |
+
},
|
| 165 |
+
{
|
| 166 |
+
"epoch": 0.38193291265360346,
|
| 167 |
+
"grad_norm": 0.002946289489045739,
|
| 168 |
+
"learning_rate": 3.8704211620898425e-05,
|
| 169 |
+
"loss": 0.0362,
|
| 170 |
+
"step": 1150
|
| 171 |
+
},
|
| 172 |
+
{
|
| 173 |
+
"epoch": 0.39853869146462967,
|
| 174 |
+
"grad_norm": 0.013737122528254986,
|
| 175 |
+
"learning_rate": 3.856503790057876e-05,
|
| 176 |
+
"loss": 0.048,
|
| 177 |
+
"step": 1200
|
| 178 |
+
},
|
| 179 |
+
{
|
| 180 |
+
"epoch": 0.41514447027565593,
|
| 181 |
+
"grad_norm": 0.22858883440494537,
|
| 182 |
+
"learning_rate": 3.84258641802591e-05,
|
| 183 |
+
"loss": 0.0195,
|
| 184 |
+
"step": 1250
|
| 185 |
+
},
|
| 186 |
+
{
|
| 187 |
+
"epoch": 0.4317502490866822,
|
| 188 |
+
"grad_norm": 0.605839729309082,
|
| 189 |
+
"learning_rate": 3.828669045993944e-05,
|
| 190 |
+
"loss": 0.0422,
|
| 191 |
+
"step": 1300
|
| 192 |
+
},
|
| 193 |
+
{
|
| 194 |
+
"epoch": 0.4483560278977084,
|
| 195 |
+
"grad_norm": 0.045106545090675354,
|
| 196 |
+
"learning_rate": 3.8147516739619775e-05,
|
| 197 |
+
"loss": 0.0303,
|
| 198 |
+
"step": 1350
|
| 199 |
+
},
|
| 200 |
+
{
|
| 201 |
+
"epoch": 0.46496180670873466,
|
| 202 |
+
"grad_norm": 0.0033422100823372602,
|
| 203 |
+
"learning_rate": 3.800834301930011e-05,
|
| 204 |
+
"loss": 0.0109,
|
| 205 |
+
"step": 1400
|
| 206 |
+
},
|
| 207 |
+
{
|
| 208 |
+
"epoch": 0.48156758551976087,
|
| 209 |
+
"grad_norm": 0.04552963748574257,
|
| 210 |
+
"learning_rate": 3.786916929898045e-05,
|
| 211 |
+
"loss": 0.02,
|
| 212 |
+
"step": 1450
|
| 213 |
+
},
|
| 214 |
+
{
|
| 215 |
+
"epoch": 0.4981733643307871,
|
| 216 |
+
"grad_norm": 0.0007607729057781398,
|
| 217 |
+
"learning_rate": 3.7729995578660776e-05,
|
| 218 |
+
"loss": 0.0283,
|
| 219 |
+
"step": 1500
|
| 220 |
+
},
|
| 221 |
+
{
|
| 222 |
+
"epoch": 0.5147791431418134,
|
| 223 |
+
"grad_norm": 0.011046077124774456,
|
| 224 |
+
"learning_rate": 3.759082185834111e-05,
|
| 225 |
+
"loss": 0.0249,
|
| 226 |
+
"step": 1550
|
| 227 |
+
},
|
| 228 |
+
{
|
| 229 |
+
"epoch": 0.5313849219528396,
|
| 230 |
+
"grad_norm": 0.1515175700187683,
|
| 231 |
+
"learning_rate": 3.7451648138021454e-05,
|
| 232 |
+
"loss": 0.0128,
|
| 233 |
+
"step": 1600
|
| 234 |
+
},
|
| 235 |
+
{
|
| 236 |
+
"epoch": 0.5479907007638658,
|
| 237 |
+
"grad_norm": 0.0051905689761042595,
|
| 238 |
+
"learning_rate": 3.731247441770179e-05,
|
| 239 |
+
"loss": 0.0235,
|
| 240 |
+
"step": 1650
|
| 241 |
+
},
|
| 242 |
+
{
|
| 243 |
+
"epoch": 0.564596479574892,
|
| 244 |
+
"grad_norm": 0.03131992742419243,
|
| 245 |
+
"learning_rate": 3.7173300697382126e-05,
|
| 246 |
+
"loss": 0.0017,
|
| 247 |
+
"step": 1700
|
| 248 |
+
},
|
| 249 |
+
{
|
| 250 |
+
"epoch": 0.5812022583859183,
|
| 251 |
+
"grad_norm": 50.02948760986328,
|
| 252 |
+
"learning_rate": 3.703412697706246e-05,
|
| 253 |
+
"loss": 0.0119,
|
| 254 |
+
"step": 1750
|
| 255 |
+
},
|
| 256 |
+
{
|
| 257 |
+
"epoch": 0.5978080371969445,
|
| 258 |
+
"grad_norm": 0.017883650958538055,
|
| 259 |
+
"learning_rate": 3.68949532567428e-05,
|
| 260 |
+
"loss": 0.0374,
|
| 261 |
+
"step": 1800
|
| 262 |
+
},
|
| 263 |
+
{
|
| 264 |
+
"epoch": 0.6144138160079707,
|
| 265 |
+
"grad_norm": 0.002184309996664524,
|
| 266 |
+
"learning_rate": 3.6755779536423134e-05,
|
| 267 |
+
"loss": 0.0258,
|
| 268 |
+
"step": 1850
|
| 269 |
+
},
|
| 270 |
+
{
|
| 271 |
+
"epoch": 0.631019594818997,
|
| 272 |
+
"grad_norm": 0.009681240655481815,
|
| 273 |
+
"learning_rate": 3.661660581610347e-05,
|
| 274 |
+
"loss": 0.0246,
|
| 275 |
+
"step": 1900
|
| 276 |
+
},
|
| 277 |
+
{
|
| 278 |
+
"epoch": 0.6476253736300233,
|
| 279 |
+
"grad_norm": 0.0064963954500854015,
|
| 280 |
+
"learning_rate": 3.647743209578381e-05,
|
| 281 |
+
"loss": 0.0076,
|
| 282 |
+
"step": 1950
|
| 283 |
+
},
|
| 284 |
+
{
|
| 285 |
+
"epoch": 0.6642311524410495,
|
| 286 |
+
"grad_norm": 0.0018849828047677875,
|
| 287 |
+
"learning_rate": 3.633825837546415e-05,
|
| 288 |
+
"loss": 0.0146,
|
| 289 |
+
"step": 2000
|
| 290 |
+
},
|
| 291 |
+
{
|
| 292 |
+
"epoch": 0.6808369312520757,
|
| 293 |
+
"grad_norm": 0.0013440287439152598,
|
| 294 |
+
"learning_rate": 3.6199084655144484e-05,
|
| 295 |
+
"loss": 0.0042,
|
| 296 |
+
"step": 2050
|
| 297 |
+
},
|
| 298 |
+
{
|
| 299 |
+
"epoch": 0.697442710063102,
|
| 300 |
+
"grad_norm": 0.0016228831373155117,
|
| 301 |
+
"learning_rate": 3.605991093482482e-05,
|
| 302 |
+
"loss": 0.0136,
|
| 303 |
+
"step": 2100
|
| 304 |
+
},
|
| 305 |
+
{
|
| 306 |
+
"epoch": 0.7140484888741282,
|
| 307 |
+
"grad_norm": 0.0009898327989503741,
|
| 308 |
+
"learning_rate": 3.5920737214505156e-05,
|
| 309 |
+
"loss": 0.0187,
|
| 310 |
+
"step": 2150
|
| 311 |
+
},
|
| 312 |
+
{
|
| 313 |
+
"epoch": 0.7306542676851544,
|
| 314 |
+
"grad_norm": 0.001361815258860588,
|
| 315 |
+
"learning_rate": 3.578156349418549e-05,
|
| 316 |
+
"loss": 0.0167,
|
| 317 |
+
"step": 2200
|
| 318 |
+
},
|
| 319 |
+
{
|
| 320 |
+
"epoch": 0.7472600464961807,
|
| 321 |
+
"grad_norm": 0.004233693704009056,
|
| 322 |
+
"learning_rate": 3.564238977386583e-05,
|
| 323 |
+
"loss": 0.0142,
|
| 324 |
+
"step": 2250
|
| 325 |
+
},
|
| 326 |
+
{
|
| 327 |
+
"epoch": 0.7638658253072069,
|
| 328 |
+
"grad_norm": 0.002790838712826371,
|
| 329 |
+
"learning_rate": 3.550321605354616e-05,
|
| 330 |
+
"loss": 0.0235,
|
| 331 |
+
"step": 2300
|
| 332 |
+
},
|
| 333 |
+
{
|
| 334 |
+
"epoch": 0.7804716041182331,
|
| 335 |
+
"grad_norm": 0.005341578274965286,
|
| 336 |
+
"learning_rate": 3.53640423332265e-05,
|
| 337 |
+
"loss": 0.002,
|
| 338 |
+
"step": 2350
|
| 339 |
+
},
|
| 340 |
+
{
|
| 341 |
+
"epoch": 0.7970773829292593,
|
| 342 |
+
"grad_norm": 0.011184672825038433,
|
| 343 |
+
"learning_rate": 3.5224868612906835e-05,
|
| 344 |
+
"loss": 0.0325,
|
| 345 |
+
"step": 2400
|
| 346 |
+
},
|
| 347 |
+
{
|
| 348 |
+
"epoch": 0.8136831617402857,
|
| 349 |
+
"grad_norm": 0.001378358923830092,
|
| 350 |
+
"learning_rate": 3.508569489258717e-05,
|
| 351 |
+
"loss": 0.0012,
|
| 352 |
+
"step": 2450
|
| 353 |
+
},
|
| 354 |
+
{
|
| 355 |
+
"epoch": 0.8302889405513119,
|
| 356 |
+
"grad_norm": 0.0006328076124191284,
|
| 357 |
+
"learning_rate": 3.4946521172267507e-05,
|
| 358 |
+
"loss": 0.0028,
|
| 359 |
+
"step": 2500
|
| 360 |
+
},
|
| 361 |
+
{
|
| 362 |
+
"epoch": 0.8468947193623381,
|
| 363 |
+
"grad_norm": 0.003556261071935296,
|
| 364 |
+
"learning_rate": 3.480734745194784e-05,
|
| 365 |
+
"loss": 0.0103,
|
| 366 |
+
"step": 2550
|
| 367 |
+
},
|
| 368 |
+
{
|
| 369 |
+
"epoch": 0.8635004981733644,
|
| 370 |
+
"grad_norm": 0.09675312787294388,
|
| 371 |
+
"learning_rate": 3.466817373162818e-05,
|
| 372 |
+
"loss": 0.0154,
|
| 373 |
+
"step": 2600
|
| 374 |
+
},
|
| 375 |
+
{
|
| 376 |
+
"epoch": 0.8801062769843906,
|
| 377 |
+
"grad_norm": 0.0008714852156117558,
|
| 378 |
+
"learning_rate": 3.452900001130852e-05,
|
| 379 |
+
"loss": 0.0098,
|
| 380 |
+
"step": 2650
|
| 381 |
+
},
|
| 382 |
+
{
|
| 383 |
+
"epoch": 0.8967120557954168,
|
| 384 |
+
"grad_norm": 0.00011318879842292517,
|
| 385 |
+
"learning_rate": 3.438982629098886e-05,
|
| 386 |
+
"loss": 0.0007,
|
| 387 |
+
"step": 2700
|
| 388 |
+
},
|
| 389 |
+
{
|
| 390 |
+
"epoch": 0.913317834606443,
|
| 391 |
+
"grad_norm": 0.0007046264945529401,
|
| 392 |
+
"learning_rate": 3.425065257066919e-05,
|
| 393 |
+
"loss": 0.0163,
|
| 394 |
+
"step": 2750
|
| 395 |
+
},
|
| 396 |
+
{
|
| 397 |
+
"epoch": 0.9299236134174693,
|
| 398 |
+
"grad_norm": 0.0003467966744210571,
|
| 399 |
+
"learning_rate": 3.411147885034953e-05,
|
| 400 |
+
"loss": 0.0087,
|
| 401 |
+
"step": 2800
|
| 402 |
+
},
|
| 403 |
+
{
|
| 404 |
+
"epoch": 0.9465293922284955,
|
| 405 |
+
"grad_norm": 0.002381992759183049,
|
| 406 |
+
"learning_rate": 3.3972305130029864e-05,
|
| 407 |
+
"loss": 0.025,
|
| 408 |
+
"step": 2850
|
| 409 |
+
},
|
| 410 |
+
{
|
| 411 |
+
"epoch": 0.9631351710395217,
|
| 412 |
+
"grad_norm": 3.5343263149261475,
|
| 413 |
+
"learning_rate": 3.38331314097102e-05,
|
| 414 |
+
"loss": 0.0066,
|
| 415 |
+
"step": 2900
|
| 416 |
+
},
|
| 417 |
+
{
|
| 418 |
+
"epoch": 0.9797409498505479,
|
| 419 |
+
"grad_norm": 1.6730190515518188,
|
| 420 |
+
"learning_rate": 3.369395768939054e-05,
|
| 421 |
+
"loss": 0.028,
|
| 422 |
+
"step": 2950
|
| 423 |
+
},
|
| 424 |
+
{
|
| 425 |
+
"epoch": 0.9963467286615743,
|
| 426 |
+
"grad_norm": 0.006649247836321592,
|
| 427 |
+
"learning_rate": 3.355478396907088e-05,
|
| 428 |
+
"loss": 0.001,
|
| 429 |
+
"step": 3000
|
| 430 |
+
},
|
| 431 |
+
{
|
| 432 |
+
"epoch": 1.0,
|
| 433 |
+
"eval_accuracy": 0.9877146946564885,
|
| 434 |
+
"eval_f1": 0.9883925037796503,
|
| 435 |
+
"eval_loss": 0.056414589285850525,
|
| 436 |
+
"eval_precision": 0.9900276560214059,
|
| 437 |
+
"eval_recall": 0.9877146946564885,
|
| 438 |
+
"eval_runtime": 66.3519,
|
| 439 |
+
"eval_samples_per_second": 126.357,
|
| 440 |
+
"eval_steps_per_second": 7.897,
|
| 441 |
+
"step": 3011
|
| 442 |
+
},
|
| 443 |
+
{
|
| 444 |
+
"epoch": 1.0129525074726005,
|
| 445 |
+
"grad_norm": 0.03574318438768387,
|
| 446 |
+
"learning_rate": 3.3415610248751215e-05,
|
| 447 |
+
"loss": 0.0166,
|
| 448 |
+
"step": 3050
|
| 449 |
+
},
|
| 450 |
+
{
|
| 451 |
+
"epoch": 1.0295582862836268,
|
| 452 |
+
"grad_norm": 0.0022263473365455866,
|
| 453 |
+
"learning_rate": 3.327643652843155e-05,
|
| 454 |
+
"loss": 0.004,
|
| 455 |
+
"step": 3100
|
| 456 |
+
},
|
| 457 |
+
{
|
| 458 |
+
"epoch": 1.0461640650946529,
|
| 459 |
+
"grad_norm": 0.0038769582752138376,
|
| 460 |
+
"learning_rate": 3.3137262808111886e-05,
|
| 461 |
+
"loss": 0.0003,
|
| 462 |
+
"step": 3150
|
| 463 |
+
},
|
| 464 |
+
{
|
| 465 |
+
"epoch": 1.0627698439056792,
|
| 466 |
+
"grad_norm": 0.005572175141423941,
|
| 467 |
+
"learning_rate": 3.299808908779222e-05,
|
| 468 |
+
"loss": 0.001,
|
| 469 |
+
"step": 3200
|
| 470 |
+
},
|
| 471 |
+
{
|
| 472 |
+
"epoch": 1.0793756227167055,
|
| 473 |
+
"grad_norm": 0.1954014003276825,
|
| 474 |
+
"learning_rate": 3.285891536747256e-05,
|
| 475 |
+
"loss": 0.0323,
|
| 476 |
+
"step": 3250
|
| 477 |
+
},
|
| 478 |
+
{
|
| 479 |
+
"epoch": 1.0959814015277316,
|
| 480 |
+
"grad_norm": 0.0821860134601593,
|
| 481 |
+
"learning_rate": 3.2719741647152894e-05,
|
| 482 |
+
"loss": 0.0035,
|
| 483 |
+
"step": 3300
|
| 484 |
+
},
|
| 485 |
+
{
|
| 486 |
+
"epoch": 1.112587180338758,
|
| 487 |
+
"grad_norm": 0.04094453528523445,
|
| 488 |
+
"learning_rate": 3.258056792683323e-05,
|
| 489 |
+
"loss": 0.0025,
|
| 490 |
+
"step": 3350
|
| 491 |
+
},
|
| 492 |
+
{
|
| 493 |
+
"epoch": 1.1291929591497842,
|
| 494 |
+
"grad_norm": 0.0028018904849886894,
|
| 495 |
+
"learning_rate": 3.2441394206513566e-05,
|
| 496 |
+
"loss": 0.0092,
|
| 497 |
+
"step": 3400
|
| 498 |
+
},
|
| 499 |
+
{
|
| 500 |
+
"epoch": 1.1457987379608103,
|
| 501 |
+
"grad_norm": 0.002664918312802911,
|
| 502 |
+
"learning_rate": 3.23022204861939e-05,
|
| 503 |
+
"loss": 0.0194,
|
| 504 |
+
"step": 3450
|
| 505 |
+
},
|
| 506 |
+
{
|
| 507 |
+
"epoch": 1.1624045167718366,
|
| 508 |
+
"grad_norm": 6.756401538848877,
|
| 509 |
+
"learning_rate": 3.216304676587424e-05,
|
| 510 |
+
"loss": 0.0135,
|
| 511 |
+
"step": 3500
|
| 512 |
+
},
|
| 513 |
+
{
|
| 514 |
+
"epoch": 1.1790102955828627,
|
| 515 |
+
"grad_norm": 0.0011647256324067712,
|
| 516 |
+
"learning_rate": 3.202387304555457e-05,
|
| 517 |
+
"loss": 0.0009,
|
| 518 |
+
"step": 3550
|
| 519 |
+
},
|
| 520 |
+
{
|
| 521 |
+
"epoch": 1.195616074393889,
|
| 522 |
+
"grad_norm": 0.0007124203257262707,
|
| 523 |
+
"learning_rate": 3.188469932523491e-05,
|
| 524 |
+
"loss": 0.0004,
|
| 525 |
+
"step": 3600
|
| 526 |
+
},
|
| 527 |
+
{
|
| 528 |
+
"epoch": 1.2122218532049154,
|
| 529 |
+
"grad_norm": 0.00033606469514779747,
|
| 530 |
+
"learning_rate": 3.174552560491525e-05,
|
| 531 |
+
"loss": 0.0,
|
| 532 |
+
"step": 3650
|
| 533 |
+
},
|
| 534 |
+
{
|
| 535 |
+
"epoch": 1.2288276320159415,
|
| 536 |
+
"grad_norm": 0.0005367195117287338,
|
| 537 |
+
"learning_rate": 3.160635188459559e-05,
|
| 538 |
+
"loss": 0.0001,
|
| 539 |
+
"step": 3700
|
| 540 |
+
},
|
| 541 |
+
{
|
| 542 |
+
"epoch": 1.2454334108269678,
|
| 543 |
+
"grad_norm": 0.012845808640122414,
|
| 544 |
+
"learning_rate": 3.1467178164275924e-05,
|
| 545 |
+
"loss": 0.0132,
|
| 546 |
+
"step": 3750
|
| 547 |
+
},
|
| 548 |
+
{
|
| 549 |
+
"epoch": 1.2620391896379939,
|
| 550 |
+
"grad_norm": 0.0020410455763339996,
|
| 551 |
+
"learning_rate": 3.132800444395626e-05,
|
| 552 |
+
"loss": 0.0006,
|
| 553 |
+
"step": 3800
|
| 554 |
+
},
|
| 555 |
+
{
|
| 556 |
+
"epoch": 1.2786449684490202,
|
| 557 |
+
"grad_norm": 0.0007005564984865487,
|
| 558 |
+
"learning_rate": 3.1188830723636595e-05,
|
| 559 |
+
"loss": 0.0074,
|
| 560 |
+
"step": 3850
|
| 561 |
+
},
|
| 562 |
+
{
|
| 563 |
+
"epoch": 1.2952507472600465,
|
| 564 |
+
"grad_norm": 0.00040667568100616336,
|
| 565 |
+
"learning_rate": 3.104965700331693e-05,
|
| 566 |
+
"loss": 0.004,
|
| 567 |
+
"step": 3900
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"epoch": 1.3118565260710726,
|
| 571 |
+
"grad_norm": 0.0005291103734634817,
|
| 572 |
+
"learning_rate": 3.091048328299727e-05,
|
| 573 |
+
"loss": 0.0035,
|
| 574 |
+
"step": 3950
|
| 575 |
+
},
|
| 576 |
+
{
|
| 577 |
+
"epoch": 1.328462304882099,
|
| 578 |
+
"grad_norm": 0.01364582683891058,
|
| 579 |
+
"learning_rate": 3.077130956267761e-05,
|
| 580 |
+
"loss": 0.0157,
|
| 581 |
+
"step": 4000
|
| 582 |
+
},
|
| 583 |
+
{
|
| 584 |
+
"epoch": 1.3450680836931252,
|
| 585 |
+
"grad_norm": 4.155655860900879,
|
| 586 |
+
"learning_rate": 3.0632135842357946e-05,
|
| 587 |
+
"loss": 0.015,
|
| 588 |
+
"step": 4050
|
| 589 |
+
},
|
| 590 |
+
{
|
| 591 |
+
"epoch": 1.3616738625041513,
|
| 592 |
+
"grad_norm": 0.22104530036449432,
|
| 593 |
+
"learning_rate": 3.0492962122038278e-05,
|
| 594 |
+
"loss": 0.0118,
|
| 595 |
+
"step": 4100
|
| 596 |
+
},
|
| 597 |
+
{
|
| 598 |
+
"epoch": 1.3782796413151777,
|
| 599 |
+
"grad_norm": 0.027948148548603058,
|
| 600 |
+
"learning_rate": 3.0353788401718614e-05,
|
| 601 |
+
"loss": 0.0238,
|
| 602 |
+
"step": 4150
|
| 603 |
+
},
|
| 604 |
+
{
|
| 605 |
+
"epoch": 1.394885420126204,
|
| 606 |
+
"grad_norm": 0.0006893076351843774,
|
| 607 |
+
"learning_rate": 3.021461468139895e-05,
|
| 608 |
+
"loss": 0.0001,
|
| 609 |
+
"step": 4200
|
| 610 |
+
},
|
| 611 |
+
{
|
| 612 |
+
"epoch": 1.41149119893723,
|
| 613 |
+
"grad_norm": 0.00040428817737847567,
|
| 614 |
+
"learning_rate": 3.0075440961079286e-05,
|
| 615 |
+
"loss": 0.0003,
|
| 616 |
+
"step": 4250
|
| 617 |
+
},
|
| 618 |
+
{
|
| 619 |
+
"epoch": 1.4280969777482564,
|
| 620 |
+
"grad_norm": 0.13213320076465607,
|
| 621 |
+
"learning_rate": 2.993626724075962e-05,
|
| 622 |
+
"loss": 0.0192,
|
| 623 |
+
"step": 4300
|
| 624 |
+
},
|
| 625 |
+
{
|
| 626 |
+
"epoch": 1.4447027565592827,
|
| 627 |
+
"grad_norm": 0.010131658986210823,
|
| 628 |
+
"learning_rate": 2.979709352043996e-05,
|
| 629 |
+
"loss": 0.0114,
|
| 630 |
+
"step": 4350
|
| 631 |
+
},
|
| 632 |
+
{
|
| 633 |
+
"epoch": 1.4613085353703088,
|
| 634 |
+
"grad_norm": 0.0037651287857443094,
|
| 635 |
+
"learning_rate": 2.9657919800120297e-05,
|
| 636 |
+
"loss": 0.0311,
|
| 637 |
+
"step": 4400
|
| 638 |
+
},
|
| 639 |
+
{
|
| 640 |
+
"epoch": 1.4779143141813351,
|
| 641 |
+
"grad_norm": 0.002042593201622367,
|
| 642 |
+
"learning_rate": 2.9518746079800632e-05,
|
| 643 |
+
"loss": 0.0004,
|
| 644 |
+
"step": 4450
|
| 645 |
+
},
|
| 646 |
+
{
|
| 647 |
+
"epoch": 1.4945200929923614,
|
| 648 |
+
"grad_norm": 0.0017547437455505133,
|
| 649 |
+
"learning_rate": 2.9379572359480968e-05,
|
| 650 |
+
"loss": 0.0003,
|
| 651 |
+
"step": 4500
|
| 652 |
+
},
|
| 653 |
+
{
|
| 654 |
+
"epoch": 1.5111258718033875,
|
| 655 |
+
"grad_norm": 0.0004074271419085562,
|
| 656 |
+
"learning_rate": 2.9240398639161304e-05,
|
| 657 |
+
"loss": 0.0,
|
| 658 |
+
"step": 4550
|
| 659 |
+
},
|
| 660 |
+
{
|
| 661 |
+
"epoch": 1.5277316506144138,
|
| 662 |
+
"grad_norm": 0.0007494412711821496,
|
| 663 |
+
"learning_rate": 2.910122491884164e-05,
|
| 664 |
+
"loss": 0.0,
|
| 665 |
+
"step": 4600
|
| 666 |
+
},
|
| 667 |
+
{
|
| 668 |
+
"epoch": 1.5443374294254402,
|
| 669 |
+
"grad_norm": 0.0027854584623128176,
|
| 670 |
+
"learning_rate": 2.8962051198521976e-05,
|
| 671 |
+
"loss": 0.0101,
|
| 672 |
+
"step": 4650
|
| 673 |
+
},
|
| 674 |
+
{
|
| 675 |
+
"epoch": 1.5609432082364663,
|
| 676 |
+
"grad_norm": 0.0018231570720672607,
|
| 677 |
+
"learning_rate": 2.882287747820232e-05,
|
| 678 |
+
"loss": 0.0038,
|
| 679 |
+
"step": 4700
|
| 680 |
+
},
|
| 681 |
+
{
|
| 682 |
+
"epoch": 1.5775489870474926,
|
| 683 |
+
"grad_norm": 0.0018910124199464917,
|
| 684 |
+
"learning_rate": 2.8683703757882654e-05,
|
| 685 |
+
"loss": 0.0101,
|
| 686 |
+
"step": 4750
|
| 687 |
+
},
|
| 688 |
+
{
|
| 689 |
+
"epoch": 1.594154765858519,
|
| 690 |
+
"grad_norm": 0.02916385605931282,
|
| 691 |
+
"learning_rate": 2.854453003756299e-05,
|
| 692 |
+
"loss": 0.0116,
|
| 693 |
+
"step": 4800
|
| 694 |
+
},
|
| 695 |
+
{
|
| 696 |
+
"epoch": 1.610760544669545,
|
| 697 |
+
"grad_norm": 0.000494068895932287,
|
| 698 |
+
"learning_rate": 2.8405356317243326e-05,
|
| 699 |
+
"loss": 0.0003,
|
| 700 |
+
"step": 4850
|
| 701 |
+
},
|
| 702 |
+
{
|
| 703 |
+
"epoch": 1.627366323480571,
|
| 704 |
+
"grad_norm": 0.003024607663974166,
|
| 705 |
+
"learning_rate": 2.8266182596923662e-05,
|
| 706 |
+
"loss": 0.0017,
|
| 707 |
+
"step": 4900
|
| 708 |
+
},
|
| 709 |
+
{
|
| 710 |
+
"epoch": 1.6439721022915976,
|
| 711 |
+
"grad_norm": 0.0006424608873203397,
|
| 712 |
+
"learning_rate": 2.8127008876603998e-05,
|
| 713 |
+
"loss": 0.0004,
|
| 714 |
+
"step": 4950
|
| 715 |
+
},
|
| 716 |
+
{
|
| 717 |
+
"epoch": 1.6605778811026237,
|
| 718 |
+
"grad_norm": 0.0013206731528043747,
|
| 719 |
+
"learning_rate": 2.7987835156284334e-05,
|
| 720 |
+
"loss": 0.0421,
|
| 721 |
+
"step": 5000
|
| 722 |
+
},
|
| 723 |
+
{
|
| 724 |
+
"epoch": 1.6771836599136498,
|
| 725 |
+
"grad_norm": 0.009919991716742516,
|
| 726 |
+
"learning_rate": 2.7848661435964673e-05,
|
| 727 |
+
"loss": 0.014,
|
| 728 |
+
"step": 5050
|
| 729 |
+
},
|
| 730 |
+
{
|
| 731 |
+
"epoch": 1.6937894387246761,
|
| 732 |
+
"grad_norm": 0.0010778785217553377,
|
| 733 |
+
"learning_rate": 2.770948771564501e-05,
|
| 734 |
+
"loss": 0.0111,
|
| 735 |
+
"step": 5100
|
| 736 |
+
},
|
| 737 |
+
{
|
| 738 |
+
"epoch": 1.7103952175357025,
|
| 739 |
+
"grad_norm": 0.048366744071245193,
|
| 740 |
+
"learning_rate": 2.7570313995325345e-05,
|
| 741 |
+
"loss": 0.0173,
|
| 742 |
+
"step": 5150
|
| 743 |
+
},
|
| 744 |
+
{
|
| 745 |
+
"epoch": 1.7270009963467285,
|
| 746 |
+
"grad_norm": 0.0018048906931653619,
|
| 747 |
+
"learning_rate": 2.743114027500568e-05,
|
| 748 |
+
"loss": 0.0004,
|
| 749 |
+
"step": 5200
|
| 750 |
+
},
|
| 751 |
+
{
|
| 752 |
+
"epoch": 1.7436067751577549,
|
| 753 |
+
"grad_norm": 0.0049881902523338795,
|
| 754 |
+
"learning_rate": 2.7291966554686016e-05,
|
| 755 |
+
"loss": 0.0055,
|
| 756 |
+
"step": 5250
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"epoch": 1.7602125539687812,
|
| 760 |
+
"grad_norm": 0.035248152911663055,
|
| 761 |
+
"learning_rate": 2.7152792834366352e-05,
|
| 762 |
+
"loss": 0.0139,
|
| 763 |
+
"step": 5300
|
| 764 |
+
},
|
| 765 |
+
{
|
| 766 |
+
"epoch": 1.7768183327798073,
|
| 767 |
+
"grad_norm": 0.0055509209632873535,
|
| 768 |
+
"learning_rate": 2.7013619114046688e-05,
|
| 769 |
+
"loss": 0.0119,
|
| 770 |
+
"step": 5350
|
| 771 |
+
},
|
| 772 |
+
{
|
| 773 |
+
"epoch": 1.7934241115908336,
|
| 774 |
+
"grad_norm": 0.0015154307475313544,
|
| 775 |
+
"learning_rate": 2.6874445393727027e-05,
|
| 776 |
+
"loss": 0.0059,
|
| 777 |
+
"step": 5400
|
| 778 |
+
},
|
| 779 |
+
{
|
| 780 |
+
"epoch": 1.81002989040186,
|
| 781 |
+
"grad_norm": 0.002082501072436571,
|
| 782 |
+
"learning_rate": 2.6735271673407363e-05,
|
| 783 |
+
"loss": 0.0001,
|
| 784 |
+
"step": 5450
|
| 785 |
+
},
|
| 786 |
+
{
|
| 787 |
+
"epoch": 1.826635669212886,
|
| 788 |
+
"grad_norm": 0.0010320444125682116,
|
| 789 |
+
"learning_rate": 2.65960979530877e-05,
|
| 790 |
+
"loss": 0.0026,
|
| 791 |
+
"step": 5500
|
| 792 |
+
},
|
| 793 |
+
{
|
| 794 |
+
"epoch": 1.8432414480239123,
|
| 795 |
+
"grad_norm": 0.0015966288046911359,
|
| 796 |
+
"learning_rate": 2.6456924232768035e-05,
|
| 797 |
+
"loss": 0.0001,
|
| 798 |
+
"step": 5550
|
| 799 |
+
},
|
| 800 |
+
{
|
| 801 |
+
"epoch": 1.8598472268349386,
|
| 802 |
+
"grad_norm": 0.0002549632336013019,
|
| 803 |
+
"learning_rate": 2.631775051244837e-05,
|
| 804 |
+
"loss": 0.0,
|
| 805 |
+
"step": 5600
|
| 806 |
+
},
|
| 807 |
+
{
|
| 808 |
+
"epoch": 1.8764530056459647,
|
| 809 |
+
"grad_norm": 0.003573563415557146,
|
| 810 |
+
"learning_rate": 2.6178576792128707e-05,
|
| 811 |
+
"loss": 0.0119,
|
| 812 |
+
"step": 5650
|
| 813 |
+
},
|
| 814 |
+
{
|
| 815 |
+
"epoch": 1.893058784456991,
|
| 816 |
+
"grad_norm": 0.0006668689311482012,
|
| 817 |
+
"learning_rate": 2.6039403071809043e-05,
|
| 818 |
+
"loss": 0.0002,
|
| 819 |
+
"step": 5700
|
| 820 |
+
},
|
| 821 |
+
{
|
| 822 |
+
"epoch": 1.9096645632680174,
|
| 823 |
+
"grad_norm": 0.0003313050256110728,
|
| 824 |
+
"learning_rate": 2.5900229351489382e-05,
|
| 825 |
+
"loss": 0.0005,
|
| 826 |
+
"step": 5750
|
| 827 |
+
},
|
| 828 |
+
{
|
| 829 |
+
"epoch": 1.9262703420790435,
|
| 830 |
+
"grad_norm": 0.000735230278223753,
|
| 831 |
+
"learning_rate": 2.5761055631169718e-05,
|
| 832 |
+
"loss": 0.0033,
|
| 833 |
+
"step": 5800
|
| 834 |
+
},
|
| 835 |
+
{
|
| 836 |
+
"epoch": 1.9428761208900698,
|
| 837 |
+
"grad_norm": 0.0006316429935395718,
|
| 838 |
+
"learning_rate": 2.5621881910850054e-05,
|
| 839 |
+
"loss": 0.0,
|
| 840 |
+
"step": 5850
|
| 841 |
+
},
|
| 842 |
+
{
|
| 843 |
+
"epoch": 1.959481899701096,
|
| 844 |
+
"grad_norm": 0.07467895746231079,
|
| 845 |
+
"learning_rate": 2.548270819053039e-05,
|
| 846 |
+
"loss": 0.0239,
|
| 847 |
+
"step": 5900
|
| 848 |
+
},
|
| 849 |
+
{
|
| 850 |
+
"epoch": 1.9760876785121222,
|
| 851 |
+
"grad_norm": 0.0030303276143968105,
|
| 852 |
+
"learning_rate": 2.5343534470210725e-05,
|
| 853 |
+
"loss": 0.0051,
|
| 854 |
+
"step": 5950
|
| 855 |
+
},
|
| 856 |
+
{
|
| 857 |
+
"epoch": 1.9926934573231485,
|
| 858 |
+
"grad_norm": 0.028144309297204018,
|
| 859 |
+
"learning_rate": 2.520436074989106e-05,
|
| 860 |
+
"loss": 0.0411,
|
| 861 |
+
"step": 6000
|
| 862 |
+
},
|
| 863 |
+
{
|
| 864 |
+
"epoch": 2.0,
|
| 865 |
+
"eval_accuracy": 0.9971374045801527,
|
| 866 |
+
"eval_f1": 0.9970892145409969,
|
| 867 |
+
"eval_loss": 0.022991055622696877,
|
| 868 |
+
"eval_precision": 0.9971362128759713,
|
| 869 |
+
"eval_recall": 0.9971374045801527,
|
| 870 |
+
"eval_runtime": 67.1475,
|
| 871 |
+
"eval_samples_per_second": 124.859,
|
| 872 |
+
"eval_steps_per_second": 7.804,
|
| 873 |
+
"step": 6022
|
| 874 |
+
},
|
| 875 |
+
{
|
| 876 |
+
"epoch": 2.009299236134175,
|
| 877 |
+
"grad_norm": 0.01069187093526125,
|
| 878 |
+
"learning_rate": 2.5065187029571397e-05,
|
| 879 |
+
"loss": 0.0176,
|
| 880 |
+
"step": 6050
|
| 881 |
+
},
|
| 882 |
+
{
|
| 883 |
+
"epoch": 2.025905014945201,
|
| 884 |
+
"grad_norm": 0.002449960447847843,
|
| 885 |
+
"learning_rate": 2.492601330925174e-05,
|
| 886 |
+
"loss": 0.0034,
|
| 887 |
+
"step": 6100
|
| 888 |
+
},
|
| 889 |
+
{
|
| 890 |
+
"epoch": 2.042510793756227,
|
| 891 |
+
"grad_norm": 0.006188757251948118,
|
| 892 |
+
"learning_rate": 2.4786839588932076e-05,
|
| 893 |
+
"loss": 0.0001,
|
| 894 |
+
"step": 6150
|
| 895 |
+
},
|
| 896 |
+
{
|
| 897 |
+
"epoch": 2.0591165725672536,
|
| 898 |
+
"grad_norm": 0.0007646733429282904,
|
| 899 |
+
"learning_rate": 2.464766586861241e-05,
|
| 900 |
+
"loss": 0.0049,
|
| 901 |
+
"step": 6200
|
| 902 |
+
},
|
| 903 |
+
{
|
| 904 |
+
"epoch": 2.0757223513782797,
|
| 905 |
+
"grad_norm": 0.008807332254946232,
|
| 906 |
+
"learning_rate": 2.4508492148292747e-05,
|
| 907 |
+
"loss": 0.0108,
|
| 908 |
+
"step": 6250
|
| 909 |
+
},
|
| 910 |
+
{
|
| 911 |
+
"epoch": 2.0923281301893057,
|
| 912 |
+
"grad_norm": 0.0009894605027511716,
|
| 913 |
+
"learning_rate": 2.4369318427973083e-05,
|
| 914 |
+
"loss": 0.0002,
|
| 915 |
+
"step": 6300
|
| 916 |
+
},
|
| 917 |
+
{
|
| 918 |
+
"epoch": 2.1089339090003323,
|
| 919 |
+
"grad_norm": 0.0016475095180794597,
|
| 920 |
+
"learning_rate": 2.423014470765342e-05,
|
| 921 |
+
"loss": 0.0009,
|
| 922 |
+
"step": 6350
|
| 923 |
+
},
|
| 924 |
+
{
|
| 925 |
+
"epoch": 2.1255396878113584,
|
| 926 |
+
"grad_norm": 0.000374704715795815,
|
| 927 |
+
"learning_rate": 2.4090970987333755e-05,
|
| 928 |
+
"loss": 0.002,
|
| 929 |
+
"step": 6400
|
| 930 |
+
},
|
| 931 |
+
{
|
| 932 |
+
"epoch": 2.1421454666223845,
|
| 933 |
+
"grad_norm": 0.0004738509014714509,
|
| 934 |
+
"learning_rate": 2.3951797267014094e-05,
|
| 935 |
+
"loss": 0.0,
|
| 936 |
+
"step": 6450
|
| 937 |
+
},
|
| 938 |
+
{
|
| 939 |
+
"epoch": 2.158751245433411,
|
| 940 |
+
"grad_norm": 0.004183988086879253,
|
| 941 |
+
"learning_rate": 2.381262354669443e-05,
|
| 942 |
+
"loss": 0.0059,
|
| 943 |
+
"step": 6500
|
| 944 |
+
},
|
| 945 |
+
{
|
| 946 |
+
"epoch": 2.175357024244437,
|
| 947 |
+
"grad_norm": 0.0020687321666628122,
|
| 948 |
+
"learning_rate": 2.3673449826374766e-05,
|
| 949 |
+
"loss": 0.0079,
|
| 950 |
+
"step": 6550
|
| 951 |
+
},
|
| 952 |
+
{
|
| 953 |
+
"epoch": 2.191962803055463,
|
| 954 |
+
"grad_norm": 0.00045966755715198815,
|
| 955 |
+
"learning_rate": 2.35342761060551e-05,
|
| 956 |
+
"loss": 0.0,
|
| 957 |
+
"step": 6600
|
| 958 |
+
},
|
| 959 |
+
{
|
| 960 |
+
"epoch": 2.2085685818664897,
|
| 961 |
+
"grad_norm": 0.001768000889569521,
|
| 962 |
+
"learning_rate": 2.3395102385735438e-05,
|
| 963 |
+
"loss": 0.0,
|
| 964 |
+
"step": 6650
|
| 965 |
+
},
|
| 966 |
+
{
|
| 967 |
+
"epoch": 2.225174360677516,
|
| 968 |
+
"grad_norm": 0.00014257608563639224,
|
| 969 |
+
"learning_rate": 2.3255928665415773e-05,
|
| 970 |
+
"loss": 0.0,
|
| 971 |
+
"step": 6700
|
| 972 |
+
},
|
| 973 |
+
{
|
| 974 |
+
"epoch": 2.241780139488542,
|
| 975 |
+
"grad_norm": 0.0002034334756899625,
|
| 976 |
+
"learning_rate": 2.311675494509611e-05,
|
| 977 |
+
"loss": 0.0121,
|
| 978 |
+
"step": 6750
|
| 979 |
+
},
|
| 980 |
+
{
|
| 981 |
+
"epoch": 2.2583859182995685,
|
| 982 |
+
"grad_norm": 0.7104772329330444,
|
| 983 |
+
"learning_rate": 2.297758122477645e-05,
|
| 984 |
+
"loss": 0.0,
|
| 985 |
+
"step": 6800
|
| 986 |
+
},
|
| 987 |
+
{
|
| 988 |
+
"epoch": 2.2749916971105946,
|
| 989 |
+
"grad_norm": 0.0013439609901979566,
|
| 990 |
+
"learning_rate": 2.2838407504456784e-05,
|
| 991 |
+
"loss": 0.0116,
|
| 992 |
+
"step": 6850
|
| 993 |
+
},
|
| 994 |
+
{
|
| 995 |
+
"epoch": 2.2915974759216207,
|
| 996 |
+
"grad_norm": 0.003318699076771736,
|
| 997 |
+
"learning_rate": 2.269923378413712e-05,
|
| 998 |
+
"loss": 0.0001,
|
| 999 |
+
"step": 6900
|
| 1000 |
+
},
|
| 1001 |
+
{
|
| 1002 |
+
"epoch": 2.308203254732647,
|
| 1003 |
+
"grad_norm": 0.00031852992833592,
|
| 1004 |
+
"learning_rate": 2.2560060063817456e-05,
|
| 1005 |
+
"loss": 0.0065,
|
| 1006 |
+
"step": 6950
|
| 1007 |
+
},
|
| 1008 |
+
{
|
| 1009 |
+
"epoch": 2.3248090335436733,
|
| 1010 |
+
"grad_norm": 0.00043174950405955315,
|
| 1011 |
+
"learning_rate": 2.2420886343497792e-05,
|
| 1012 |
+
"loss": 0.0,
|
| 1013 |
+
"step": 7000
|
| 1014 |
+
},
|
| 1015 |
+
{
|
| 1016 |
+
"epoch": 2.3414148123546994,
|
| 1017 |
+
"grad_norm": 0.008213113993406296,
|
| 1018 |
+
"learning_rate": 2.2281712623178128e-05,
|
| 1019 |
+
"loss": 0.0164,
|
| 1020 |
+
"step": 7050
|
| 1021 |
+
},
|
| 1022 |
+
{
|
| 1023 |
+
"epoch": 2.3580205911657255,
|
| 1024 |
+
"grad_norm": 0.0064167445525527,
|
| 1025 |
+
"learning_rate": 2.2142538902858464e-05,
|
| 1026 |
+
"loss": 0.0035,
|
| 1027 |
+
"step": 7100
|
| 1028 |
+
},
|
| 1029 |
+
{
|
| 1030 |
+
"epoch": 2.374626369976752,
|
| 1031 |
+
"grad_norm": 0.00048106853500939906,
|
| 1032 |
+
"learning_rate": 2.2003365182538803e-05,
|
| 1033 |
+
"loss": 0.0002,
|
| 1034 |
+
"step": 7150
|
| 1035 |
+
},
|
| 1036 |
+
{
|
| 1037 |
+
"epoch": 2.391232148787778,
|
| 1038 |
+
"grad_norm": 0.00031906799995340407,
|
| 1039 |
+
"learning_rate": 2.186419146221914e-05,
|
| 1040 |
+
"loss": 0.0032,
|
| 1041 |
+
"step": 7200
|
| 1042 |
+
},
|
| 1043 |
+
{
|
| 1044 |
+
"epoch": 2.407837927598804,
|
| 1045 |
+
"grad_norm": 0.0002367593697272241,
|
| 1046 |
+
"learning_rate": 2.1725017741899475e-05,
|
| 1047 |
+
"loss": 0.0001,
|
| 1048 |
+
"step": 7250
|
| 1049 |
+
},
|
| 1050 |
+
{
|
| 1051 |
+
"epoch": 2.4244437064098308,
|
| 1052 |
+
"grad_norm": 0.0002154409303329885,
|
| 1053 |
+
"learning_rate": 2.158584402157981e-05,
|
| 1054 |
+
"loss": 0.0,
|
| 1055 |
+
"step": 7300
|
| 1056 |
+
},
|
| 1057 |
+
{
|
| 1058 |
+
"epoch": 2.441049485220857,
|
| 1059 |
+
"grad_norm": 0.004206398501992226,
|
| 1060 |
+
"learning_rate": 2.1446670301260146e-05,
|
| 1061 |
+
"loss": 0.0,
|
| 1062 |
+
"step": 7350
|
| 1063 |
+
},
|
| 1064 |
+
{
|
| 1065 |
+
"epoch": 2.457655264031883,
|
| 1066 |
+
"grad_norm": 0.00027124237385578454,
|
| 1067 |
+
"learning_rate": 2.1307496580940482e-05,
|
| 1068 |
+
"loss": 0.0,
|
| 1069 |
+
"step": 7400
|
| 1070 |
+
},
|
| 1071 |
+
{
|
| 1072 |
+
"epoch": 2.4742610428429095,
|
| 1073 |
+
"grad_norm": 0.00019591822638176382,
|
| 1074 |
+
"learning_rate": 2.1168322860620818e-05,
|
| 1075 |
+
"loss": 0.0,
|
| 1076 |
+
"step": 7450
|
| 1077 |
+
},
|
| 1078 |
+
{
|
| 1079 |
+
"epoch": 2.4908668216539356,
|
| 1080 |
+
"grad_norm": 0.004273345228284597,
|
| 1081 |
+
"learning_rate": 2.102914914030116e-05,
|
| 1082 |
+
"loss": 0.0069,
|
| 1083 |
+
"step": 7500
|
| 1084 |
+
},
|
| 1085 |
+
{
|
| 1086 |
+
"epoch": 2.5074726004649617,
|
| 1087 |
+
"grad_norm": 0.0011552508221939206,
|
| 1088 |
+
"learning_rate": 2.0889975419981497e-05,
|
| 1089 |
+
"loss": 0.0108,
|
| 1090 |
+
"step": 7550
|
| 1091 |
+
},
|
| 1092 |
+
{
|
| 1093 |
+
"epoch": 2.5240783792759878,
|
| 1094 |
+
"grad_norm": 0.0009868694469332695,
|
| 1095 |
+
"learning_rate": 2.0750801699661833e-05,
|
| 1096 |
+
"loss": 0.0023,
|
| 1097 |
+
"step": 7600
|
| 1098 |
+
},
|
| 1099 |
+
{
|
| 1100 |
+
"epoch": 2.5406841580870143,
|
| 1101 |
+
"grad_norm": 0.00020178337581455708,
|
| 1102 |
+
"learning_rate": 2.061162797934217e-05,
|
| 1103 |
+
"loss": 0.0131,
|
| 1104 |
+
"step": 7650
|
| 1105 |
+
},
|
| 1106 |
+
{
|
| 1107 |
+
"epoch": 2.5572899368980404,
|
| 1108 |
+
"grad_norm": 0.005387285258620977,
|
| 1109 |
+
"learning_rate": 2.0472454259022504e-05,
|
| 1110 |
+
"loss": 0.0,
|
| 1111 |
+
"step": 7700
|
| 1112 |
+
},
|
| 1113 |
+
{
|
| 1114 |
+
"epoch": 2.5738957157090665,
|
| 1115 |
+
"grad_norm": 5.118713670526631e-05,
|
| 1116 |
+
"learning_rate": 2.033328053870284e-05,
|
| 1117 |
+
"loss": 0.0001,
|
| 1118 |
+
"step": 7750
|
| 1119 |
+
},
|
| 1120 |
+
{
|
| 1121 |
+
"epoch": 2.590501494520093,
|
| 1122 |
+
"grad_norm": 0.00040173486922867596,
|
| 1123 |
+
"learning_rate": 2.0194106818383176e-05,
|
| 1124 |
+
"loss": 0.0001,
|
| 1125 |
+
"step": 7800
|
| 1126 |
+
},
|
| 1127 |
+
{
|
| 1128 |
+
"epoch": 2.607107273331119,
|
| 1129 |
+
"grad_norm": 6.548867531819269e-05,
|
| 1130 |
+
"learning_rate": 2.0054933098063512e-05,
|
| 1131 |
+
"loss": 0.0,
|
| 1132 |
+
"step": 7850
|
| 1133 |
+
},
|
| 1134 |
+
{
|
| 1135 |
+
"epoch": 2.6237130521421452,
|
| 1136 |
+
"grad_norm": 0.00011618030839599669,
|
| 1137 |
+
"learning_rate": 1.991575937774385e-05,
|
| 1138 |
+
"loss": 0.0,
|
| 1139 |
+
"step": 7900
|
| 1140 |
+
},
|
| 1141 |
+
{
|
| 1142 |
+
"epoch": 2.6403188309531718,
|
| 1143 |
+
"grad_norm": 3.7753208744106814e-05,
|
| 1144 |
+
"learning_rate": 1.9776585657424187e-05,
|
| 1145 |
+
"loss": 0.0,
|
| 1146 |
+
"step": 7950
|
| 1147 |
+
},
|
| 1148 |
+
{
|
| 1149 |
+
"epoch": 2.656924609764198,
|
| 1150 |
+
"grad_norm": 0.00011960588017245755,
|
| 1151 |
+
"learning_rate": 1.9637411937104523e-05,
|
| 1152 |
+
"loss": 0.0,
|
| 1153 |
+
"step": 8000
|
| 1154 |
+
},
|
| 1155 |
+
{
|
| 1156 |
+
"epoch": 2.673530388575224,
|
| 1157 |
+
"grad_norm": 0.0005026832805015147,
|
| 1158 |
+
"learning_rate": 1.949823821678486e-05,
|
| 1159 |
+
"loss": 0.0,
|
| 1160 |
+
"step": 8050
|
| 1161 |
+
},
|
| 1162 |
+
{
|
| 1163 |
+
"epoch": 2.6901361673862505,
|
| 1164 |
+
"grad_norm": 5.5350832553813234e-05,
|
| 1165 |
+
"learning_rate": 1.9359064496465198e-05,
|
| 1166 |
+
"loss": 0.0,
|
| 1167 |
+
"step": 8100
|
| 1168 |
+
},
|
| 1169 |
+
{
|
| 1170 |
+
"epoch": 2.7067419461972766,
|
| 1171 |
+
"grad_norm": 0.00011889787128893659,
|
| 1172 |
+
"learning_rate": 1.9219890776145534e-05,
|
| 1173 |
+
"loss": 0.006,
|
| 1174 |
+
"step": 8150
|
| 1175 |
+
},
|
| 1176 |
+
{
|
| 1177 |
+
"epoch": 2.7233477250083027,
|
| 1178 |
+
"grad_norm": 0.001216597855091095,
|
| 1179 |
+
"learning_rate": 1.908071705582587e-05,
|
| 1180 |
+
"loss": 0.0,
|
| 1181 |
+
"step": 8200
|
| 1182 |
+
},
|
| 1183 |
+
{
|
| 1184 |
+
"epoch": 2.7399535038193292,
|
| 1185 |
+
"grad_norm": 0.000991505105048418,
|
| 1186 |
+
"learning_rate": 1.8941543335506206e-05,
|
| 1187 |
+
"loss": 0.028,
|
| 1188 |
+
"step": 8250
|
| 1189 |
+
},
|
| 1190 |
+
{
|
| 1191 |
+
"epoch": 2.7565592826303553,
|
| 1192 |
+
"grad_norm": 0.0008777762413956225,
|
| 1193 |
+
"learning_rate": 1.880236961518654e-05,
|
| 1194 |
+
"loss": 0.0017,
|
| 1195 |
+
"step": 8300
|
| 1196 |
+
},
|
| 1197 |
+
{
|
| 1198 |
+
"epoch": 2.7731650614413814,
|
| 1199 |
+
"grad_norm": 0.0007498575723730028,
|
| 1200 |
+
"learning_rate": 1.8663195894866877e-05,
|
| 1201 |
+
"loss": 0.0001,
|
| 1202 |
+
"step": 8350
|
| 1203 |
+
},
|
| 1204 |
+
{
|
| 1205 |
+
"epoch": 2.789770840252408,
|
| 1206 |
+
"grad_norm": 0.0012167900567874312,
|
| 1207 |
+
"learning_rate": 1.8524022174547213e-05,
|
| 1208 |
+
"loss": 0.0,
|
| 1209 |
+
"step": 8400
|
| 1210 |
+
},
|
| 1211 |
+
{
|
| 1212 |
+
"epoch": 2.806376619063434,
|
| 1213 |
+
"grad_norm": 0.007960589602589607,
|
| 1214 |
+
"learning_rate": 1.8384848454227552e-05,
|
| 1215 |
+
"loss": 0.0,
|
| 1216 |
+
"step": 8450
|
| 1217 |
+
},
|
| 1218 |
+
{
|
| 1219 |
+
"epoch": 2.82298239787446,
|
      "grad_norm": 0.00019711998174898326, "learning_rate": 1.8245674733907888e-05, "loss": 0.0, "step": 8500},
    {"epoch": 2.8395881766854867, "grad_norm": 0.009689416736364365, "learning_rate": 1.8106501013588224e-05, "loss": 0.012, "step": 8550},
    {"epoch": 2.856193955496513, "grad_norm": 0.0010081271175295115, "learning_rate": 1.796732729326856e-05, "loss": 0.0001, "step": 8600},
    {"epoch": 2.872799734307539, "grad_norm": 0.0037073129788041115, "learning_rate": 1.7828153572948896e-05, "loss": 0.0037, "step": 8650},
    {"epoch": 2.8894055131185654, "grad_norm": 0.000657514261547476, "learning_rate": 1.7688979852629232e-05, "loss": 0.0005, "step": 8700},
    {"epoch": 2.9060112919295915, "grad_norm": 0.0041756597347557545, "learning_rate": 1.7549806132309568e-05, "loss": 0.0001, "step": 8750},
    {"epoch": 2.9226170707406176, "grad_norm": 0.0014196323463693261, "learning_rate": 1.7410632411989907e-05, "loss": 0.0051, "step": 8800},
    {"epoch": 2.939222849551644, "grad_norm": 0.001023727236315608, "learning_rate": 1.7271458691670243e-05, "loss": 0.0112, "step": 8850},
    {"epoch": 2.9558286283626702, "grad_norm": 0.0005508707836270332, "learning_rate": 1.713228497135058e-05, "loss": 0.0001, "step": 8900},
    {"epoch": 2.9724344071736963, "grad_norm": 0.0005536659737117589, "learning_rate": 1.6993111251030918e-05, "loss": 0.0, "step": 8950},
    {"epoch": 2.989040185984723, "grad_norm": 0.002692180685698986, "learning_rate": 1.6853937530711254e-05, "loss": 0.0018, "step": 9000},
    {"epoch": 3.0, "eval_accuracy": 0.9977337786259542, "eval_f1": 0.997711567093862, "eval_loss": 0.017316868528723717, "eval_precision": 0.9977219427584941, "eval_recall": 0.9977337786259542, "eval_runtime": 67.8318, "eval_samples_per_second": 123.6, "eval_steps_per_second": 7.725, "step": 9033}
  ],
  "logging_steps": 50,
  "max_steps": 15055,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.5282741915076608e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
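The trainer_state.json files in this commit are plain JSON, so the per-trial training logs and eval metrics can be compared without loading any checkpoints. Below is a minimal sketch, assuming a local clone of this repository with the trial-*/checkpoint-*/trainer_state.json layout shown in this diff; it uses only the standard library.

```python
import json
from pathlib import Path

# Assumed: the current directory is a local clone of this repository.
repo_root = Path(".")

for state_file in sorted(repo_root.glob("trial-*/checkpoint-*/trainer_state.json")):
    state = json.loads(state_file.read_text())
    # Entries that contain eval_* keys are the end-of-epoch evaluations.
    evals = [e for e in state["log_history"] if "eval_loss" in e]
    best = min(evals, key=lambda e: e["eval_loss"]) if evals else None
    print(
        state_file.parent.as_posix(),
        "best_metric:", state.get("best_metric"),
        "best eval_f1:", best["eval_f1"] if best else "n/a",
    )
```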
trial-2/checkpoint-9033/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:31b16aa5b08a9764ff6ccb4b3d56d2656dee1972624cbbcf01b67cb965ad3e06
size 5368
trial-3/checkpoint-1506/config.json
ADDED
@@ -0,0 +1,47 @@
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "reference_compile": true,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vocab_size": 50368
}
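Each checkpoint directory pairs this config.json with the model.safetensors weights listed next, so it can be loaded directly with transformers. A minimal sketch, assuming a local clone; the checkpoint folders in this diff do not appear to include tokenizer files, so the tokenizer is taken from the base model named in the config:

```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Path taken from this diff; adjust to whichever trial/checkpoint you want to inspect.
checkpoint_dir = "trial-3/checkpoint-1506"

model = AutoModelForSequenceClassification.from_pretrained(checkpoint_dir)
tokenizer = AutoTokenizer.from_pretrained("answerdotai/ModernBERT-large")

inputs = tokenizer("example text to classify", return_tensors="pt")
logits = model(**inputs).logits
print(logits.argmax(dim=-1))
```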
trial-3/checkpoint-1506/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2633ff2f035355d1f491e943541f26f2b553f8bb1f21a963431b246a1dac02a3
size 1583351632
trial-3/checkpoint-1506/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8dbd147ad035350b88e1d53f3cf901e1b9d28549304b83bb06a4cc02825d41cf
size 3166813178
trial-3/checkpoint-1506/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:568428d80a25211a390c359ca51b0b20b38ca0607fbc196f106c9841c02d3e59
size 14244
trial-3/checkpoint-1506/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:baefd0e97bcced8a792772527e556de3be3ee540a7e8c67d6914b57db907793c
size 1064
trial-3/checkpoint-1506/trainer_state.json
ADDED
@@ -0,0 +1,255 @@
{
  "best_metric": 0.029849544167518616,
  "best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-3/checkpoint-1506",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1506,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.033200531208499334, "grad_norm": 23.8559627532959, "learning_rate": 4.534517164997864e-06, "loss": 0.4406, "step": 50},
    {"epoch": 0.06640106241699867, "grad_norm": 6.17399787902832, "learning_rate": 4.51562019398187e-06, "loss": 0.1819, "step": 100},
    {"epoch": 0.099601593625498, "grad_norm": 0.49522921442985535, "learning_rate": 4.496723222965876e-06, "loss": 0.107, "step": 150},
    {"epoch": 0.13280212483399734, "grad_norm": 7.754148960113525, "learning_rate": 4.477826251949882e-06, "loss": 0.0688, "step": 200},
    {"epoch": 0.16600265604249667, "grad_norm": 0.5113905668258667, "learning_rate": 4.458929280933889e-06, "loss": 0.0495, "step": 250},
    {"epoch": 0.199203187250996, "grad_norm": 4.4989800453186035, "learning_rate": 4.440032309917895e-06, "loss": 0.0415, "step": 300},
    {"epoch": 0.23240371845949534, "grad_norm": 9.69454288482666, "learning_rate": 4.421135338901901e-06, "loss": 0.0663, "step": 350},
    {"epoch": 0.2656042496679947, "grad_norm": 0.023875955492258072, "learning_rate": 4.4022383678859074e-06, "loss": 0.0373, "step": 400},
    {"epoch": 0.29880478087649404, "grad_norm": 0.09503920376300812, "learning_rate": 4.383341396869914e-06, "loss": 0.0444, "step": 450},
    {"epoch": 0.33200531208499334, "grad_norm": 0.008267635479569435, "learning_rate": 4.36444442585392e-06, "loss": 0.0286, "step": 500},
    {"epoch": 0.3652058432934927, "grad_norm": 0.12851744890213013, "learning_rate": 4.345547454837926e-06, "loss": 0.0204, "step": 550},
    {"epoch": 0.398406374501992, "grad_norm": 2.524458646774292, "learning_rate": 4.3266504838219325e-06, "loss": 0.0095, "step": 600},
    {"epoch": 0.4316069057104914, "grad_norm": 1.337737798690796, "learning_rate": 4.307753512805939e-06, "loss": 0.0249, "step": 650},
    {"epoch": 0.4648074369189907, "grad_norm": 0.08153943717479706, "learning_rate": 4.288856541789945e-06, "loss": 0.0092, "step": 700},
    {"epoch": 0.49800796812749004, "grad_norm": 0.018035605549812317, "learning_rate": 4.269959570773951e-06, "loss": 0.0132, "step": 750},
    {"epoch": 0.5312084993359893, "grad_norm": 0.22391293942928314, "learning_rate": 4.251062599757957e-06, "loss": 0.0103, "step": 800},
    {"epoch": 0.5644090305444888, "grad_norm": 7.64361047744751, "learning_rate": 4.232165628741963e-06, "loss": 0.0099, "step": 850},
    {"epoch": 0.5976095617529881, "grad_norm": 0.0029439961072057486, "learning_rate": 4.213268657725969e-06, "loss": 0.0099, "step": 900},
    {"epoch": 0.6308100929614874, "grad_norm": 0.02843591570854187, "learning_rate": 4.194371686709975e-06, "loss": 0.0051, "step": 950},
    {"epoch": 0.6640106241699867, "grad_norm": 0.21529018878936768, "learning_rate": 4.175474715693982e-06, "loss": 0.0165, "step": 1000},
    {"epoch": 0.6972111553784861, "grad_norm": 0.0392189547419548, "learning_rate": 4.156577744677988e-06, "loss": 0.011, "step": 1050},
    {"epoch": 0.7304116865869854, "grad_norm": 0.006516862660646439, "learning_rate": 4.137680773661994e-06, "loss": 0.002, "step": 1100},
    {"epoch": 0.7636122177954847, "grad_norm": 0.0022248616442084312, "learning_rate": 4.1187838026460004e-06, "loss": 0.0117, "step": 1150},
    {"epoch": 0.796812749003984, "grad_norm": 0.014311583712697029, "learning_rate": 4.099886831630007e-06, "loss": 0.0115, "step": 1200},
    {"epoch": 0.8300132802124834, "grad_norm": 0.006024663802236319, "learning_rate": 4.080989860614013e-06, "loss": 0.0004, "step": 1250},
    {"epoch": 0.8632138114209827, "grad_norm": 0.005676358472555876, "learning_rate": 4.062092889598019e-06, "loss": 0.0067, "step": 1300},
    {"epoch": 0.896414342629482, "grad_norm": 0.0005013855989091098, "learning_rate": 4.0431959185820255e-06, "loss": 0.0008, "step": 1350},
    {"epoch": 0.9296148738379814, "grad_norm": 0.0016384737100452185, "learning_rate": 4.024298947566032e-06, "loss": 0.001, "step": 1400},
    {"epoch": 0.9628154050464808, "grad_norm": 3.2831873893737793, "learning_rate": 4.005401976550038e-06, "loss": 0.0015, "step": 1450},
    {"epoch": 0.9960159362549801, "grad_norm": 0.036648038774728775, "learning_rate": 3.986505005534044e-06, "loss": 0.0061, "step": 1500},
    {"epoch": 1.0, "eval_accuracy": 0.9970181297709924, "eval_f1": 0.9969934810410721, "eval_loss": 0.029849544167518616, "eval_precision": 0.9969918072979542, "eval_recall": 0.9970181297709924, "eval_runtime": 59.3241, "eval_samples_per_second": 141.325, "eval_steps_per_second": 4.416, "step": 1506}
  ],
  "logging_steps": 50,
  "max_steps": 12048,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 8,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.094247305025536e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
trial-3/checkpoint-1506/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:862f96821c71b38bd72c3c10e43adbfe554490b2332bd838409265267f16997e
size 5368
trial-4/checkpoint-3012/config.json
ADDED
@@ -0,0 +1,47 @@
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "reference_compile": true,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vocab_size": 50368
}
trial-4/checkpoint-3012/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:84a78c605436363f9db346ac678e170cff4e009ca2331def7f65ff704e3349e4
size 1583351632
trial-4/checkpoint-3012/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4385bc937b3ea0adfa5c713df04eb100e89eb731c8f09a620e7ed3c4d32d4df3
size 3166813178
trial-4/checkpoint-3012/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:914f37830aa379563c31bd15a8b8f53b8ccc8e2de0f0aa6da9695369e4ad84ef
size 14244
trial-4/checkpoint-3012/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:159228974380d6d8a61d5fb2da4e5cc76ffc86287d9ca0582ad4c46cb816cc56
size 1064
trial-4/checkpoint-3012/trainer_state.json
ADDED
@@ -0,0 +1,477 @@
{
  "best_metric": 0.019555753096938133,
  "best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-4/checkpoint-3012",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 3012,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.033200531208499334, "grad_norm": 111.04224395751953, "learning_rate": 1.1277048866432555e-05, "loss": 0.3116, "step": 50},
    {"epoch": 0.06640106241699867, "grad_norm": 0.14363102614879608, "learning_rate": 1.123948374695743e-05, "loss": 0.098, "step": 100},
    {"epoch": 0.099601593625498, "grad_norm": 0.10250398516654968, "learning_rate": 1.1201918627482305e-05, "loss": 0.0729, "step": 150},
    {"epoch": 0.13280212483399734, "grad_norm": 1.9557462930679321, "learning_rate": 1.116435350800718e-05, "loss": 0.0537, "step": 200},
    {"epoch": 0.16600265604249667, "grad_norm": 0.5240038633346558, "learning_rate": 1.1126788388532055e-05, "loss": 0.0442, "step": 250},
    {"epoch": 0.199203187250996, "grad_norm": 2.139970541000366, "learning_rate": 1.1089223269056931e-05, "loss": 0.036, "step": 300},
    {"epoch": 0.23240371845949534, "grad_norm": 0.04191768541932106, "learning_rate": 1.1051658149581805e-05, "loss": 0.0355, "step": 350},
    {"epoch": 0.2656042496679947, "grad_norm": 0.0023582959547638893, "learning_rate": 1.1014093030106681e-05, "loss": 0.0368, "step": 400},
    {"epoch": 0.29880478087649404, "grad_norm": 12.193012237548828, "learning_rate": 1.0976527910631555e-05, "loss": 0.0574, "step": 450},
    {"epoch": 0.33200531208499334, "grad_norm": 0.00729788513854146, "learning_rate": 1.0938962791156431e-05, "loss": 0.0238, "step": 500},
    {"epoch": 0.3652058432934927, "grad_norm": 1.7422609329223633, "learning_rate": 1.0901397671681305e-05, "loss": 0.0213, "step": 550},
    {"epoch": 0.398406374501992, "grad_norm": 0.06725198775529861, "learning_rate": 1.086383255220618e-05, "loss": 0.013, "step": 600},
    {"epoch": 0.4316069057104914, "grad_norm": 0.21940571069717407, "learning_rate": 1.0826267432731055e-05, "loss": 0.025, "step": 650},
    {"epoch": 0.4648074369189907, "grad_norm": 0.032884348183870316, "learning_rate": 1.078870231325593e-05, "loss": 0.0138, "step": 700},
    {"epoch": 0.49800796812749004, "grad_norm": 0.0014803586527705193, "learning_rate": 1.0751137193780805e-05, "loss": 0.0058, "step": 750},
    {"epoch": 0.5312084993359893, "grad_norm": 10.339282989501953, "learning_rate": 1.071357207430568e-05, "loss": 0.009, "step": 800},
    {"epoch": 0.5644090305444888, "grad_norm": 1.531823754310608, "learning_rate": 1.0676006954830555e-05, "loss": 0.0115, "step": 850},
    {"epoch": 0.5976095617529881, "grad_norm": 0.00043045339407399297, "learning_rate": 1.063844183535543e-05, "loss": 0.0068, "step": 900},
    {"epoch": 0.6308100929614874, "grad_norm": 0.025898275896906853, "learning_rate": 1.0600876715880303e-05, "loss": 0.0098, "step": 950},
    {"epoch": 0.6640106241699867, "grad_norm": 0.0007346518104895949, "learning_rate": 1.0563311596405178e-05, "loss": 0.0094, "step": 1000},
    {"epoch": 0.6972111553784861, "grad_norm": 0.005919306073337793, "learning_rate": 1.0525746476930054e-05, "loss": 0.0115, "step": 1050},
    {"epoch": 0.7304116865869854, "grad_norm": 0.04206903651356697, "learning_rate": 1.0488181357454928e-05, "loss": 0.0061, "step": 1100},
    {"epoch": 0.7636122177954847, "grad_norm": 0.000557853898499161, "learning_rate": 1.0450616237979804e-05, "loss": 0.0024, "step": 1150},
    {"epoch": 0.796812749003984, "grad_norm": 0.0012018937850371003, "learning_rate": 1.0413051118504678e-05, "loss": 0.0058, "step": 1200},
    {"epoch": 0.8300132802124834, "grad_norm": 0.0013845885405316949, "learning_rate": 1.0375485999029554e-05, "loss": 0.0022, "step": 1250},
    {"epoch": 0.8632138114209827, "grad_norm": 0.07051751017570496, "learning_rate": 1.0337920879554428e-05, "loss": 0.0049, "step": 1300},
    {"epoch": 0.896414342629482, "grad_norm": 0.00019932868599426, "learning_rate": 1.0300355760079302e-05, "loss": 0.0026, "step": 1350},
    {"epoch": 0.9296148738379814, "grad_norm": 2.3489619707106613e-05, "learning_rate": 1.0262790640604178e-05, "loss": 0.0027, "step": 1400},
    {"epoch": 0.9628154050464808, "grad_norm": 0.0037654500920325518, "learning_rate": 1.0225225521129052e-05, "loss": 0.0079, "step": 1450},
    {"epoch": 0.9960159362549801, "grad_norm": 0.05777144059538841, "learning_rate": 1.0187660401653928e-05, "loss": 0.0154, "step": 1500},
    {"epoch": 1.0, "eval_accuracy": 0.9967795801526718, "eval_f1": 0.9967913721697382, "eval_loss": 0.022535286843776703, "eval_precision": 0.9968076807415951, "eval_recall": 0.9967795801526718, "eval_runtime": 59.7088, "eval_samples_per_second": 140.415, "eval_steps_per_second": 4.388, "step": 1506},
    {"epoch": 1.0292164674634794, "grad_norm": 0.020927241072058678, "learning_rate": 1.0150095282178802e-05, "loss": 0.0068, "step": 1550},
    {"epoch": 1.0624169986719787, "grad_norm": 0.00013940146891400218, "learning_rate": 1.0112530162703678e-05, "loss": 0.0023, "step": 1600},
    {"epoch": 1.095617529880478, "grad_norm": 0.01088524330407381, "learning_rate": 1.0074965043228552e-05, "loss": 0.0062, "step": 1650},
    {"epoch": 1.1288180610889773, "grad_norm": 0.058450598269701004, "learning_rate": 1.0037399923753428e-05, "loss": 0.0153, "step": 1700},
    {"epoch": 1.1620185922974768, "grad_norm": 0.011701447889208794, "learning_rate": 9.999834804278302e-06, "loss": 0.0105, "step": 1750},
    {"epoch": 1.1952191235059761, "grad_norm": 3.078742742538452, "learning_rate": 9.962269684803178e-06, "loss": 0.0076, "step": 1800},
    {"epoch": 1.2284196547144755, "grad_norm": 0.02360646426677704, "learning_rate": 9.924704565328052e-06, "loss": 0.0025, "step": 1850},
    {"epoch": 1.2616201859229748, "grad_norm": 0.0027641034685075283, "learning_rate": 9.887139445852926e-06, "loss": 0.0114, "step": 1900},
    {"epoch": 1.294820717131474, "grad_norm": 0.00172056641895324, "learning_rate": 9.849574326377802e-06, "loss": 0.0003, "step": 1950},
    {"epoch": 1.3280212483399734, "grad_norm": 0.07806120812892914, "learning_rate": 9.812009206902676e-06, "loss": 0.0008, "step": 2000},
    {"epoch": 1.361221779548473, "grad_norm": 0.000884020933881402, "learning_rate": 9.774444087427552e-06, "loss": 0.0038, "step": 2050},
    {"epoch": 1.3944223107569722, "grad_norm": 0.1488543003797531, "learning_rate": 9.736878967952426e-06, "loss": 0.0106, "step": 2100},
    {"epoch": 1.4276228419654715, "grad_norm": 0.037523552775382996, "learning_rate": 9.699313848477302e-06, "loss": 0.0007, "step": 2150},
    {"epoch": 1.4608233731739708, "grad_norm": 0.00033480292768217623, "learning_rate": 9.661748729002176e-06, "loss": 0.0116, "step": 2200},
    {"epoch": 1.4940239043824701, "grad_norm": 0.007270739413797855, "learning_rate": 9.624183609527052e-06, "loss": 0.0079, "step": 2250},
    {"epoch": 1.5272244355909694, "grad_norm": 0.00751983979716897, "learning_rate": 9.586618490051926e-06, "loss": 0.0044, "step": 2300},
    {"epoch": 1.5604249667994687, "grad_norm": 4.298997402191162, "learning_rate": 9.549053370576802e-06, "loss": 0.0014, "step": 2350},
    {"epoch": 1.593625498007968, "grad_norm": 0.0014925749273970723, "learning_rate": 9.511488251101676e-06, "loss": 0.0001, "step": 2400},
    {"epoch": 1.6268260292164674, "grad_norm": 0.003561707679182291, "learning_rate": 9.47392313162655e-06, "loss": 0.0077, "step": 2450},
    {"epoch": 1.6600265604249667, "grad_norm": 0.0006838434492237866, "learning_rate": 9.436358012151426e-06, "loss": 0.0001, "step": 2500},
    {"epoch": 1.6932270916334662, "grad_norm": 0.0002309294941369444, "learning_rate": 9.3987928926763e-06, "loss": 0.0029, "step": 2550},
    {"epoch": 1.7264276228419655, "grad_norm": 0.0011594091774895787, "learning_rate": 9.361227773201176e-06, "loss": 0.0107, "step": 2600},
    {"epoch": 1.7596281540504648, "grad_norm": 0.00012066392082488164, "learning_rate": 9.32366265372605e-06, "loss": 0.0006, "step": 2650},
    {"epoch": 1.792828685258964, "grad_norm": 0.0021935878321528435, "learning_rate": 9.286097534250926e-06, "loss": 0.0065, "step": 2700},
    {"epoch": 1.8260292164674636, "grad_norm": 0.0002105861931340769, "learning_rate": 9.2485324147758e-06, "loss": 0.0032, "step": 2750},
    {"epoch": 1.859229747675963, "grad_norm": 0.0009871097281575203, "learning_rate": 9.210967295300676e-06, "loss": 0.0, "step": 2800},
    {"epoch": 1.8924302788844622, "grad_norm": 5.9001271438319236e-05, "learning_rate": 9.173402175825549e-06, "loss": 0.0077, "step": 2850},
    {"epoch": 1.9256308100929616, "grad_norm": 0.001614038716070354, "learning_rate": 9.135837056350425e-06, "loss": 0.0059, "step": 2900},
    {"epoch": 1.9588313413014609, "grad_norm": 0.001071999897249043, "learning_rate": 9.098271936875299e-06, "loss": 0.008, "step": 2950},
    {"epoch": 1.9920318725099602, "grad_norm": 0.018942702561616898, "learning_rate": 9.060706817400175e-06, "loss": 0.0107, "step": 3000},
    {"epoch": 2.0, "eval_accuracy": 0.997256679389313, "eval_f1": 0.997238185991172, "eval_loss": 0.019555753096938133, "eval_precision": 0.997235028769195, "eval_recall": 0.997256679389313, "eval_runtime": 59.293, "eval_samples_per_second": 141.4, "eval_steps_per_second": 4.419, "step": 3012}
  ],
  "logging_steps": 50,
  "max_steps": 15060,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0188494610051072e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
trial-4/checkpoint-3012/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e6980b2d26960e0944dcf6a709651feb944395421a5df274ddff277d8343607e
size 5368
trial-5/checkpoint-1506/config.json
ADDED
@@ -0,0 +1,47 @@
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "reference_compile": true,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vocab_size": 50368
}
trial-5/checkpoint-1506/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5f5ecd51b91e86db092fb98e5e0ed6bb61c710dc95a00fe365f829c3218537a1
size 1583351632
trial-5/checkpoint-1506/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d0309590d2cd916de4a2f04ddce5837c3c90bd59129970b6cb4f3ed8ac4e03b7
size 3166813178
trial-5/checkpoint-1506/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:568428d80a25211a390c359ca51b0b20b38ca0607fbc196f106c9841c02d3e59
size 14244
trial-5/checkpoint-1506/scheduler.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c12ccce76e7b5dcd2d6e5c79dcffd36e8cc7c4912a60cfbc89196c37d5a03edc
size 1064
trial-5/checkpoint-1506/trainer_state.json
ADDED
@@ -0,0 +1,255 @@
{
  "best_metric": 0.022090721875429153,
  "best_model_checkpoint": "./results/answerdotai/ModernBERT-large/trial-5/checkpoint-1506",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1506,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.033200531208499334, "grad_norm": 21.02681541442871, "learning_rate": 1.1549761256591334e-05, "loss": 0.3094, "step": 50},
    {"epoch": 0.06640106241699867, "grad_norm": 14.655999183654785, "learning_rate": 1.1494720457617871e-05, "loss": 0.0991, "step": 100},
    {"epoch": 0.099601593625498, "grad_norm": 0.3093714416027069, "learning_rate": 1.143967965864441e-05, "loss": 0.045, "step": 150},
    {"epoch": 0.13280212483399734, "grad_norm": 0.19615088403224945, "learning_rate": 1.1384638859670947e-05, "loss": 0.0505, "step": 200},
    {"epoch": 0.16600265604249667, "grad_norm": 1.4286335706710815, "learning_rate": 1.1329598060697483e-05, "loss": 0.0281, "step": 250},
    {"epoch": 0.199203187250996, "grad_norm": 2.1918282508850098, "learning_rate": 1.1274557261724022e-05, "loss": 0.0536, "step": 300},
    {"epoch": 0.23240371845949534, "grad_norm": 2.13693904876709, "learning_rate": 1.1219516462750559e-05, "loss": 0.0385, "step": 350},
    {"epoch": 0.2656042496679947, "grad_norm": 0.029252415522933006, "learning_rate": 1.1164475663777096e-05, "loss": 0.0326, "step": 400},
    {"epoch": 0.29880478087649404, "grad_norm": 0.5969660878181458, "learning_rate": 1.1109434864803635e-05, "loss": 0.0547, "step": 450},
    {"epoch": 0.33200531208499334, "grad_norm": 0.00738520547747612, "learning_rate": 1.1054394065830171e-05, "loss": 0.0115, "step": 500},
    {"epoch": 0.3652058432934927, "grad_norm": 1.9720779657363892, "learning_rate": 1.099935326685671e-05, "loss": 0.0239, "step": 550},
    {"epoch": 0.398406374501992, "grad_norm": 0.6309007406234741, "learning_rate": 1.0944312467883247e-05, "loss": 0.0043, "step": 600},
    {"epoch": 0.4316069057104914, "grad_norm": 0.011916632764041424, "learning_rate": 1.0889271668909786e-05, "loss": 0.0164, "step": 650},
    {"epoch": 0.4648074369189907, "grad_norm": 0.009058245457708836, "learning_rate": 1.0834230869936323e-05, "loss": 0.0103, "step": 700},
    {"epoch": 0.49800796812749004, "grad_norm": 0.0032912548631429672, "learning_rate": 1.0779190070962861e-05, "loss": 0.028, "step": 750},
    {"epoch": 0.5312084993359893, "grad_norm": 0.011074424721300602, "learning_rate": 1.0724149271989398e-05, "loss": 0.0104, "step": 800},
    {"epoch": 0.5644090305444888, "grad_norm": 0.951624870300293, "learning_rate": 1.0669108473015937e-05, "loss": 0.0114, "step": 850},
    {"epoch": 0.5976095617529881, "grad_norm": 0.0027369000017642975, "learning_rate": 1.0614067674042474e-05, "loss": 0.016, "step": 900},
    {"epoch": 0.6308100929614874, "grad_norm": 0.012001908384263515, "learning_rate": 1.055902687506901e-05, "loss": 0.0094, "step": 950},
    {"epoch": 0.6640106241699867, "grad_norm": 0.69849693775177, "learning_rate": 1.050398607609555e-05, "loss": 0.0199, "step": 1000},
    {"epoch": 0.6972111553784861, "grad_norm": 0.036301348358392715, "learning_rate": 1.0448945277122086e-05, "loss": 0.0077, "step": 1050},
    {"epoch": 0.7304116865869854, "grad_norm": 0.008320258930325508, "learning_rate": 1.0393904478148625e-05, "loss": 0.0043, "step": 1100},
    {"epoch": 0.7636122177954847, "grad_norm": 0.0027414376381784678, "learning_rate": 1.0338863679175162e-05, "loss": 0.004, "step": 1150},
    {"epoch": 0.796812749003984, "grad_norm": 0.0007768127834424376, "learning_rate": 1.02838228802017e-05, "loss": 0.0072, "step": 1200},
    {"epoch": 0.8300132802124834, "grad_norm": 0.0004548605065792799, "learning_rate": 1.0228782081228237e-05, "loss": 0.0051, "step": 1250},
    {"epoch": 0.8632138114209827, "grad_norm": 0.007403654046356678, "learning_rate": 1.0173741282254776e-05, "loss": 0.0112, "step": 1300},
    {"epoch": 0.896414342629482, "grad_norm": 0.0017905730055645108, "learning_rate": 1.0118700483281313e-05, "loss": 0.0069, "step": 1350},
    {"epoch": 0.9296148738379814, "grad_norm": 0.0004039919876959175, "learning_rate": 1.0063659684307851e-05, "loss": 0.0008, "step": 1400},
    {"epoch": 0.9628154050464808, "grad_norm": 0.000610634742770344, "learning_rate": 1.0008618885334388e-05, "loss": 0.0011, "step": 1450},
    {"epoch": 0.9960159362549801, "grad_norm": 1.0261385440826416, "learning_rate": 9.953578086360925e-06, "loss": 0.0207, "step": 1500},
    {"epoch": 1.0, "eval_accuracy": 0.9971374045801527, "eval_f1": 0.9971246021172431, "eval_loss": 0.022090721875429153, "eval_precision": 0.9971185448871154, "eval_recall": 0.9971374045801527, "eval_runtime": 59.3469, "eval_samples_per_second": 141.271, "eval_steps_per_second": 4.415, "step": 1506}
  ],
  "logging_steps": 50,
  "max_steps": 10542,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 5.094247305025536e+16,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
trial-5/checkpoint-1506/training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:57df86703a932a26c855abebf91502e9da3c5daba164538c1fa2ecde95e9c014
size 5368
trial-6/checkpoint-1506/config.json
ADDED
@@ -0,0 +1,47 @@
{
  "_name_or_path": "answerdotai/ModernBERT-large",
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 1024,
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 2624,
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 10000.0,
  "max_position_embeddings": 8192,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 16,
  "num_hidden_layers": 28,
  "pad_token_id": 50283,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "reference_compile": true,
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.48.0.dev0",
  "vocab_size": 50368
}
trial-6/checkpoint-1506/model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3d8dd9c50dd83347fd2f3d076bda888d52bc1f28720f6537878e55817f57b843
size 1583351632
trial-6/checkpoint-1506/optimizer.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba4b588a87754fc94b6d70748348851ee39e0cd46fcba5bc4098f18c984a4eee
size 3166813178
trial-6/checkpoint-1506/rng_state.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:568428d80a25211a390c359ca51b0b20b38ca0607fbc196f106c9841c02d3e59
size 14244