diff --git a/.gitattributes b/.gitattributes
index d5f5214f67810bec738bbb27fe8398ce6b91e8a1..962ad648a7fa1bdee55a78e9fc176023cf51bda7 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -37,3 +37,4 @@ tokenizer.json filter=lfs diff=lfs merge=lfs -text
checkpoint-231/tokenizer.json filter=lfs diff=lfs merge=lfs -text
checkpoint-462/tokenizer.json filter=lfs diff=lfs merge=lfs -text
checkpoint-693/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-924/tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/checkpoint-924/1_AdvancedWeightedPooling/config.json b/checkpoint-924/1_AdvancedWeightedPooling/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..25ff91031ae4753e5f5692b851e62a5f1183d31f
--- /dev/null
+++ b/checkpoint-924/1_AdvancedWeightedPooling/config.json
@@ -0,0 +1,16 @@
+{
+ "embed_dim": 1024,
+ "num_heads": 4,
+ "dropout": 0.1,
+ "bias": true,
+ "use_layernorm": true,
+ "use_MLP": true,
+ "MLP_h_size": 2048,
+ "MLP_output_size": 2048,
+ "use_residual_MLP": "no",
+ "MLP_type": "swiglu_h+swiglu_d",
+ "h2_size": 2048,
+ "ignore_cls_as_kv": true,
+ "expand_emb_dim_to": 0,
+ "compress_output_dim_to": 0
+}
\ No newline at end of file
diff --git a/checkpoint-924/1_AdvancedWeightedPooling/pytorch_model.bin b/checkpoint-924/1_AdvancedWeightedPooling/pytorch_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..cc02d659ce2eb9dc750a079f3b7aecbcee2ce191
--- /dev/null
+++ b/checkpoint-924/1_AdvancedWeightedPooling/pytorch_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7c7804224f238e0703165303043ec0dc5972aacff73b383d8a35fed881cdf2b
+size 83965726
diff --git a/checkpoint-924/README.md b/checkpoint-924/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e54c4763135f94899704e1e3ddc85a9251ff87c5
--- /dev/null
+++ b/checkpoint-924/README.md
@@ -0,0 +1,1881 @@
+---
+tags:
+- sentence-transformers
+- sentence-similarity
+- feature-extraction
+- generated_from_trainer
+- dataset_size:147810
+- loss:MatryoshkaLoss
+- loss:CachedGISTEmbedLoss
+base_model: BAAI/bge-m3
+widget:
+- source_sentence: if we could vibrate at the natural frequency of air could we "phase"
+ through objects?
+ sentences:
+ - No. There is literally nothing about that question that makes any scientific sense.
+ Even if air had a "natural frequency", it does not pass through solid objects,
+ so there would be no reason to think a person would either.
+ - 'The industry is worth about £80m a year to the economy, employs about 4,000
+ workers, with 105 million journeys taken in Wales each year.
+
+ Economy Secretary Ken Skates said 2016 had been a "mixed year" with the "demise"
+ of some service providers.
+
+ He told Wales'' first bus summit some communities had been left "vulnerable" and
+ longer-term solutions are needed.
+
+ Mr Skates added more young people should be encouraged to travel by bus, and services
+ must work for both rural and urban areas.
+
+ Between 2011 and 2014, nearly 100 subsidised bus routes were scrapped by local
+ councils in Wales.
+
+ Passengers and transport providers attended Monday''s summit, which was one of
+ five steps the Welsh Government recommended to strengthen the industry.
+
+ Bus coordinators have also been appointed in north and south Wales to develop
+ good practice.
+
+ Mr Skates said: "It''s a tough task but I''m positive we have the expertise and
+ the will in Wales to deliver it.
+
+ "I''m keen, amongst other things, to explore with others whether new legislation
+ can play a role in this.
+
+ "In the meantime, of course, we will continue to support local authorities and
+ communities wherever possible, whether it be through taking on new services like
+ the T1C between Aberystwyth and Cardiff or providing further financial support
+ to areas affected by loss of service providers."'
+ - It's called [ASMR](_URL_0_) and it is not entirely understood. It can be triggered
+ by a varsity of stimuli and each person is different, but it is a pretty good
+ sensation, isn't it?
+- source_sentence: Why do we get shakey?
+ sentences:
+ - 'It was the first time Prince Charles and his son Prince William have attended
+ the event at Edinburgh Castle.
+
+ The annual Tattoo is currently in its 68th season.
+
+ This year''s event recognises 2017 as being the Year of the Royal Navy and comes
+ ahead of the official naming of the aircraft carrier HMS Prince of Wales later
+ this year.
+
+ It is also marking Scotland''s Year of History, Heritage and Archaeology with
+ a second "Splash of Tartan" theme.
+
+ As well as attending the Tattoo, the royals viewed a short performance beforehand
+ on the forecourt at the Palace of Holyroodhouse.
+
+ Charles, known as the Duke of Rothesay in Scotland, and William, who takes the
+ title the Earl of Strathearn when north of the border, attended the Tattoo around
+ halfway through its August run.
+
+ This year''s performance involves a cast of more than 1,200 people from across
+ the globe.
+
+ The line-up includes more than 250 pipers and drummers, five UK military bands
+ and the event''s first Japanese act, as well as major contingents from France,
+ India and the United States.
+
+ The first Edinburgh Tattoo took place in 1950, with the first overseas regiment
+ taking part in 1952.
+
+ Since then, 48 countries from across six continents have been represented at the
+ Tattoo.
+
+ It attracts an annual audience of around 220,000, meaning that more than 14m people
+ have attended the Tattoo since it began.'
+ - Any particular triggers? I get something like that occasionally, but usually in
+ a situation like being in a very quiet house and trying not to wake people up.
+ - Electrolytes. Salt, magnesium, and the such... Like an engine will shudder on
+ Empty with no fuel.
+- source_sentence: How did man discover smoking things like tobacco and marijuana?
+ What made those plants special to give them the idea?
+ sentences:
+ - They probably just started out by throwing random things in the fire as fuel or
+ to create smoke (which is great for keeping bugs away). One day someone threw
+ on the leaves of one particular plant and it was **awesome** so they did it again.
+ - Tenth Doctor This incarnation's companions include working class shop assistant
+ Rose Tyler (Billie Piper), medical student Martha Jones (Freema Agyeman), and
+ fiery temp worker Donna Noble (Catherine Tate). He eventually parts ways with
+ them all by the end of the 2008 series finale, "Journey's End", after which he
+ attempts to travel alone for the duration of the 2008–2010 specials before being
+ accompanied by Donna Noble's grandfather Wilfred Mott on his final adventure in
+ The End of Time.
+ - The compound was patented by Dr. Patrick Page and his team , and was invented
+ in 2007 by Genkyotex .
+- source_sentence: when did hurricane maria occur in puerto rico
+ sentences:
+ - As of 21 March , more than 275,000 cases of COVID-19 have been reported in over
+ 185 countries and territories , resulting in more than 11,300 deaths and 91,000
+ recoveries .
+ - Hurricane Maria Originating from a tropical wave, Maria became a tropical storm
+ on September 16, east of the Lesser Antilles. Highly favorable environmental conditions
+ allowed the storm to undergo explosive intensification as it approached the island
+ arc. The hurricane reached Category 5 strength on September 18 just before making
+ landfall on Dominica, becoming the first Category 5 hurricane on record to strike
+ the island. After weakening slightly due to crossing Dominica, Maria achieved
+ its peak intensity over the eastern Caribbean with maximum sustained winds of
+ 175 mph (280 km/h) and a pressure of 908 mbar (hPa; 26.81 inHg), making it the
+ tenth-most intense Atlantic hurricane on record. On September 20, an eyewall replacement
+ cycle took place, weakening Maria to a high-end Category 4 hurricane by the time
+ it struck Puerto Rico. Interaction with land further weakened the hurricane, though
+ it regained some strength as it moved northeast of The Bahamas. Moving slowly
+ to the north, Maria gradually degraded and weakened to a tropical storm on September
+ 28. Embedded in the westerlies, Maria accelerated toward the east and later east-northeast
+ over the open Atlantic, becoming extratropical on September 30 and dissipating
+ by October 3.
+ - NPR's Ari Shapiro talks with reporter Jacqueline Charles of The Miami Herald
+ about the end of temporary protection for Haitians. The Trump administration decided
+ that Haitians must go back to their home country next year, adding that the conditions
+ after the 2010 earthquake no longer exist.
+- source_sentence: The Elder Scrolls series up until "Skyrim".
+ sentences:
+ - 'The government-appointed trust was set up after the services at Sandwell Council
+ were ranked "inadequate".
+
+ Mrs Smith will head the new Sandwell Children''s Social Care Trust which is being
+ asked to bring "rapid improvements".
+
+ She said she relished the opportunity of improving the lives of vulnerable children
+ and families in Sandwell.
+
+ Mrs Smith added: "My job is to help build that team by challenging us all to find
+ the best ways to improve and innovate - and by making the trust a great place
+ to work."
+
+ For more on this and other West Midlands news
+
+ Sandwell''s children services was rated inadequate in June 2015 after another
+ poor rating in 2013.
+
+ Ofsted inspectors in 2015 said the department did "not fully understand the scale
+ and prevalence of child sexual exploitation".
+
+ "There are widespread and serious failures that create or leave children being
+ harmed or at risk of harm," the report found:.
+
+ An Ofsted inspection last year found "positive progress" had been made.
+
+ The council said since the announcement in October 2016 that an independent trust
+ would run the services, it had worked with commissioner Malcolm Newsam and the
+ Department for Education.
+
+ Council Leader Steve Eling welcomed Mrs Smith''s appointment and her "wealth of
+ experience".
+
+ "With the setting up of the trust, we want to build on recent improvements with
+ a board that will focus on providing a clear vision, be innovative and strive
+ to deliver rapid and sustained improvements," he said.'
+ - 'The Unofficial Elder Scrolls Pages had some great stuff but it''s not really
+ for five year olds. [Here](_URL_2_) is the main lore page and [here](_URL_0_)
+ is the beginning of the history. Based on previous Elder Scrolls games you will
+ not need to have played the previous game to enjoy them and understand the story.
+ You might miss out on some small references or easter eggs put in for fans though.
+ EDIT: _URL_1_ will most likely put together a [historical references](_URL_3_)
+ page for Skyrim like it did for Oblivion.'
+ - Game of Thrones (season 7) The seventh season of the fantasy drama television
+ series Game of Thrones premiered on HBO on July 16, 2017, and concluded on August
+ 27, 2017.[1][2][3] Unlike previous seasons that consisted of ten episodes each,
+ the seventh season consisted of only seven.[4] Like the previous season, it largely
+ consisted of original content not found in George R. R. Martin's A Song of Ice
+ and Fire series, while also adapting material Martin revealed to showrunners about
+ the upcoming novels in the series.[5] The series was adapted for television by
+ David Benioff and D. B. Weiss.
+pipeline_tag: sentence-similarity
+library_name: sentence-transformers
+metrics:
+- pearson_cosine
+- spearman_cosine
+- cosine_accuracy
+model-index:
+- name: SentenceTransformer based on BAAI/bge-m3
+ results:
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test
+ type: sts-test
+ metrics:
+ - type: pearson_cosine
+ value: 0.8878947010894737
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9258002047273748
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 1024
+ type: sts-test-1024
+ metrics:
+ - type: pearson_cosine
+ value: 0.885149365257635
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9258567760577375
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 512
+ type: sts-test-512
+ metrics:
+ - type: pearson_cosine
+ value: 0.88725994150817
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9272581696465901
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 256
+ type: sts-test-256
+ metrics:
+ - type: pearson_cosine
+ value: 0.8861095686328364
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9268503548030278
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 128
+ type: sts-test-128
+ metrics:
+ - type: pearson_cosine
+ value: 0.880812025351224
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9247529190228797
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 64
+ type: sts-test-64
+ metrics:
+ - type: pearson_cosine
+ value: 0.8741180224496476
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.922292066152112
+ name: Spearman Cosine
+ - task:
+ type: semantic-similarity
+ name: Semantic Similarity
+ dataset:
+ name: sts test 32
+ type: sts-test-32
+ metrics:
+ - type: pearson_cosine
+ value: 0.8553627166015102
+ name: Pearson Cosine
+ - type: spearman_cosine
+ value: 0.9153789063632594
+ name: Spearman Cosine
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets
+ type: allNLI-triplets
+ metrics:
+ - type: cosine_accuracy
+ value: 0.9296875
+ name: Cosine Accuracy
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets 1024
+ type: allNLI--triplets-1024
+ metrics:
+ - type: cosine_accuracy
+ value: 0.9296875
+ name: Cosine Accuracy
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets 512
+ type: allNLI--triplets-512
+ metrics:
+ - type: cosine_accuracy
+ value: 0.9296875
+ name: Cosine Accuracy
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets 256
+ type: allNLI--triplets-256
+ metrics:
+ - type: cosine_accuracy
+ value: 0.9296875
+ name: Cosine Accuracy
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets 128
+ type: allNLI--triplets-128
+ metrics:
+ - type: cosine_accuracy
+ value: 0.9296875
+ name: Cosine Accuracy
+  - task:
+      type: triplet
+      name: Triplet
+    dataset:
+      name: allNLI triplets 64
+      type: allNLI--triplets-64
+    metrics:
+    - type: cosine_accuracy
+      value: 0.9296875
+      name: Cosine Accuracy
+ - task:
+ type: triplet
+ name: Triplet
+ dataset:
+ name: allNLI triplets 32
+ type: allNLI--triplets-32
+ metrics:
+ - type: cosine_accuracy
+ value: 0.921875
+ name: Cosine Accuracy
+---
+
+# SentenceTransformer based on BAAI/bge-m3
+
+This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3) on the global_dataset dataset. It maps sentences & paragraphs to a 2048-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
+
+## Model Details
+
+### Model Description
+- **Model Type:** Sentence Transformer
+- **Base model:** [BAAI/bge-m3](https://huggingface.co/BAAI/bge-m3)
+- **Maximum Sequence Length:** 512 tokens
+- **Output Dimensionality:** 2048 dimensions
+- **Similarity Function:** Cosine Similarity
+- **Training Dataset:**
+ - global_dataset
+
+
+
+### Model Sources
+
+- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
+- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
+- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
+
+### Full Model Architecture
+
+```
+SentenceTransformer(
+ (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
+ (1): AdvancedWeightedPooling(
+ (mha): MultiheadAttention(
+ (out_proj): NonDynamicallyQuantizableLinear(in_features=1024, out_features=1024, bias=True)
+ )
+ (MLP): Sequential(
+ (0): SwiGLUBlock(
+ (in_proj_w): Linear(in_features=1024, out_features=2048, bias=True)
+ (in_proj_v): Linear(in_features=1024, out_features=2048, bias=True)
+ (dropout): Dropout(p=0.1, inplace=False)
+ )
+ (1): SwiGLUBlock(
+ (in_proj_w): Linear(in_features=2048, out_features=2048, bias=True)
+ (in_proj_v): Linear(in_features=2048, out_features=2048, bias=True)
+ (dropout): Dropout(p=0.05, inplace=False)
+ )
+ (2): Linear(in_features=2048, out_features=2048, bias=True)
+ )
+ (layernorm): LayerNorm((2048,), eps=1e-05, elementwise_affine=True)
+ )
+)
+```
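+
+The `AdvancedWeightedPooling` module is custom (its weights ship with this checkpoint; it is not part of the sentence-transformers library). As a rough mental model only, here is a minimal sketch consistent with the printed shapes and the `ignore_cls_as_kv` config flag; this is a hypothetical reconstruction, not the module's actual source:
+
+```python
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class SwiGLUBlock(nn.Module):
+    # Gated feed-forward block mirroring the printed (in_proj_w, in_proj_v, dropout) layout.
+    def __init__(self, in_dim, out_dim, dropout=0.1):
+        super().__init__()
+        self.in_proj_w = nn.Linear(in_dim, out_dim)
+        self.in_proj_v = nn.Linear(in_dim, out_dim)
+        self.dropout = nn.Dropout(dropout)
+
+    def forward(self, x):
+        return self.dropout(F.silu(self.in_proj_w(x)) * self.in_proj_v(x))
+
+
+class AdvancedWeightedPoolingSketch(nn.Module):
+    # Hypothetical reading of the config: the CLS embedding queries the remaining
+    # tokens via multi-head attention ("ignore_cls_as_kv"), then a SwiGLU MLP
+    # expands the pooled vector from 1024 to 2048 dimensions.
+    def __init__(self, embed_dim=1024, num_heads=4, h=2048, out_dim=2048):
+        super().__init__()
+        self.mha = nn.MultiheadAttention(embed_dim, num_heads, dropout=0.1, batch_first=True)
+        self.MLP = nn.Sequential(
+            SwiGLUBlock(embed_dim, h, dropout=0.1),
+            SwiGLUBlock(h, h, dropout=0.05),
+            nn.Linear(h, out_dim),
+        )
+        self.layernorm = nn.LayerNorm(out_dim)
+
+    def forward(self, token_embeddings, attention_mask):
+        query = token_embeddings[:, :1]            # CLS token as the query
+        kv = token_embeddings[:, 1:]               # all other tokens as keys/values
+        pad_mask = attention_mask[:, 1:] == 0      # mask out padding positions
+        pooled, _ = self.mha(query, kv, kv, key_padding_mask=pad_mask)
+        return self.layernorm(self.MLP(pooled.squeeze(1)))  # (batch, 2048)
+```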
+
+## Usage
+
+### Direct Usage (Sentence Transformers)
+
+First install the Sentence Transformers library:
+
+```bash
+pip install -U sentence-transformers
+```
+
+Then you can load this model and run inference.
+```python
+from sentence_transformers import SentenceTransformer
+
+# Download from the 🤗 Hub
+model = SentenceTransformer("bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp")
+# Run inference
+sentences = [
+ 'The Elder Scrolls series up until "Skyrim".',
+ "The Unofficial Elder Scrolls Pages had some great stuff but it's not really for five year olds. [Here](_URL_2_) is the main lore page and [here](_URL_0_) is the beginning of the history. Based on previous Elder Scrolls games you will not need to have played the previous game to enjoy them and understand the story. You might miss out on some small references or easter eggs put in for fans though. EDIT: _URL_1_ will most likely put together a [historical references](_URL_3_) page for Skyrim like it did for Oblivion.",
+ "Game of Thrones (season 7) The seventh season of the fantasy drama television series Game of Thrones premiered on HBO on July 16, 2017, and concluded on August 27, 2017.[1][2][3] Unlike previous seasons that consisted of ten episodes each, the seventh season consisted of only seven.[4] Like the previous season, it largely consisted of original content not found in George R. R. Martin's A Song of Ice and Fire series, while also adapting material Martin revealed to showrunners about the upcoming novels in the series.[5] The series was adapted for television by David Benioff and D. B. Weiss.",
+]
+embeddings = model.encode(sentences)
+print(embeddings.shape)
+# [3, 2048]
+
+# Get the similarity scores for the embeddings
+similarities = model.similarity(embeddings, embeddings)
+print(similarities.shape)
+# [3, 3]
+```
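+
+Because the model was trained with `MatryoshkaLoss`, its embeddings can be truncated to any of the trained dimensionalities with only a small quality loss (see the metrics below). Sentence Transformers supports this via the `truncate_dim` argument:
+
+```python
+from sentence_transformers import SentenceTransformer
+
+# Load the model with embeddings truncated to one of the Matryoshka dimensions
+model = SentenceTransformer(
+    "bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp",
+    truncate_dim=256,
+)
+embeddings = model.encode(['The Elder Scrolls series up until "Skyrim".'])
+print(embeddings.shape)
+# (1, 256)
+```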
+
+## Evaluation
+
+### Metrics
+
+#### Semantic Similarity
+
+* Dataset: `sts-test`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8879 |
+| **spearman_cosine** | **0.9258** |
+
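+The numbers above can be re-run with the same evaluator class. A minimal sketch, assuming the STS Benchmark test split (the card does not record the exact data used):
+
+```python
+from datasets import load_dataset
+from sentence_transformers import SentenceTransformer
+from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator
+
+model = SentenceTransformer("bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp")
+
+# Assumption: the standard STS Benchmark test split with similarity scores in [0, 1].
+stsb = load_dataset("sentence-transformers/stsb", split="test")
+evaluator = EmbeddingSimilarityEvaluator(
+    sentences1=stsb["sentence1"],
+    sentences2=stsb["sentence2"],
+    scores=stsb["score"],
+    name="sts-test",
+)
+print(evaluator(model))  # reports pearson_cosine and spearman_cosine, among others
+```
+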
+#### Semantic Similarity
+
+* Dataset: `sts-test-1024`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 1024
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8851 |
+| **spearman_cosine** | **0.9259** |
+
+#### Semantic Similarity
+
+* Dataset: `sts-test-512`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 512
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8873 |
+| **spearman_cosine** | **0.9273** |
+
+#### Semantic Similarity
+
+* Dataset: `sts-test-256`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 256
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8861 |
+| **spearman_cosine** | **0.9269** |
+
+#### Semantic Similarity
+
+* Dataset: `sts-test-128`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 128
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8808 |
+| **spearman_cosine** | **0.9248** |
+
+#### Semantic Similarity
+
+* Dataset: `sts-test-64`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 64
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8741 |
+| **spearman_cosine** | **0.9223** |
+
+#### Semantic Similarity
+
+* Dataset: `sts-test-32`
+* Evaluated with [EmbeddingSimilarityEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 32
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| pearson_cosine | 0.8554 |
+| **spearman_cosine** | **0.9154** |
+
+#### Triplet
+
+* Dataset: `allNLI-triplets`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator)
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
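+The triplet scores can be reproduced analogously with `TripletEvaluator`, which measures the fraction of triplets whose anchor embeds closer to the positive than to the negative. A minimal sketch with made-up triplets (the allNLI evaluation split itself is not part of this repository):
+
+```python
+from sentence_transformers import SentenceTransformer
+from sentence_transformers.evaluation import TripletEvaluator
+
+model = SentenceTransformer("bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp")
+
+# Hypothetical (anchor, positive, negative) triplets purely for illustration.
+evaluator = TripletEvaluator(
+    anchors=["Why do we get shakey?"],
+    positives=["Electrolytes. Salt, magnesium, and the such..."],
+    negatives=["Game of Thrones premiered on HBO on July 16, 2017."],
+    name="allNLI-triplets",
+)
+print(evaluator(model))  # cosine_accuracy: share of anchors closer to their positive
+```
+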
+#### Triplet
+
+* Dataset: `allNLI--triplets-1024`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 1024
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
+#### Triplet
+
+* Dataset: `allNLI--triplets-512`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 512
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
+#### Triplet
+
+* Dataset: `allNLI--triplets-256`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 256
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
+#### Triplet
+
+* Dataset: `allNLI--triplets-128`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 128
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
+#### Triplet
+
+* Dataset: `allNLI--triplets-64`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 64
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9297** |
+
+#### Triplet
+
+* Dataset: `allNLI--triplets-32`
+* Evaluated with [TripletEvaluator](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator) with these parameters:
+ ```json
+ {
+ "truncate_dim": 32
+ }
+ ```
+
+| Metric | Value |
+|:--------------------|:-----------|
+| **cosine_accuracy** | **0.9219** |
+
+## Training Details
+
+### Training Dataset
+
+#### global_dataset
+
+* Dataset: global_dataset
+* Size: 147,810 training samples
+* Columns: sentence1, sentence2, and negative_1
+* Approximate statistics based on the first 1000 samples:
+ | | sentence1 | sentence2 | negative_1 |
+ |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
+ | type | string | string | string |
+  | details | <ul><li>min: 6 tokens</li><li>mean: 21.86 tokens</li><li>max: 85 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 100.42 tokens</li><li>max: 444 tokens</li></ul> | <ul><li>min: 5 tokens</li><li>mean: 93.99 tokens</li><li>max: 450 tokens</li></ul> |
+* Samples:
+ | sentence1 | sentence2 | negative_1 |
+ |:----------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+ | why is my earliest memory not from birth or a little after birth? | Your brain is not finished developing until further age. By the time of your birth your brain still lack the cognitive ability to store memories. It is not until further age you get some of the brain abilities that might seem obvious to you now. You should watch this video about [cognitive development among children](_URL_0_) - Its quite interesting. | It's not that children have no memory at all - it's that, as we age, our brains overwrite unnecessary information with new information. *As adults,* we often can't remember things from when we were very young - but this doesn't say anything about a child's memory when they are still that age. Some suggest there's a link between language ability and memory - such that your memory becomes clearer and stronger the more your language ability develops. Maybe that plays a role as well. |
+ | Why does the throat become so dry when laying in bed/sleeping? | Generally because you are sleeping with your mouth open which dries out the mucous membrane. If you don't believe me, try keeping your mouth wide open and breath in and out for an hour and tell me how your throat feels. | Your throat is sensitive. It is trying to keep out chemicals and cocks. |
+  | The last British survivor of the Dambusters raid has been presented with the Bristol Lord Mayor's Medal. | George "Johnny" Johnson, who is in his 90s, was just 22 when he took part in the 1943 air raid on German dams using experimental "bouncing" bombs.<br>The 1955 film - The Dam Busters - cemented its place as one of the most famous episodes of World War Two.<br>Mr Johnson, who lives in Westbury on Trym, received the honour from Lord Mayor Councillor Clare Campion-Smith.<br>A bomb aimer, he joined the newly formed 617 Squadron in March 1943.<br>On 16 May of that year he was one of the 133-strong squadron who dodged anti-aircraft fire, power cables and mountainous terrain to drop the four-tonne skipping bomb on dams in the Ruhr Valley.<br>Codenamed Operation Chastise, eight of the 19 planes were lost, 53 men died and three were captured.<br>But the young sergeant survived the mission and has now been presented with a special Lord Mayor's Medal in recognition of his "work for his country".<br>"We had no idea of what the target was going to be until the day of the raid, when we did the briefing," he said.<br>"But it... | The Cardiff-born player scored 478 tries in 487 games for Wigan, becoming the greatest try-scorer in the history of the British game.<br>A pioneer among black players, Boston signed for the club in 1953 and also played for Blackpool.<br>He attended the ceremony in Wigan's Believe Square, where the bronze statue created by sculptor Steve Winterburn is located.<br>Source: Rugby League Hall of Fame<br>The 82-year-old recently revealed he was suffering from dementia.<br>Wigan Warriors head coach Shaun Wane said the statue was a "fitting" tribute, adding: "Doing this for Billy is just reward for what he's done for the town of Wigan.<br>"Whenever anyone speaks about Wigan rugby league, they tend to mention Billy Boston."<br>Club director Kris Radlinski added: "His charity work over the years is unwavering, as is his support of Wigan rugby league and his love for the club and town."<br>More than £90,000 was raised for the statue, which included a large donation from former Wigan Athletic chairman Dave Whelan and £... |
+* Loss: [MatryoshkaLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters:
+ ```json
+ {
+ "loss": "CachedGISTEmbedLoss",
+ "matryoshka_dims": [
+ 2048,
+ 1024,
+ 512,
+ 256,
+ 128,
+ 64,
+ 32
+ ],
+ "matryoshka_weights": [
+ 1,
+ 0.35,
+ 0.15,
+ 0.05,
+ 0.05,
+ 0.033,
+ 0.025
+ ],
+ "n_dims_per_step": -1
+ }
+ ```
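+
+For reference, a loss with this configuration could be assembled as below. The guide model used for `CachedGISTEmbedLoss` is not recorded in this card, so the choice here is an assumption:
+
+```python
+from sentence_transformers import SentenceTransformer
+from sentence_transformers.losses import CachedGISTEmbedLoss, MatryoshkaLoss
+
+model = SentenceTransformer("BAAI/bge-m3")
+guide = SentenceTransformer("BAAI/bge-m3")  # assumption: guide model not recorded here
+
+inner_loss = CachedGISTEmbedLoss(model, guide)
+loss = MatryoshkaLoss(
+    model,
+    inner_loss,
+    matryoshka_dims=[2048, 1024, 512, 256, 128, 64, 32],
+    matryoshka_weights=[1, 0.35, 0.15, 0.05, 0.05, 0.033, 0.025],
+)
+```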
+
+### Evaluation Dataset
+
+#### global_dataset
+
+* Dataset: global_dataset
+* Size: 1,071 evaluation samples
+* Columns: sentence1, sentence2, and negative_1
+* Approximate statistics based on the first 1000 samples:
+ | | sentence1 | sentence2 | negative_1 |
+ |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
+ | type | string | string | string |
+  | details | <ul><li>min: 6 tokens</li><li>mean: 21.58 tokens</li><li>max: 85 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 102.34 tokens</li><li>max: 456 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 95.47 tokens</li><li>max: 434 tokens</li></ul> |
+* Samples:
+ | sentence1 | sentence2 | negative_1 |
+ |:----------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+ | He was born in Carter County , Tennessee and was later moved to Arkansas . | He was born in Carter County , Tennessee and later moved to Arkansas . | He was born on December 21 , 1965 in Oxon Hill , Maryland , and attended High School in New Haven , Connecticut . |
+ | who sings the theme song for kath and kim | Kath & Kim The title sequence shows the main five regular characters over a white background. In Series 3, it was amended to include Epponnee-Rae and Cujo. The five regular cast members are then credited over aerial shots of suburban houses. Szubanski is credited as "Special Guest Star," despite appearing in every episode. The theme song is "The Joker," performed by Gina Riley. A re-recording of it debuted in the telemovie opening sequence and has continued into Series 4. | Portland composer and vocalist Natasha Kmeto recently dropped by KEXP's Audioasis to perform a magnificent feat of multitasking. Surreal and symphonic, Kmeto's songs stretch the boundaries of contemporary pop and EDM with percussive, intriguing compositions that sound pleasing but aren't easily forgotten. As Kmeto croons over a background of delicate digital noise in the sleek single "Inevitable," it's hard not to be moved by this intricate, confessional, visceral, meticulously produced music. SET LIST "Inevitable" Watch Natasha Kmeto's full performance on KEXP's YouTube channel. |
+  | who plays the yellow power ranger in the 2017 movie | Power Rangers (film) Saban's Power Rangers[4] (or simply Power Rangers) is a 2017 American superhero film based on the franchise of the same name, directed by Dean Israelite and written by John Gatins. It is the third Power Rangers film, and is a reboot. The film features the main characters of the Mighty Morphin Power Rangers television series with a new cast, starring Dacre Montgomery, Naomi Scott, RJ Cyler, Becky G, Ludi Lin, Bill Hader, Bryan Cranston, and Elizabeth Banks. | Most reviewers have awarded the action-packed epic four or five stars, with The Daily Telegraph describing it as a "Krakatoan eruption of craziness".<br>The film sees British actor Tom Hardy take on Mel Gibson's role as "Road Warrior" Max Rockatansky.<br>Charlize Theron also appears in the futuristic drama, which is set for release in the UK and US on Friday.<br>Also awarding it five stars, Time Out's David Ehrlich said watching director George Miller's film was like "a tornado tearing through a tea party".<br>He added: "Fury Road steers this macho franchise in a brilliant new direction, forging a mythical portrait about the need for female rule in a world where men need to be saved from themselves."<br>Jamie Graham from Total Film said the blockbuster had "some of the greatest action ever put on screen".<br>He wrote: "In the battle of the 2015 behemoths, the maxed-out madness of Mad Max: Fury Road sets an extraordinarily high bar - then pole-vaults clean over it and smashes the entire rig to smithereen... |
+* Loss: [MatryoshkaLoss](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#matryoshkaloss) with these parameters:
+ ```json
+ {
+ "loss": "CachedGISTEmbedLoss",
+ "matryoshka_dims": [
+ 2048,
+ 1024,
+ 512,
+ 256,
+ 128,
+ 64,
+ 32
+ ],
+ "matryoshka_weights": [
+ 1,
+ 0.35,
+ 0.15,
+ 0.05,
+ 0.05,
+ 0.033,
+ 0.025
+ ],
+ "n_dims_per_step": -1
+ }
+ ```
+
+### Training Hyperparameters
+#### Non-Default Hyperparameters
+
+- `eval_strategy`: steps
+- `per_device_train_batch_size`: 192
+- `per_device_eval_batch_size`: 128
+- `learning_rate`: 0.0001
+- `weight_decay`: 0.005
+- `lr_scheduler_type`: cosine_with_min_lr
+- `lr_scheduler_kwargs`: {'num_cycles': 0.5, 'min_lr': 1e-05}
+- `warmup_ratio`: 0.2
+- `save_safetensors`: False
+- `fp16`: True
+- `remove_unused_columns`: False
+- `push_to_hub`: True
+- `hub_model_id`: bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp
+- `hub_strategy`: all_checkpoints
+- `hub_private_repo`: False
+- `batch_sampler`: no_duplicates
+
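+For readers reproducing this setup, the non-default values above map onto `SentenceTransformerTrainingArguments` roughly as follows; the output path is a placeholder:
+
+```python
+from sentence_transformers import SentenceTransformerTrainingArguments
+from sentence_transformers.training_args import BatchSamplers
+
+args = SentenceTransformerTrainingArguments(
+    output_dir="checkpoints",  # placeholder: not recorded in the card
+    eval_strategy="steps",
+    per_device_train_batch_size=192,
+    per_device_eval_batch_size=128,
+    learning_rate=1e-4,
+    weight_decay=0.005,
+    lr_scheduler_type="cosine_with_min_lr",
+    lr_scheduler_kwargs={"num_cycles": 0.5, "min_lr": 1e-5},
+    warmup_ratio=0.2,
+    save_safetensors=False,
+    fp16=True,
+    remove_unused_columns=False,
+    push_to_hub=True,
+    hub_model_id="bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp",
+    hub_strategy="all_checkpoints",
+    batch_sampler=BatchSamplers.NO_DUPLICATES,
+)
+```
+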
+#### All Hyperparameters
+<details><summary>Click to expand</summary>
+
+- `overwrite_output_dir`: False
+- `do_predict`: False
+- `eval_strategy`: steps
+- `prediction_loss_only`: True
+- `per_device_train_batch_size`: 192
+- `per_device_eval_batch_size`: 128
+- `per_gpu_train_batch_size`: None
+- `per_gpu_eval_batch_size`: None
+- `gradient_accumulation_steps`: 1
+- `eval_accumulation_steps`: None
+- `torch_empty_cache_steps`: None
+- `learning_rate`: 0.0001
+- `weight_decay`: 0.005
+- `adam_beta1`: 0.9
+- `adam_beta2`: 0.999
+- `adam_epsilon`: 1e-08
+- `max_grad_norm`: 1.0
+- `num_train_epochs`: 3
+- `max_steps`: -1
+- `lr_scheduler_type`: cosine_with_min_lr
+- `lr_scheduler_kwargs`: {'num_cycles': 0.5, 'min_lr': 1e-05}
+- `warmup_ratio`: 0.2
+- `warmup_steps`: 0
+- `log_level`: passive
+- `log_level_replica`: warning
+- `log_on_each_node`: True
+- `logging_nan_inf_filter`: True
+- `save_safetensors`: False
+- `save_on_each_node`: False
+- `save_only_model`: False
+- `restore_callback_states_from_checkpoint`: False
+- `no_cuda`: False
+- `use_cpu`: False
+- `use_mps_device`: False
+- `seed`: 42
+- `data_seed`: None
+- `jit_mode_eval`: False
+- `use_ipex`: False
+- `bf16`: False
+- `fp16`: True
+- `fp16_opt_level`: O1
+- `half_precision_backend`: auto
+- `bf16_full_eval`: False
+- `fp16_full_eval`: False
+- `tf32`: None
+- `local_rank`: 0
+- `ddp_backend`: None
+- `tpu_num_cores`: None
+- `tpu_metrics_debug`: False
+- `debug`: []
+- `dataloader_drop_last`: False
+- `dataloader_num_workers`: 0
+- `dataloader_prefetch_factor`: None
+- `past_index`: -1
+- `disable_tqdm`: False
+- `remove_unused_columns`: False
+- `label_names`: None
+- `load_best_model_at_end`: False
+- `ignore_data_skip`: False
+- `fsdp`: []
+- `fsdp_min_num_params`: 0
+- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
+- `tp_size`: 0
+- `fsdp_transformer_layer_cls_to_wrap`: None
+- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
+- `deepspeed`: None
+- `label_smoothing_factor`: 0.0
+- `optim`: adamw_torch
+- `optim_args`: None
+- `adafactor`: False
+- `group_by_length`: False
+- `length_column_name`: length
+- `ddp_find_unused_parameters`: None
+- `ddp_bucket_cap_mb`: None
+- `ddp_broadcast_buffers`: False
+- `dataloader_pin_memory`: True
+- `dataloader_persistent_workers`: False
+- `skip_memory_metrics`: True
+- `use_legacy_prediction_loop`: False
+- `push_to_hub`: True
+- `resume_from_checkpoint`: None
+- `hub_model_id`: bobox/XLMRoBERTaM3-CustomPoolinMRL-v1.07-2048MLP-step1-checkpoints-tmp
+- `hub_strategy`: all_checkpoints
+- `hub_private_repo`: False
+- `hub_always_push`: False
+- `gradient_checkpointing`: False
+- `gradient_checkpointing_kwargs`: None
+- `include_inputs_for_metrics`: False
+- `include_for_metrics`: []
+- `eval_do_concat_batches`: True
+- `fp16_backend`: auto
+- `push_to_hub_model_id`: None
+- `push_to_hub_organization`: None
+- `mp_parameters`:
+- `auto_find_batch_size`: False
+- `full_determinism`: False
+- `torchdynamo`: None
+- `ray_scope`: last
+- `ddp_timeout`: 1800
+- `torch_compile`: False
+- `torch_compile_backend`: None
+- `torch_compile_mode`: None
+- `include_tokens_per_second`: False
+- `include_num_input_tokens_seen`: False
+- `neftune_noise_alpha`: None
+- `optim_target_modules`: None
+- `batch_eval_metrics`: False
+- `eval_on_start`: False
+- `use_liger_kernel`: False
+- `eval_use_gather_object`: False
+- `average_tokens_across_devices`: False
+- `prompts`: None
+- `batch_sampler`: no_duplicates
+- `multi_dataset_batch_sampler`: proportional
+
+</details>
+
+### Training Logs
+<details><summary>Click to expand</summary>
+
+| Epoch | Step | Training Loss | global dataset loss | sts-test_spearman_cosine | sts-test-1024_spearman_cosine | sts-test-512_spearman_cosine | sts-test-256_spearman_cosine | sts-test-128_spearman_cosine | sts-test-64_spearman_cosine | sts-test-32_spearman_cosine | allNLI-triplets_cosine_accuracy | allNLI--triplets-1024_cosine_accuracy | allNLI--triplets-512_cosine_accuracy | allNLI--triplets-256_cosine_accuracy | allNLI--triplets-128_cosine_accuracy | allNLI--triplets-32_cosine_accuracy |
+|:------:|:----:|:-------------:|:-------------------:|:------------------------:|:-----------------------------:|:----------------------------:|:----------------------------:|:----------------------------:|:---------------------------:|:---------------------------:|:-------------------------------:|:-------------------------------------:|:------------------------------------:|:------------------------------------:|:------------------------------------:|:-----------------------------------:|
+| 0.0013 | 1 | 11.01 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0026 | 2 | 11.01 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0039 | 3 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0052 | 4 | 11.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0065 | 5 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0078 | 6 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0091 | 7 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0104 | 8 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0117 | 9 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0130 | 10 | 11.0099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0143 | 11 | 11.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0156 | 12 | 11.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0169 | 13 | 11.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0182 | 14 | 11.0097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0195 | 15 | 11.0096 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0208 | 16 | 11.0095 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0221 | 17 | 11.0096 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0234 | 18 | 11.0094 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0247 | 19 | 11.0095 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0260 | 20 | 11.0093 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0273 | 21 | 11.0091 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0286 | 22 | 11.0091 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0299 | 23 | 11.0089 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0312 | 24 | 11.0088 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0325 | 25 | 11.0087 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0338 | 26 | 11.0083 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0351 | 27 | 11.0083 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0364 | 28 | 11.0077 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0377 | 29 | 11.0075 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0390 | 30 | 11.0053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0403 | 31 | 11.007 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0416 | 32 | 11.006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0429 | 33 | 11.0056 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0442 | 34 | 11.0045 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0455 | 35 | 11.0035 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0468 | 36 | 11.0017 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0481 | 37 | 11.0001 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0494 | 38 | 10.9974 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0506 | 39 | 10.9955 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0519 | 40 | 10.9926 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0532 | 41 | 10.9886 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0545 | 42 | 10.983 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0558 | 43 | 10.977 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0571 | 44 | 10.9703 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0584 | 45 | 10.9547 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0597 | 46 | 10.9435 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0610 | 47 | 10.9263 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0623 | 48 | 10.9014 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0636 | 49 | 10.8546 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0649 | 50 | 10.8134 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0662 | 51 | 10.7805 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0675 | 52 | 10.7495 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0688 | 53 | 10.6997 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0701 | 54 | 10.5774 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0714 | 55 | 10.5443 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0727 | 56 | 10.4336 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0740 | 57 | 10.3595 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0753 | 58 | 10.2175 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0766 | 59 | 10.667 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0779 | 60 | 10.0476 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0792 | 61 | 10.0367 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0805 | 62 | 10.0531 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0818 | 63 | 9.7963 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0831 | 64 | 9.4349 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0844 | 65 | 9.5733 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0857 | 66 | 9.0604 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0870 | 67 | 8.7291 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0883 | 68 | 8.38 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0896 | 69 | 8.2196 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0909 | 70 | 7.8955 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0922 | 71 | 7.2905 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0935 | 72 | 7.3614 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0948 | 73 | 6.6031 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0961 | 74 | 5.9224 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0974 | 75 | 5.7627 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.0987 | 76 | 5.2194 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1 | 77 | 5.3339 | 1.7795 | 0.9061 | 0.9033 | 0.9014 | 0.8961 | 0.8942 | 0.8899 | 0.8781 | 0.9375 | 0.9453 | 0.9375 | 0.9297 | 0.9062 | 0.9141 |
+| 0.1013 | 78 | 5.0749 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1026 | 79 | 4.7881 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1039 | 80 | 4.2653 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1052 | 81 | 4.0863 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1065 | 82 | 4.2845 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1078 | 83 | 3.8356 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1091 | 84 | 3.5652 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1104 | 85 | 4.1484 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1117 | 86 | 3.4415 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1130 | 87 | 3.129 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1143 | 88 | 3.5604 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1156 | 89 | 3.6036 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1169 | 90 | 2.8276 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1182 | 91 | 3.4752 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1195 | 92 | 3.3796 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1208 | 93 | 2.7567 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1221 | 94 | 3.2728 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1234 | 95 | 2.8496 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1247 | 96 | 2.6072 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1260 | 97 | 2.5959 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1273 | 98 | 2.4787 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1286 | 99 | 2.1368 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1299 | 100 | 2.233 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1312 | 101 | 2.4691 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1325 | 102 | 2.3472 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1338 | 103 | 2.0578 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1351 | 104 | 2.3437 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1364 | 105 | 2.4822 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1377 | 106 | 2.2261 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1390 | 107 | 2.3113 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1403 | 108 | 2.2162 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1416 | 109 | 2.0638 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1429 | 110 | 1.9822 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1442 | 111 | 1.9165 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1455 | 112 | 2.0633 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1468 | 113 | 1.9012 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1481 | 114 | 1.8559 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1494 | 115 | 2.1804 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1506 | 116 | 1.9728 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1519 | 117 | 1.795 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1532 | 118 | 1.5135 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1545 | 119 | 1.8776 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1558 | 120 | 1.9858 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1571 | 121 | 1.6143 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1584 | 122 | 1.7321 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1597 | 123 | 1.7272 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1610 | 124 | 1.5508 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1623 | 125 | 1.2961 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1636 | 126 | 1.6425 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1649 | 127 | 1.5193 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1662 | 128 | 1.6626 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1675 | 129 | 2.0871 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1688 | 130 | 1.5114 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1701 | 131 | 1.4414 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1714 | 132 | 1.5323 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1727 | 133 | 1.2476 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1740 | 134 | 1.4671 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1753 | 135 | 1.5581 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1766 | 136 | 1.5237 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1779 | 137 | 1.2613 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1792 | 138 | 1.2949 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1805 | 139 | 1.3592 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1818 | 140 | 1.3918 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1831 | 141 | 1.0371 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1844 | 142 | 1.5028 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1857 | 143 | 1.2347 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1870 | 144 | 1.2731 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1883 | 145 | 1.2688 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1896 | 146 | 1.3688 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1909 | 147 | 1.8782 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1922 | 148 | 1.0597 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1935 | 149 | 1.229 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1948 | 150 | 1.1233 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1961 | 151 | 1.2579 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1974 | 152 | 1.1547 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.1987 | 153 | 1.3986 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2 | 154 | 1.3071 | 0.8410 | 0.9182 | 0.9158 | 0.9152 | 0.9113 | 0.9080 | 0.9005 | 0.8867 | 0.9453 | 0.9375 | 0.9219 | 0.9062 | 0.9062 | 0.9141 |
+| 0.2013 | 155 | 1.5306 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2026 | 156 | 1.0775 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2039 | 157 | 1.2954 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2052 | 158 | 1.1233 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2065 | 159 | 1.2492 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2078 | 160 | 1.2897 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2091 | 161 | 1.0752 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2104 | 162 | 1.269 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2117 | 163 | 1.1273 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2130 | 164 | 1.2208 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2143 | 165 | 1.2356 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2156 | 166 | 1.1039 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2169 | 167 | 1.0146 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2182 | 168 | 1.3097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2195 | 169 | 1.2947 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2208 | 170 | 1.291 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2221 | 171 | 1.0295 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2234 | 172 | 1.0948 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2247 | 173 | 1.1393 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2260 | 174 | 1.0063 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2273 | 175 | 1.1769 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2286 | 176 | 1.1731 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2299 | 177 | 0.7476 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2312 | 178 | 1.022 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2325 | 179 | 0.9579 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2338 | 180 | 1.0753 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2351 | 181 | 1.2243 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2364 | 182 | 1.2154 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2377 | 183 | 0.8147 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2390 | 184 | 1.1086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2403 | 185 | 1.0155 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2416 | 186 | 1.1898 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2429 | 187 | 1.11 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2442 | 188 | 1.3128 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2455 | 189 | 1.0642 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2468 | 190 | 0.8932 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2481 | 191 | 1.1683 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2494 | 192 | 1.0554 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2506 | 193 | 1.2186 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2519 | 194 | 1.027 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2532 | 195 | 0.799 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2545 | 196 | 1.099 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2558 | 197 | 0.7717 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2571 | 198 | 1.1011 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2584 | 199 | 1.0083 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2597 | 200 | 1.1488 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2610 | 201 | 1.0453 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2623 | 202 | 1.2942 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2636 | 203 | 1.0279 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2649 | 204 | 0.9493 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2662 | 205 | 1.2698 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2675 | 206 | 1.066 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2688 | 207 | 1.0352 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2701 | 208 | 0.8514 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2714 | 209 | 1.094 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2727 | 210 | 1.0087 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2740 | 211 | 0.9035 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2753 | 212 | 0.9475 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2766 | 213 | 0.9838 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2779 | 214 | 1.1285 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2792 | 215 | 0.7406 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2805 | 216 | 0.9309 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2818 | 217 | 0.8669 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2831 | 218 | 1.0496 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2844 | 219 | 0.9547 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2857 | 220 | 0.7442 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2870 | 221 | 0.6949 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2883 | 222 | 0.958 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2896 | 223 | 0.7528 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2909 | 224 | 0.8338 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2922 | 225 | 1.108 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2935 | 226 | 1.1113 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2948 | 227 | 0.8191 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2961 | 228 | 0.8167 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2974 | 229 | 0.8165 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.2987 | 230 | 0.8569 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3 | 231 | 0.943 | 0.6770 | 0.9229 | 0.9210 | 0.9212 | 0.9174 | 0.9159 | 0.9075 | 0.8928 | 0.9453 | 0.9531 | 0.9375 | 0.9219 | 0.8984 | 0.9141 |
+| 0.3013 | 232 | 0.8316 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3026 | 233 | 0.6811 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3039 | 234 | 0.7579 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3052 | 235 | 0.8088 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3065 | 236 | 0.8242 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3078 | 237 | 0.9401 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3091 | 238 | 0.9815 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3104 | 239 | 0.6338 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3117 | 240 | 1.1544 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3130 | 241 | 0.7693 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3143 | 242 | 0.745 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3156 | 243 | 0.722 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3169 | 244 | 0.7834 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3182 | 245 | 0.8727 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3195 | 246 | 0.657 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3208 | 247 | 0.9334 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3221 | 248 | 0.9641 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3234 | 249 | 0.9539 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3247 | 250 | 0.916 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3260 | 251 | 0.7535 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3273 | 252 | 1.0744 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3286 | 253 | 0.7207 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3299 | 254 | 0.9337 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3312 | 255 | 0.7114 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3325 | 256 | 0.6995 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3338 | 257 | 0.8138 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3351 | 258 | 1.0225 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3364 | 259 | 0.9528 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3377 | 260 | 0.9607 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3390 | 261 | 0.971 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3403 | 262 | 0.7819 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3416 | 263 | 0.6537 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3429 | 264 | 1.0185 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3442 | 265 | 1.0832 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3455 | 266 | 0.533 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3468 | 267 | 0.9556 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3481 | 268 | 0.8863 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3494 | 269 | 1.0875 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3506 | 270 | 0.9424 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3519 | 271 | 0.7181 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3532 | 272 | 0.9829 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3545 | 273 | 0.8581 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3558 | 274 | 0.669 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3571 | 275 | 1.0623 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3584 | 276 | 0.8738 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3597 | 277 | 1.0384 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3610 | 278 | 0.9253 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3623 | 279 | 0.6211 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3636 | 280 | 0.7466 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3649 | 281 | 0.8584 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3662 | 282 | 0.8048 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3675 | 283 | 0.7548 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3688 | 284 | 0.7822 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3701 | 285 | 1.0389 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3714 | 286 | 0.7156 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3727 | 287 | 0.7989 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3740 | 288 | 1.0917 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3753 | 289 | 0.9575 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3766 | 290 | 0.9086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3779 | 291 | 1.0582 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3792 | 292 | 0.879 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3805 | 293 | 0.6524 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3818 | 294 | 0.767 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3831 | 295 | 0.8842 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3844 | 296 | 0.9167 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3857 | 297 | 1.0106 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3870 | 298 | 0.8014 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3883 | 299 | 0.7783 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3896 | 300 | 0.8608 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3909 | 301 | 0.6383 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3922 | 302 | 0.9668 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3935 | 303 | 0.9447 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3948 | 304 | 0.6609 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3961 | 305 | 0.8104 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3974 | 306 | 0.7231 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.3987 | 307 | 0.853 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4 | 308 | 0.8893 | 0.6459 | 0.9304 | 0.9281 | 0.9266 | 0.9250 | 0.9175 | 0.9150 | 0.9039 | 0.9453 | 0.9453 | 0.9375 | 0.9297 | 0.9141 | 0.9141 |
+| 0.4013 | 309 | 0.6674 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4026 | 310 | 0.7047 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4039 | 311 | 0.778 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4052 | 312 | 0.6232 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4065 | 313 | 0.8989 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4078 | 314 | 0.9614 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4091 | 315 | 0.9365 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4104 | 316 | 0.8101 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4117 | 317 | 0.8199 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4130 | 318 | 0.8435 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4143 | 319 | 0.9404 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4156 | 320 | 0.6967 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4169 | 321 | 0.8238 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4182 | 322 | 0.9732 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4195 | 323 | 0.6953 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4208 | 324 | 0.6633 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4221 | 325 | 0.9486 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4234 | 326 | 0.9642 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4247 | 327 | 0.7443 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4260 | 328 | 1.0133 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4273 | 329 | 1.0489 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4286 | 330 | 0.5865 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4299 | 331 | 0.7757 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4312 | 332 | 0.9716 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4325 | 333 | 0.7289 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4338 | 334 | 0.864 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4351 | 335 | 0.706 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4364 | 336 | 0.7336 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4377 | 337 | 0.6631 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4390 | 338 | 0.8824 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4403 | 339 | 0.7209 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4416 | 340 | 0.6866 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4429 | 341 | 0.8544 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4442 | 342 | 0.7208 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4455 | 343 | 0.9237 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4468 | 344 | 0.8569 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4481 | 345 | 0.9145 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4494 | 346 | 0.8172 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4506 | 347 | 0.8755 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4519 | 348 | 0.7049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4532 | 349 | 0.8571 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4545 | 350 | 0.71 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4558 | 351 | 0.7568 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4571 | 352 | 0.5732 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4584 | 353 | 0.7801 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4597 | 354 | 0.6895 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4610 | 355 | 0.8078 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4623 | 356 | 0.9609 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4636 | 357 | 0.9131 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4649 | 358 | 0.8842 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4662 | 359 | 0.7314 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4675 | 360 | 0.7894 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4688 | 361 | 0.7982 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4701 | 362 | 0.8729 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4714 | 363 | 0.9753 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4727 | 364 | 0.6694 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4740 | 365 | 0.7563 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4753 | 366 | 0.7814 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4766 | 367 | 0.5552 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4779 | 368 | 1.0459 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4792 | 369 | 0.9098 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4805 | 370 | 0.7817 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4818 | 371 | 0.6203 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4831 | 372 | 0.7885 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4844 | 373 | 0.7767 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4857 | 374 | 0.8126 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4870 | 375 | 0.7252 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4883 | 376 | 0.8657 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4896 | 377 | 0.8491 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4909 | 378 | 0.784 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4922 | 379 | 0.6108 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4935 | 380 | 1.0215 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4948 | 381 | 0.8208 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4961 | 382 | 0.7456 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4974 | 383 | 1.0181 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.4987 | 384 | 0.8263 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5 | 385 | 0.7218 | 0.6584 | 0.9217 | 0.9193 | 0.9186 | 0.9130 | 0.9097 | 0.9105 | 0.8955 | 0.9609 | 0.9531 | 0.9375 | 0.9375 | 0.9219 | 0.9062 |
+| 0.5013 | 386 | 0.6829 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5026 | 387 | 0.8304 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5039 | 388 | 0.6935 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5052 | 389 | 0.9569 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5065 | 390 | 0.7174 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5078 | 391 | 0.8543 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5091 | 392 | 0.8769 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5104 | 393 | 0.5945 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5117 | 394 | 0.7787 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5130 | 395 | 0.9674 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5143 | 396 | 0.7011 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5156 | 397 | 0.9993 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5169 | 398 | 0.6368 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5182 | 399 | 0.6236 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5195 | 400 | 0.6543 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5208 | 401 | 0.6927 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5221 | 402 | 1.1117 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5234 | 403 | 0.8847 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5247 | 404 | 0.9657 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5260 | 405 | 0.8722 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5273 | 406 | 0.7465 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5286 | 407 | 0.5488 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5299 | 408 | 0.6888 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5312 | 409 | 0.7561 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5325 | 410 | 0.5411 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5338 | 411 | 0.7663 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5351 | 412 | 1.0137 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5364 | 413 | 0.667 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5377 | 414 | 0.7111 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5390 | 415 | 0.8531 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5403 | 416 | 0.8561 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5416 | 417 | 0.612 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5429 | 418 | 0.7229 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5442 | 419 | 0.8387 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5455 | 420 | 0.7439 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5468 | 421 | 0.5846 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5481 | 422 | 0.5976 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5494 | 423 | 0.7378 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5506 | 424 | 0.8756 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5519 | 425 | 0.6755 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5532 | 426 | 0.6566 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5545 | 427 | 0.5624 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5558 | 428 | 0.5887 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5571 | 429 | 0.9906 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5584 | 430 | 0.735 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5597 | 431 | 0.8759 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5610 | 432 | 0.8514 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5623 | 433 | 0.7531 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5636 | 434 | 0.6816 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5649 | 435 | 0.8911 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5662 | 436 | 0.7555 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5675 | 437 | 0.8079 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5688 | 438 | 0.6203 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5701 | 439 | 0.6348 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5714 | 440 | 0.5986 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5727 | 441 | 0.8259 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5740 | 442 | 0.7051 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5753 | 443 | 0.591 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5766 | 444 | 0.7197 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5779 | 445 | 0.6624 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5792 | 446 | 0.9108 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5805 | 447 | 0.7401 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5818 | 448 | 0.7475 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5831 | 449 | 1.0367 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5844 | 450 | 0.819 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5857 | 451 | 0.9914 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5870 | 452 | 0.5852 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5883 | 453 | 0.8283 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5896 | 454 | 0.9477 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5909 | 455 | 0.7091 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5922 | 456 | 0.958 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5935 | 457 | 0.6203 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5948 | 458 | 0.6183 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5961 | 459 | 0.7303 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5974 | 460 | 0.9273 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.5987 | 461 | 0.7091 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6 | 462 | 0.9906 | 0.6540 | 0.9273 | 0.9263 | 0.9279 | 0.9217 | 0.9135 | 0.9083 | 0.8991 | 0.9453 | 0.9453 | 0.9453 | 0.9375 | 0.9297 | 0.9219 |
+| 0.6013 | 463 | 0.9442 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6026 | 464 | 0.7803 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6039 | 465 | 0.6228 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6052 | 466 | 0.7387 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6065 | 467 | 0.9448 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6078 | 468 | 0.7838 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6091 | 469 | 0.9093 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6104 | 470 | 0.6647 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6117 | 471 | 0.5482 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6130 | 472 | 0.6356 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6143 | 473 | 0.8282 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6156 | 474 | 0.8235 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6169 | 475 | 0.6899 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6182 | 476 | 0.6827 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6195 | 477 | 0.7347 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6208 | 478 | 0.8705 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6221 | 479 | 0.8298 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6234 | 480 | 0.7419 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6247 | 481 | 1.1283 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6260 | 482 | 0.7298 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6273 | 483 | 0.8972 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6286 | 484 | 0.7502 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6299 | 485 | 0.6169 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6312 | 486 | 0.7648 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6325 | 487 | 0.8191 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6338 | 488 | 0.8094 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6351 | 489 | 0.9006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6364 | 490 | 0.8384 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6377 | 491 | 0.6431 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6390 | 492 | 0.7507 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6403 | 493 | 1.0524 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6416 | 494 | 0.9618 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6429 | 495 | 0.8119 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6442 | 496 | 0.6538 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6455 | 497 | 0.8877 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6468 | 498 | 1.0556 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6481 | 499 | 0.6788 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6494 | 500 | 0.9637 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6506 | 501 | 0.8143 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6519 | 502 | 0.6673 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6532 | 503 | 0.8358 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6545 | 504 | 0.7588 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6558 | 505 | 0.8241 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6571 | 506 | 0.925 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6584 | 507 | 0.6336 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6597 | 508 | 0.6907 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6610 | 509 | 0.8237 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6623 | 510 | 0.828 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6636 | 511 | 0.6624 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6649 | 512 | 0.8069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6662 | 513 | 0.566 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6675 | 514 | 1.0294 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6688 | 515 | 0.8889 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6701 | 516 | 0.6539 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6714 | 517 | 0.7279 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6727 | 518 | 0.7805 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6740 | 519 | 0.6657 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6753 | 520 | 0.5069 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6766 | 521 | 0.9967 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6779 | 522 | 0.719 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6792 | 523 | 0.4709 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6805 | 524 | 0.7421 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6818 | 525 | 0.7005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6831 | 526 | 0.9615 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6844 | 527 | 0.7011 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6857 | 528 | 0.7021 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6870 | 529 | 0.7954 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6883 | 530 | 1.0179 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6896 | 531 | 0.6674 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6909 | 532 | 0.7634 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6922 | 533 | 0.5306 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6935 | 534 | 0.6792 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6948 | 535 | 0.8223 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6961 | 536 | 0.9346 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6974 | 537 | 0.7369 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.6987 | 538 | 0.7636 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7 | 539 | 0.632 | 0.6428 | 0.9242 | 0.9253 | 0.9243 | 0.9223 | 0.9169 | 0.9143 | 0.9123 | 0.9297 | 0.9297 | 0.9219 | 0.9141 | 0.8984 | 0.8984 |
+| 0.7013 | 540 | 0.672 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7026 | 541 | 0.8924 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7039 | 542 | 0.822 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7052 | 543 | 0.4934 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7065 | 544 | 0.5939 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7078 | 545 | 0.5593 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7091 | 546 | 0.737 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7104 | 547 | 0.6734 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7117 | 548 | 0.949 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7130 | 549 | 0.9343 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7143 | 550 | 0.4662 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7156 | 551 | 0.6602 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7169 | 552 | 0.6273 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7182 | 553 | 0.665 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7195 | 554 | 0.7826 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7208 | 555 | 1.0379 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7221 | 556 | 0.5787 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7234 | 557 | 0.6639 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7247 | 558 | 0.5511 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7260 | 559 | 0.6313 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7273 | 560 | 0.7771 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7286 | 561 | 0.4077 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7299 | 562 | 0.6628 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7312 | 563 | 0.5646 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7325 | 564 | 0.5972 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7338 | 565 | 1.0101 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7351 | 566 | 0.9496 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7364 | 567 | 0.6219 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7377 | 568 | 0.7275 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7390 | 569 | 0.905 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7403 | 570 | 0.7303 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7416 | 571 | 0.6518 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7429 | 572 | 0.6725 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7442 | 573 | 0.8026 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7455 | 574 | 0.6042 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7468 | 575 | 0.7192 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7481 | 576 | 0.6089 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7494 | 577 | 0.92 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7506 | 578 | 0.81 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7519 | 579 | 0.7004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7532 | 580 | 0.636 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7545 | 581 | 0.6059 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7558 | 582 | 0.5995 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7571 | 583 | 0.8563 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7584 | 584 | 0.6008 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7597 | 585 | 0.6576 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7610 | 586 | 0.5438 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7623 | 587 | 0.6347 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7636 | 588 | 0.6002 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7649 | 589 | 0.726 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7662 | 590 | 0.8955 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7675 | 591 | 0.5638 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7688 | 592 | 0.6315 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7701 | 593 | 0.7253 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7714 | 594 | 0.6148 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7727 | 595 | 0.4651 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7740 | 596 | 0.5097 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7753 | 597 | 0.7487 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7766 | 598 | 0.7269 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7779 | 599 | 0.8212 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7792 | 600 | 0.822 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7805 | 601 | 0.6172 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7818 | 602 | 0.6488 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7831 | 603 | 0.8928 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7844 | 604 | 0.7068 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7857 | 605 | 0.6558 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7870 | 606 | 0.6012 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7883 | 607 | 0.7806 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7896 | 608 | 0.5802 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7909 | 609 | 0.7414 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7922 | 610 | 0.6715 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7935 | 611 | 0.6947 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7948 | 612 | 0.6908 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7961 | 613 | 0.587 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7974 | 614 | 0.642 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.7987 | 615 | 0.5082 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8 | 616 | 0.6574 | 0.6188 | 0.9289 | 0.9274 | 0.9280 | 0.9270 | 0.9238 | 0.9215 | 0.9138 | 0.9453 | 0.9453 | 0.9453 | 0.9453 | 0.9297 | 0.9297 |
+| 0.8013 | 617 | 0.5828 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8026 | 618 | 0.8306 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8039 | 619 | 0.6332 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8052 | 620 | 0.6006 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8065 | 621 | 0.5365 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8078 | 622 | 0.6386 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8091 | 623 | 0.684 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8104 | 624 | 0.9585 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8117 | 625 | 0.6687 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8130 | 626 | 0.7117 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8143 | 627 | 0.7052 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8156 | 628 | 0.7474 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8169 | 629 | 0.7309 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8182 | 630 | 0.6185 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8195 | 631 | 0.7055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8208 | 632 | 0.8739 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8221 | 633 | 0.7134 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8234 | 634 | 0.7014 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8247 | 635 | 0.6907 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8260 | 636 | 0.6316 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8273 | 637 | 1.013 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8286 | 638 | 0.7022 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8299 | 639 | 0.5908 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8312 | 640 | 0.6008 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8325 | 641 | 0.6932 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8338 | 642 | 0.5935 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8351 | 643 | 0.7817 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8364 | 644 | 0.5923 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8377 | 645 | 0.5689 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8390 | 646 | 0.5501 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8403 | 647 | 0.7422 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8416 | 648 | 0.9285 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8429 | 649 | 0.6054 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8442 | 650 | 0.6312 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8455 | 651 | 0.9075 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8468 | 652 | 0.5705 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8481 | 653 | 0.9817 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8494 | 654 | 0.6819 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8506 | 655 | 0.9261 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8519 | 656 | 0.3939 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8532 | 657 | 0.7817 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8545 | 658 | 0.5585 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8558 | 659 | 0.4809 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8571 | 660 | 0.9696 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8584 | 661 | 0.5146 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8597 | 662 | 0.8808 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8610 | 663 | 0.5708 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8623 | 664 | 0.4975 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8636 | 665 | 0.5205 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8649 | 666 | 0.9157 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8662 | 667 | 0.7465 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8675 | 668 | 0.8935 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8688 | 669 | 0.7008 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8701 | 670 | 0.4422 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8714 | 671 | 0.4216 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8727 | 672 | 0.739 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8740 | 673 | 0.5473 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8753 | 674 | 0.6101 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8766 | 675 | 0.6453 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8779 | 676 | 0.6317 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8792 | 677 | 0.6964 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8805 | 678 | 0.617 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8818 | 679 | 0.4839 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8831 | 680 | 0.6383 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8844 | 681 | 0.595 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8857 | 682 | 0.5053 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8870 | 683 | 0.8086 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8883 | 684 | 0.7465 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8896 | 685 | 0.9109 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8909 | 686 | 0.8457 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8922 | 687 | 0.7412 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8935 | 688 | 0.6259 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8948 | 689 | 0.8406 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8961 | 690 | 0.5437 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8974 | 691 | 0.7564 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.8987 | 692 | 0.7379 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9 | 693 | 0.6142 | 0.6449 | 0.9256 | 0.9256 | 0.9270 | 0.9244 | 0.9201 | 0.9186 | 0.9080 | 0.9375 | 0.9453 | 0.9375 | 0.9375 | 0.9062 | 0.9062 |
+| 0.9013 | 694 | 0.5799 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9026 | 695 | 0.5429 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9039 | 696 | 0.6803 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9052 | 697 | 0.5692 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9065 | 698 | 0.665 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9078 | 699 | 0.7442 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9091 | 700 | 0.5755 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9104 | 701 | 0.9233 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9117 | 702 | 0.8297 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9130 | 703 | 0.7288 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9143 | 704 | 0.8476 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9156 | 705 | 0.6464 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9169 | 706 | 0.8881 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9182 | 707 | 0.7609 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9195 | 708 | 0.6834 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9208 | 709 | 0.8218 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9221 | 710 | 0.7413 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9234 | 711 | 0.6379 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9247 | 712 | 0.8378 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9260 | 713 | 0.5754 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9273 | 714 | 0.7367 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9286 | 715 | 0.6389 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9299 | 716 | 0.4474 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9312 | 717 | 0.6341 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9325 | 718 | 0.4793 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9338 | 719 | 0.7057 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9351 | 720 | 0.9687 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9364 | 721 | 0.7017 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9377 | 722 | 0.7511 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9390 | 723 | 0.7509 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9403 | 724 | 0.5742 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9416 | 725 | 0.4425 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9429 | 726 | 0.5795 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9442 | 727 | 0.5996 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9455 | 728 | 0.6483 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9468 | 729 | 0.6893 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9481 | 730 | 0.5996 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9494 | 731 | 0.7856 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9506 | 732 | 0.7376 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9519 | 733 | 0.4758 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9532 | 734 | 0.52 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9545 | 735 | 0.6945 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9558 | 736 | 0.5963 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9571 | 737 | 0.5071 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9584 | 738 | 0.7511 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9597 | 739 | 0.4206 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9610 | 740 | 0.4838 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9623 | 741 | 0.6777 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9636 | 742 | 0.7201 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9649 | 743 | 0.5443 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9662 | 744 | 0.7611 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9675 | 745 | 0.7433 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9688 | 746 | 0.8613 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9701 | 747 | 0.4005 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9714 | 748 | 0.5841 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9727 | 749 | 0.7196 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9740 | 750 | 0.5471 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9753 | 751 | 0.7167 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9766 | 752 | 0.7802 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9779 | 753 | 0.4997 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9792 | 754 | 0.6485 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9805 | 755 | 0.7916 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9818 | 756 | 0.6993 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9831 | 757 | 0.7207 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9844 | 758 | 0.6119 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9857 | 759 | 0.7745 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9870 | 760 | 0.5289 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9883 | 761 | 0.5566 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9896 | 762 | 0.8124 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9909 | 763 | 0.632 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9922 | 764 | 0.5499 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9935 | 765 | 0.7703 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9948 | 766 | 0.5896 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9961 | 767 | 0.8109 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9974 | 768 | 0.6031 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 0.9987 | 769 | 0.5933 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0 | 770 | 0.5364 | 0.6764 | 0.9294 | 0.9289 | 0.9303 | 0.9310 | 0.9267 | 0.9216 | 0.9142 | 0.9453 | 0.9453 | 0.9453 | 0.9453 | 0.9375 | 0.9219 |
+| 1.0013 | 771 | 0.5231 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0026 | 772 | 0.5183 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0039 | 773 | 0.5389 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0052 | 774 | 0.6834 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0065 | 775 | 0.5017 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0078 | 776 | 0.6083 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0091 | 777 | 0.5966 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0104 | 778 | 0.6968 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0117 | 779 | 0.6885 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0130 | 780 | 0.4656 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0143 | 781 | 0.5064 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0156 | 782 | 0.5485 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0169 | 783 | 0.6034 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0182 | 784 | 0.5317 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0195 | 785 | 0.6418 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0208 | 786 | 0.5612 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0221 | 787 | 0.6258 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0234 | 788 | 0.44 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0247 | 789 | 0.5192 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0260 | 790 | 0.7895 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0273 | 791 | 0.5412 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0286 | 792 | 0.5783 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0299 | 793 | 0.7388 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0312 | 794 | 0.5155 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0325 | 795 | 0.5151 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0338 | 796 | 0.6496 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0351 | 797 | 0.5297 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0364 | 798 | 0.7569 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0377 | 799 | 0.5531 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0390 | 800 | 0.6787 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0403 | 801 | 0.6457 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0416 | 802 | 0.5036 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0429 | 803 | 0.6496 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0442 | 804 | 0.5846 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0455 | 805 | 0.6225 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0468 | 806 | 0.4491 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0481 | 807 | 0.5019 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0494 | 808 | 0.6818 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0506 | 809 | 0.6625 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0519 | 810 | 0.3794 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0532 | 811 | 0.573 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0545 | 812 | 0.4855 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0558 | 813 | 0.5567 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0571 | 814 | 0.5972 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0584 | 815 | 0.493 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0597 | 816 | 0.4468 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0610 | 817 | 0.6582 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0623 | 818 | 0.6123 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0636 | 819 | 0.49 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0649 | 820 | 0.5702 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0662 | 821 | 0.6142 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0675 | 822 | 0.4589 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0688 | 823 | 0.6596 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0701 | 824 | 0.6645 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0714 | 825 | 0.5507 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0727 | 826 | 0.5414 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0740 | 827 | 0.4573 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0753 | 828 | 0.4044 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0766 | 829 | 0.5288 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0779 | 830 | 0.473 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0792 | 831 | 0.4046 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0805 | 832 | 0.5648 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0818 | 833 | 0.7126 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0831 | 834 | 0.5931 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0844 | 835 | 0.5465 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0857 | 836 | 0.4738 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0870 | 837 | 0.5459 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0883 | 838 | 0.4774 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0896 | 839 | 0.3781 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0909 | 840 | 0.4029 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0922 | 841 | 0.516 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0935 | 842 | 0.558 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0948 | 843 | 0.6146 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0961 | 844 | 0.6729 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0974 | 845 | 0.4631 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.0987 | 846 | 0.5166 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1 | 847 | 0.6677 | 0.7927 | 0.9202 | 0.9212 | 0.9221 | 0.9214 | 0.9179 | 0.9132 | 0.9062 | 0.9297 | 0.9297 | 0.9219 | 0.9219 | 0.9219 | 0.9141 |
+| 1.1013 | 848 | 0.4758 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1026 | 849 | 0.6897 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1039 | 850 | 0.5571 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1052 | 851 | 0.7302 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1065 | 852 | 0.5423 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1078 | 853 | 0.6124 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1091 | 854 | 0.5472 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1104 | 855 | 0.6467 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1117 | 856 | 0.5758 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1130 | 857 | 0.5486 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1143 | 858 | 0.7601 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1156 | 859 | 0.48 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1169 | 860 | 0.3987 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1182 | 861 | 0.5175 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1195 | 862 | 0.546 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1208 | 863 | 0.4782 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1221 | 864 | 0.5394 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1234 | 865 | 0.5649 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1247 | 866 | 0.6247 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1260 | 867 | 0.5004 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1273 | 868 | 0.4551 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1286 | 869 | 0.4896 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1299 | 870 | 0.5858 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1312 | 871 | 0.6222 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1325 | 872 | 0.6812 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1338 | 873 | 0.6444 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1351 | 874 | 0.4732 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1364 | 875 | 0.4766 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1377 | 876 | 0.5361 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1390 | 877 | 0.6461 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1403 | 878 | 0.6055 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1416 | 879 | 0.5212 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1429 | 880 | 0.5049 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1442 | 881 | 0.6102 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1455 | 882 | 0.5279 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1468 | 883 | 0.6135 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1481 | 884 | 0.4196 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1494 | 885 | 0.634 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1506 | 886 | 0.5867 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1519 | 887 | 0.5583 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1532 | 888 | 0.4851 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1545 | 889 | 0.5288 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1558 | 890 | 0.8795 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1571 | 891 | 0.4514 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1584 | 892 | 0.5311 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1597 | 893 | 0.2328 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1610 | 894 | 0.4935 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1623 | 895 | 0.605 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1636 | 896 | 0.5583 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1649 | 897 | 0.7555 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1662 | 898 | 0.3022 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1675 | 899 | 0.4693 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1688 | 900 | 0.6197 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1701 | 901 | 0.5576 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1714 | 902 | 0.6272 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1727 | 903 | 0.6684 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1740 | 904 | 0.6356 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1753 | 905 | 0.729 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1766 | 906 | 0.5553 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1779 | 907 | 0.466 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1792 | 908 | 0.4107 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1805 | 909 | 0.8187 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1818 | 910 | 0.5597 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1831 | 911 | 0.6618 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1844 | 912 | 0.5037 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1857 | 913 | 0.6033 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1870 | 914 | 0.3695 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1883 | 915 | 0.5632 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1896 | 916 | 0.6387 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1909 | 917 | 0.4989 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1922 | 918 | 0.7159 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1935 | 919 | 0.4639 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1948 | 920 | 0.52 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1961 | 921 | 0.431 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1974 | 922 | 0.5008 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.1987 | 923 | 0.5209 | - | - | - | - | - | - | - | - | - | - | - | - | - | - |
+| 1.2 | 924 | 0.4301 | 0.6651 | 0.9258 | 0.9259 | 0.9273 | 0.9269 | 0.9248 | 0.9223 | 0.9154 | 0.9297 | 0.9297 | 0.9297 | 0.9297 | 0.9297 | 0.9219 |
+
+
+
+### Framework Versions
+- Python: 3.11.11
+- Sentence Transformers: 4.1.0
+- Transformers: 4.51.1
+- PyTorch: 2.5.1+cu124
+- Accelerate: 1.3.0
+- Datasets: 3.5.0
+- Tokenizers: 0.21.0
+
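+### Loading the checkpoint
+
+The checkpoint's `modules.json` registers the pooling module as `__main__.AdvancedWeightedPooling`, so the custom pooling class from the training script must be visible in the loading process before `SentenceTransformer` can deserialize it. Below is a minimal loading sketch, assuming the class is importable; the `train_script` module name is a placeholder for wherever `AdvancedWeightedPooling` is actually defined.
+
+```python
+import __main__
+
+from sentence_transformers import SentenceTransformer
+
+# Placeholder import path -- substitute the module that defines the class.
+from train_script import AdvancedWeightedPooling
+
+# modules.json stores the type string "__main__.AdvancedWeightedPooling",
+# so expose the class under __main__ before loading the checkpoint.
+__main__.AdvancedWeightedPooling = AdvancedWeightedPooling
+
+model = SentenceTransformer("checkpoint-924")
+embeddings = model.encode(["A sentence to embed."])
+print(embeddings.shape)
+
+# The eval log above reports Matryoshka accuracy at truncated dimensions
+# (1024/512/256/128/64/32); sentence-transformers can truncate at load time:
+model_256 = SentenceTransformer("checkpoint-924", truncate_dim=256)
+```
+
+If the training script itself is run directly (so the class genuinely lives in `__main__`), the import and attribute assignment are unnecessary.
+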
+## Citation
+
+### BibTeX
+
+#### Sentence Transformers
+```bibtex
+@inproceedings{reimers-2019-sentence-bert,
+ title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
+ author = "Reimers, Nils and Gurevych, Iryna",
+ booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
+ month = "11",
+ year = "2019",
+ publisher = "Association for Computational Linguistics",
+ url = "https://arxiv.org/abs/1908.10084",
+}
+```
+
+#### MatryoshkaLoss
+```bibtex
+@misc{kusupati2024matryoshka,
+ title={Matryoshka Representation Learning},
+ author={Aditya Kusupati and Gantavya Bhatt and Aniket Rege and Matthew Wallingford and Aditya Sinha and Vivek Ramanujan and William Howard-Snyder and Kaifeng Chen and Sham Kakade and Prateek Jain and Ali Farhadi},
+ year={2024},
+ eprint={2205.13147},
+ archivePrefix={arXiv},
+ primaryClass={cs.LG}
+}
+```
+
+
\ No newline at end of file
diff --git a/checkpoint-924/config.json b/checkpoint-924/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d7e808503f6dfe049e22510820b28dda2b2eee2
--- /dev/null
+++ b/checkpoint-924/config.json
@@ -0,0 +1,27 @@
+{
+ "architectures": [
+ "XLMRobertaModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "classifier_dropout": null,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 8194,
+ "model_type": "xlm-roberta",
+ "num_attention_heads": 16,
+ "num_hidden_layers": 24,
+ "output_past": true,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.51.1",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 250002
+}
diff --git a/checkpoint-924/config_sentence_transformers.json b/checkpoint-924/config_sentence_transformers.json
new file mode 100644
index 0000000000000000000000000000000000000000..62e46a37396f1211d0647fc417c0f6f7a664cf41
--- /dev/null
+++ b/checkpoint-924/config_sentence_transformers.json
@@ -0,0 +1,10 @@
+{
+ "__version__": {
+ "sentence_transformers": "4.1.0",
+ "transformers": "4.51.1",
+ "pytorch": "2.5.1+cu124"
+ },
+ "prompts": {},
+ "default_prompt_name": null,
+ "similarity_fn_name": "cosine"
+}
\ No newline at end of file
diff --git a/checkpoint-924/modules.json b/checkpoint-924/modules.json
new file mode 100644
index 0000000000000000000000000000000000000000..4d28f1d12104554608cffe035493213a32207dd1
--- /dev/null
+++ b/checkpoint-924/modules.json
@@ -0,0 +1,14 @@
+[
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_AdvancedWeightedPooling",
+ "type": "__main__.AdvancedWeightedPooling"
+ }
+]
\ No newline at end of file
diff --git a/checkpoint-924/optimizer.pt b/checkpoint-924/optimizer.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1129b50dc9202862a7bea1bec89fdca4f930b364
--- /dev/null
+++ b/checkpoint-924/optimizer.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc51977a91aab9b66d630a22a70c170a8a9e9a63d5316cad9f2e95d7556c3d0a
+size 167933422
diff --git a/checkpoint-924/pytorch_model.bin b/checkpoint-924/pytorch_model.bin
new file mode 100644
index 0000000000000000000000000000000000000000..3ca31fa89c1fe43177cde98ca92cf417eea1bf86
--- /dev/null
+++ b/checkpoint-924/pytorch_model.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e969b03be79a3f792216f1febef058e86c2b7f27374fb1e044d87938ceea1ba
+size 2271151270
diff --git a/checkpoint-924/rng_state.pth b/checkpoint-924/rng_state.pth
new file mode 100644
index 0000000000000000000000000000000000000000..ed264349e4c8bc32931a99dffccf31b3431b8080
--- /dev/null
+++ b/checkpoint-924/rng_state.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5cb77afc6c0e9c344104d80b837584feecb643b940b1faedb57e5740a4548567
+size 14244
diff --git a/checkpoint-924/scaler.pt b/checkpoint-924/scaler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..7c5bde2ee10c0de968705ae5057810a6a8f989e4
--- /dev/null
+++ b/checkpoint-924/scaler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ed2b6134a84c253f297eb2e87e16a7970a6377fcbd0badb276a91233005d68d
+size 988
diff --git a/checkpoint-924/scheduler.pt b/checkpoint-924/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3e99a04e1ab0c666e2dd79b0ae34c73ddac56d86
--- /dev/null
+++ b/checkpoint-924/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6d1cc3d0c54c50790a6fb4bb5e5bb9c831e9e7b8b6521b9c99d007da017e8fa
+size 1000
diff --git a/checkpoint-924/sentence_bert_config.json b/checkpoint-924/sentence_bert_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f789d99277496b282d19020415c5ba9ca79ac875
--- /dev/null
+++ b/checkpoint-924/sentence_bert_config.json
@@ -0,0 +1,4 @@
+{
+ "max_seq_length": 512,
+ "do_lower_case": false
+}
\ No newline at end of file
diff --git a/checkpoint-924/sentencepiece.bpe.model b/checkpoint-924/sentencepiece.bpe.model
new file mode 100644
index 0000000000000000000000000000000000000000..7a3f40a75f870bc1f21700cd414dc2acc431583c
--- /dev/null
+++ b/checkpoint-924/sentencepiece.bpe.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+size 5069051
diff --git a/checkpoint-924/special_tokens_map.json b/checkpoint-924/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..b1879d702821e753ffe4245048eee415d54a9385
--- /dev/null
+++ b/checkpoint-924/special_tokens_map.json
@@ -0,0 +1,51 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "cls_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "mask_token": {
+ "content": "",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "sep_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-924/tokenizer.json b/checkpoint-924/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..225ca0f803c45cce8b17cafe2dde2c7682e91938
--- /dev/null
+++ b/checkpoint-924/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9a6af42442a3e3e9f05f618eae0bb2d98ca4f6a6406cb80ef7a4fa865204d61
+size 17083052
diff --git a/checkpoint-924/tokenizer_config.json b/checkpoint-924/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..95bd7c849ee6a47d5c92805af18d187239c1ba4a
--- /dev/null
+++ b/checkpoint-924/tokenizer_config.json
@@ -0,0 +1,56 @@
+{
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "3": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "250001": {
+ "content": "",
+ "lstrip": true,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": true,
+ "cls_token": "",
+ "eos_token": "",
+ "extra_special_tokens": {},
+ "mask_token": "",
+ "model_max_length": 8192,
+ "pad_token": "",
+ "sep_token": "",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "XLMRobertaTokenizer",
+ "unk_token": ""
+}
diff --git a/checkpoint-924/trainer_state.json b/checkpoint-924/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..cfa0ffce9cbabf3a58d65be437c90d464241a49f
--- /dev/null
+++ b/checkpoint-924/trainer_state.json
@@ -0,0 +1,6850 @@
+{
+ "best_global_step": null,
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.2,
+ "eval_steps": 77,
+ "global_step": 924,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0012987012987012987,
+ "grad_norm": 0.011165409348905087,
+ "learning_rate": 0.0,
+ "loss": 11.01,
+ "step": 1
+ },
+ {
+ "epoch": 0.0025974025974025974,
+ "grad_norm": 0.011394021101295948,
+ "learning_rate": 2.1645021645021646e-07,
+ "loss": 11.01,
+ "step": 2
+ },
+ {
+ "epoch": 0.003896103896103896,
+ "grad_norm": 0.012114278972148895,
+ "learning_rate": 4.329004329004329e-07,
+ "loss": 11.0099,
+ "step": 3
+ },
+ {
+ "epoch": 0.005194805194805195,
+ "grad_norm": 0.011571550741791725,
+ "learning_rate": 6.493506493506494e-07,
+ "loss": 11.0097,
+ "step": 4
+ },
+ {
+ "epoch": 0.006493506493506494,
+ "grad_norm": 0.012371700257062912,
+ "learning_rate": 8.658008658008658e-07,
+ "loss": 11.0099,
+ "step": 5
+ },
+ {
+ "epoch": 0.007792207792207792,
+ "grad_norm": 0.012187338434159756,
+ "learning_rate": 1.0822510822510822e-06,
+ "loss": 11.0099,
+ "step": 6
+ },
+ {
+ "epoch": 0.00909090909090909,
+ "grad_norm": 0.012087634764611721,
+ "learning_rate": 1.2987012987012988e-06,
+ "loss": 11.0099,
+ "step": 7
+ },
+ {
+ "epoch": 0.01038961038961039,
+ "grad_norm": 0.011910870671272278,
+ "learning_rate": 1.5151515151515152e-06,
+ "loss": 11.0099,
+ "step": 8
+ },
+ {
+ "epoch": 0.011688311688311689,
+ "grad_norm": 0.01282087154686451,
+ "learning_rate": 1.7316017316017317e-06,
+ "loss": 11.0099,
+ "step": 9
+ },
+ {
+ "epoch": 0.012987012987012988,
+ "grad_norm": 0.01281706988811493,
+ "learning_rate": 1.948051948051948e-06,
+ "loss": 11.0099,
+ "step": 10
+ },
+ {
+ "epoch": 0.014285714285714285,
+ "grad_norm": 0.013667456805706024,
+ "learning_rate": 2.1645021645021643e-06,
+ "loss": 11.0097,
+ "step": 11
+ },
+ {
+ "epoch": 0.015584415584415584,
+ "grad_norm": 0.01482036616653204,
+ "learning_rate": 2.3809523809523808e-06,
+ "loss": 11.0097,
+ "step": 12
+ },
+ {
+ "epoch": 0.016883116883116882,
+ "grad_norm": 0.014698008075356483,
+ "learning_rate": 2.5974025974025976e-06,
+ "loss": 11.0097,
+ "step": 13
+ },
+ {
+ "epoch": 0.01818181818181818,
+ "grad_norm": 0.014225203543901443,
+ "learning_rate": 2.813852813852814e-06,
+ "loss": 11.0097,
+ "step": 14
+ },
+ {
+ "epoch": 0.01948051948051948,
+ "grad_norm": 0.016268573701381683,
+ "learning_rate": 3.0303030303030305e-06,
+ "loss": 11.0096,
+ "step": 15
+ },
+ {
+ "epoch": 0.02077922077922078,
+ "grad_norm": 0.017191331833600998,
+ "learning_rate": 3.2467532467532465e-06,
+ "loss": 11.0095,
+ "step": 16
+ },
+ {
+ "epoch": 0.02207792207792208,
+ "grad_norm": 0.016200311481952667,
+ "learning_rate": 3.4632034632034634e-06,
+ "loss": 11.0096,
+ "step": 17
+ },
+ {
+ "epoch": 0.023376623376623377,
+ "grad_norm": 0.019106043502688408,
+ "learning_rate": 3.67965367965368e-06,
+ "loss": 11.0094,
+ "step": 18
+ },
+ {
+ "epoch": 0.024675324675324677,
+ "grad_norm": 0.017216097563505173,
+ "learning_rate": 3.896103896103896e-06,
+ "loss": 11.0095,
+ "step": 19
+ },
+ {
+ "epoch": 0.025974025974025976,
+ "grad_norm": 0.020744245499372482,
+ "learning_rate": 4.112554112554113e-06,
+ "loss": 11.0093,
+ "step": 20
+ },
+ {
+ "epoch": 0.02727272727272727,
+ "grad_norm": 0.02340623177587986,
+ "learning_rate": 4.329004329004329e-06,
+ "loss": 11.0091,
+ "step": 21
+ },
+ {
+ "epoch": 0.02857142857142857,
+ "grad_norm": 0.023833388462662697,
+ "learning_rate": 4.5454545454545455e-06,
+ "loss": 11.0091,
+ "step": 22
+ },
+ {
+ "epoch": 0.02987012987012987,
+ "grad_norm": 0.027158446609973907,
+ "learning_rate": 4.7619047619047615e-06,
+ "loss": 11.0089,
+ "step": 23
+ },
+ {
+ "epoch": 0.03116883116883117,
+ "grad_norm": 0.029171699658036232,
+ "learning_rate": 4.978354978354978e-06,
+ "loss": 11.0088,
+ "step": 24
+ },
+ {
+ "epoch": 0.032467532467532464,
+ "grad_norm": 0.03013516403734684,
+ "learning_rate": 5.194805194805195e-06,
+ "loss": 11.0087,
+ "step": 25
+ },
+ {
+ "epoch": 0.033766233766233764,
+ "grad_norm": 0.035284992307424545,
+ "learning_rate": 5.411255411255411e-06,
+ "loss": 11.0083,
+ "step": 26
+ },
+ {
+ "epoch": 0.03506493506493506,
+ "grad_norm": 0.03512846305966377,
+ "learning_rate": 5.627705627705628e-06,
+ "loss": 11.0083,
+ "step": 27
+ },
+ {
+ "epoch": 0.03636363636363636,
+ "grad_norm": 0.044411927461624146,
+ "learning_rate": 5.844155844155844e-06,
+ "loss": 11.0077,
+ "step": 28
+ },
+ {
+ "epoch": 0.03766233766233766,
+ "grad_norm": 0.04630338028073311,
+ "learning_rate": 6.060606060606061e-06,
+ "loss": 11.0075,
+ "step": 29
+ },
+ {
+ "epoch": 0.03896103896103896,
+ "grad_norm": 0.05681515112519264,
+ "learning_rate": 6.277056277056277e-06,
+ "loss": 11.0053,
+ "step": 30
+ },
+ {
+ "epoch": 0.04025974025974026,
+ "grad_norm": 0.05574386194348335,
+ "learning_rate": 6.493506493506493e-06,
+ "loss": 11.007,
+ "step": 31
+ },
+ {
+ "epoch": 0.04155844155844156,
+ "grad_norm": 0.07001659274101257,
+ "learning_rate": 6.709956709956711e-06,
+ "loss": 11.006,
+ "step": 32
+ },
+ {
+ "epoch": 0.04285714285714286,
+ "grad_norm": 0.07613399624824524,
+ "learning_rate": 6.926406926406927e-06,
+ "loss": 11.0056,
+ "step": 33
+ },
+ {
+ "epoch": 0.04415584415584416,
+ "grad_norm": 0.0930153876543045,
+ "learning_rate": 7.142857142857143e-06,
+ "loss": 11.0045,
+ "step": 34
+ },
+ {
+ "epoch": 0.045454545454545456,
+ "grad_norm": 0.11075685918331146,
+ "learning_rate": 7.35930735930736e-06,
+ "loss": 11.0035,
+ "step": 35
+ },
+ {
+ "epoch": 0.046753246753246755,
+ "grad_norm": 0.1421954482793808,
+ "learning_rate": 7.5757575757575764e-06,
+ "loss": 11.0017,
+ "step": 36
+ },
+ {
+ "epoch": 0.048051948051948054,
+ "grad_norm": 0.165273517370224,
+ "learning_rate": 7.792207792207792e-06,
+ "loss": 11.0001,
+ "step": 37
+ },
+ {
+ "epoch": 0.04935064935064935,
+ "grad_norm": 0.2073814570903778,
+ "learning_rate": 8.008658008658008e-06,
+ "loss": 10.9974,
+ "step": 38
+ },
+ {
+ "epoch": 0.05064935064935065,
+ "grad_norm": 0.240090012550354,
+ "learning_rate": 8.225108225108225e-06,
+ "loss": 10.9955,
+ "step": 39
+ },
+ {
+ "epoch": 0.05194805194805195,
+ "grad_norm": 0.2896934747695923,
+ "learning_rate": 8.441558441558442e-06,
+ "loss": 10.9926,
+ "step": 40
+ },
+ {
+ "epoch": 0.053246753246753244,
+ "grad_norm": 0.3400895595550537,
+ "learning_rate": 8.658008658008657e-06,
+ "loss": 10.9886,
+ "step": 41
+ },
+ {
+ "epoch": 0.05454545454545454,
+ "grad_norm": 0.4271922707557678,
+ "learning_rate": 8.874458874458876e-06,
+ "loss": 10.983,
+ "step": 42
+ },
+ {
+ "epoch": 0.05584415584415584,
+ "grad_norm": 0.5322330594062805,
+ "learning_rate": 9.090909090909091e-06,
+ "loss": 10.977,
+ "step": 43
+ },
+ {
+ "epoch": 0.05714285714285714,
+ "grad_norm": 0.6354700922966003,
+ "learning_rate": 9.307359307359308e-06,
+ "loss": 10.9703,
+ "step": 44
+ },
+ {
+ "epoch": 0.05844155844155844,
+ "grad_norm": 0.8793215155601501,
+ "learning_rate": 9.523809523809523e-06,
+ "loss": 10.9547,
+ "step": 45
+ },
+ {
+ "epoch": 0.05974025974025974,
+ "grad_norm": 1.008970856666565,
+ "learning_rate": 9.740259740259742e-06,
+ "loss": 10.9435,
+ "step": 46
+ },
+ {
+ "epoch": 0.06103896103896104,
+ "grad_norm": 1.2915687561035156,
+ "learning_rate": 9.956709956709957e-06,
+ "loss": 10.9263,
+ "step": 47
+ },
+ {
+ "epoch": 0.06233766233766234,
+ "grad_norm": 1.5786389112472534,
+ "learning_rate": 1.0173160173160174e-05,
+ "loss": 10.9014,
+ "step": 48
+ },
+ {
+ "epoch": 0.06363636363636363,
+ "grad_norm": 2.4445056915283203,
+ "learning_rate": 1.038961038961039e-05,
+ "loss": 10.8546,
+ "step": 49
+ },
+ {
+ "epoch": 0.06493506493506493,
+ "grad_norm": 2.5852527618408203,
+ "learning_rate": 1.0606060606060607e-05,
+ "loss": 10.8134,
+ "step": 50
+ },
+ {
+ "epoch": 0.06623376623376623,
+ "grad_norm": 2.70867919921875,
+ "learning_rate": 1.0822510822510823e-05,
+ "loss": 10.7805,
+ "step": 51
+ },
+ {
+ "epoch": 0.06753246753246753,
+ "grad_norm": 2.796766757965088,
+ "learning_rate": 1.103896103896104e-05,
+ "loss": 10.7495,
+ "step": 52
+ },
+ {
+ "epoch": 0.06883116883116883,
+ "grad_norm": 2.8984150886535645,
+ "learning_rate": 1.1255411255411256e-05,
+ "loss": 10.6997,
+ "step": 53
+ },
+ {
+ "epoch": 0.07012987012987013,
+ "grad_norm": 3.7665364742279053,
+ "learning_rate": 1.1471861471861473e-05,
+ "loss": 10.5774,
+ "step": 54
+ },
+ {
+ "epoch": 0.07142857142857142,
+ "grad_norm": 4.654474258422852,
+ "learning_rate": 1.1688311688311688e-05,
+ "loss": 10.5443,
+ "step": 55
+ },
+ {
+ "epoch": 0.07272727272727272,
+ "grad_norm": 5.21140718460083,
+ "learning_rate": 1.1904761904761905e-05,
+ "loss": 10.4336,
+ "step": 56
+ },
+ {
+ "epoch": 0.07402597402597402,
+ "grad_norm": 5.744906425476074,
+ "learning_rate": 1.2121212121212122e-05,
+ "loss": 10.3595,
+ "step": 57
+ },
+ {
+ "epoch": 0.07532467532467532,
+ "grad_norm": 7.051860332489014,
+ "learning_rate": 1.2337662337662339e-05,
+ "loss": 10.2175,
+ "step": 58
+ },
+ {
+ "epoch": 0.07662337662337662,
+ "grad_norm": 11.466012954711914,
+ "learning_rate": 1.2554112554112554e-05,
+ "loss": 10.667,
+ "step": 59
+ },
+ {
+ "epoch": 0.07792207792207792,
+ "grad_norm": 7.726934432983398,
+ "learning_rate": 1.2770562770562773e-05,
+ "loss": 10.0476,
+ "step": 60
+ },
+ {
+ "epoch": 0.07922077922077922,
+ "grad_norm": 8.461792945861816,
+ "learning_rate": 1.2987012987012986e-05,
+ "loss": 10.0367,
+ "step": 61
+ },
+ {
+ "epoch": 0.08051948051948052,
+ "grad_norm": 9.503273010253906,
+ "learning_rate": 1.3203463203463205e-05,
+ "loss": 10.0531,
+ "step": 62
+ },
+ {
+ "epoch": 0.08181818181818182,
+ "grad_norm": 9.650754928588867,
+ "learning_rate": 1.3419913419913421e-05,
+ "loss": 9.7963,
+ "step": 63
+ },
+ {
+ "epoch": 0.08311688311688312,
+ "grad_norm": 11.210810661315918,
+ "learning_rate": 1.3636363636363637e-05,
+ "loss": 9.4349,
+ "step": 64
+ },
+ {
+ "epoch": 0.08441558441558442,
+ "grad_norm": 11.565103530883789,
+ "learning_rate": 1.3852813852813853e-05,
+ "loss": 9.5733,
+ "step": 65
+ },
+ {
+ "epoch": 0.08571428571428572,
+ "grad_norm": 13.807746887207031,
+ "learning_rate": 1.406926406926407e-05,
+ "loss": 9.0604,
+ "step": 66
+ },
+ {
+ "epoch": 0.08701298701298701,
+ "grad_norm": 14.853765487670898,
+ "learning_rate": 1.4285714285714285e-05,
+ "loss": 8.7291,
+ "step": 67
+ },
+ {
+ "epoch": 0.08831168831168831,
+ "grad_norm": 15.971939086914062,
+ "learning_rate": 1.4502164502164502e-05,
+ "loss": 8.38,
+ "step": 68
+ },
+ {
+ "epoch": 0.08961038961038961,
+ "grad_norm": 16.851449966430664,
+ "learning_rate": 1.471861471861472e-05,
+ "loss": 8.2196,
+ "step": 69
+ },
+ {
+ "epoch": 0.09090909090909091,
+ "grad_norm": 18.741369247436523,
+ "learning_rate": 1.4935064935064936e-05,
+ "loss": 7.8955,
+ "step": 70
+ },
+ {
+ "epoch": 0.09220779220779221,
+ "grad_norm": 18.92623519897461,
+ "learning_rate": 1.5151515151515153e-05,
+ "loss": 7.2905,
+ "step": 71
+ },
+ {
+ "epoch": 0.09350649350649351,
+ "grad_norm": 20.634117126464844,
+ "learning_rate": 1.5367965367965366e-05,
+ "loss": 7.3614,
+ "step": 72
+ },
+ {
+ "epoch": 0.09480519480519481,
+ "grad_norm": 20.857202529907227,
+ "learning_rate": 1.5584415584415583e-05,
+ "loss": 6.6031,
+ "step": 73
+ },
+ {
+ "epoch": 0.09610389610389611,
+ "grad_norm": 22.039953231811523,
+ "learning_rate": 1.5800865800865803e-05,
+ "loss": 5.9224,
+ "step": 74
+ },
+ {
+ "epoch": 0.09740259740259741,
+ "grad_norm": 22.81475257873535,
+ "learning_rate": 1.6017316017316017e-05,
+ "loss": 5.7627,
+ "step": 75
+ },
+ {
+ "epoch": 0.0987012987012987,
+ "grad_norm": 22.588449478149414,
+ "learning_rate": 1.6233766233766234e-05,
+ "loss": 5.2194,
+ "step": 76
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 24.180652618408203,
+ "learning_rate": 1.645021645021645e-05,
+ "loss": 5.3339,
+ "step": 77
+ },
+ {
+ "epoch": 0.1,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9375,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9375,
+ "eval_global_dataset_loss": 1.779486060142517,
+ "eval_global_dataset_runtime": 113.4191,
+ "eval_global_dataset_samples_per_second": 9.443,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.9140625,
+ "eval_sts-test-1024_pearson_cosine": 0.8570094674494524,
+ "eval_sts-test-1024_spearman_cosine": 0.9032869635214775,
+ "eval_sts-test-128_pearson_cosine": 0.8454774972598778,
+ "eval_sts-test-128_spearman_cosine": 0.8941657316165149,
+ "eval_sts-test-256_pearson_cosine": 0.846547357013354,
+ "eval_sts-test-256_spearman_cosine": 0.8961152942356522,
+ "eval_sts-test-32_pearson_cosine": 0.8158296423835969,
+ "eval_sts-test-32_spearman_cosine": 0.8781449456856982,
+ "eval_sts-test-512_pearson_cosine": 0.8548761563192739,
+ "eval_sts-test-512_spearman_cosine": 0.9013753538201782,
+ "eval_sts-test-64_pearson_cosine": 0.8326302932611978,
+ "eval_sts-test-64_spearman_cosine": 0.8899336232311702,
+ "eval_sts-test_pearson_cosine": 0.8635863698279433,
+ "eval_sts-test_spearman_cosine": 0.906139519148046,
+ "step": 77
+ },
+ {
+ "epoch": 0.1012987012987013,
+ "grad_norm": 25.592409133911133,
+ "learning_rate": 1.6666666666666667e-05,
+ "loss": 5.0749,
+ "step": 78
+ },
+ {
+ "epoch": 0.1025974025974026,
+ "grad_norm": 25.978023529052734,
+ "learning_rate": 1.6883116883116884e-05,
+ "loss": 4.7881,
+ "step": 79
+ },
+ {
+ "epoch": 0.1038961038961039,
+ "grad_norm": 29.861833572387695,
+ "learning_rate": 1.70995670995671e-05,
+ "loss": 4.2653,
+ "step": 80
+ },
+ {
+ "epoch": 0.10519480519480519,
+ "grad_norm": 28.283815383911133,
+ "learning_rate": 1.7316017316017315e-05,
+ "loss": 4.0863,
+ "step": 81
+ },
+ {
+ "epoch": 0.10649350649350649,
+ "grad_norm": 29.428701400756836,
+ "learning_rate": 1.7532467532467535e-05,
+ "loss": 4.2845,
+ "step": 82
+ },
+ {
+ "epoch": 0.10779220779220779,
+ "grad_norm": 27.637096405029297,
+ "learning_rate": 1.7748917748917752e-05,
+ "loss": 3.8356,
+ "step": 83
+ },
+ {
+ "epoch": 0.10909090909090909,
+ "grad_norm": 28.08953857421875,
+ "learning_rate": 1.7965367965367965e-05,
+ "loss": 3.5652,
+ "step": 84
+ },
+ {
+ "epoch": 0.11038961038961038,
+ "grad_norm": 28.57408332824707,
+ "learning_rate": 1.8181818181818182e-05,
+ "loss": 4.1484,
+ "step": 85
+ },
+ {
+ "epoch": 0.11168831168831168,
+ "grad_norm": 26.506061553955078,
+ "learning_rate": 1.83982683982684e-05,
+ "loss": 3.4415,
+ "step": 86
+ },
+ {
+ "epoch": 0.11298701298701298,
+ "grad_norm": 26.066709518432617,
+ "learning_rate": 1.8614718614718616e-05,
+ "loss": 3.129,
+ "step": 87
+ },
+ {
+ "epoch": 0.11428571428571428,
+ "grad_norm": 27.7386531829834,
+ "learning_rate": 1.8831168831168833e-05,
+ "loss": 3.5604,
+ "step": 88
+ },
+ {
+ "epoch": 0.11558441558441558,
+ "grad_norm": 26.726911544799805,
+ "learning_rate": 1.9047619047619046e-05,
+ "loss": 3.6036,
+ "step": 89
+ },
+ {
+ "epoch": 0.11688311688311688,
+ "grad_norm": 22.78415870666504,
+ "learning_rate": 1.9264069264069266e-05,
+ "loss": 2.8276,
+ "step": 90
+ },
+ {
+ "epoch": 0.11818181818181818,
+ "grad_norm": 23.38465118408203,
+ "learning_rate": 1.9480519480519483e-05,
+ "loss": 3.4752,
+ "step": 91
+ },
+ {
+ "epoch": 0.11948051948051948,
+ "grad_norm": 24.04189109802246,
+ "learning_rate": 1.9696969696969697e-05,
+ "loss": 3.3796,
+ "step": 92
+ },
+ {
+ "epoch": 0.12077922077922078,
+ "grad_norm": 20.70046043395996,
+ "learning_rate": 1.9913419913419914e-05,
+ "loss": 2.7567,
+ "step": 93
+ },
+ {
+ "epoch": 0.12207792207792208,
+ "grad_norm": 22.61043930053711,
+ "learning_rate": 2.012987012987013e-05,
+ "loss": 3.2728,
+ "step": 94
+ },
+ {
+ "epoch": 0.12337662337662338,
+ "grad_norm": 21.59564208984375,
+ "learning_rate": 2.0346320346320347e-05,
+ "loss": 2.8496,
+ "step": 95
+ },
+ {
+ "epoch": 0.12467532467532468,
+ "grad_norm": 19.719247817993164,
+ "learning_rate": 2.0562770562770564e-05,
+ "loss": 2.6072,
+ "step": 96
+ },
+ {
+ "epoch": 0.12597402597402596,
+ "grad_norm": 19.73863410949707,
+ "learning_rate": 2.077922077922078e-05,
+ "loss": 2.5959,
+ "step": 97
+ },
+ {
+ "epoch": 0.12727272727272726,
+ "grad_norm": 20.621767044067383,
+ "learning_rate": 2.0995670995670998e-05,
+ "loss": 2.4787,
+ "step": 98
+ },
+ {
+ "epoch": 0.12857142857142856,
+ "grad_norm": 18.53163719177246,
+ "learning_rate": 2.1212121212121215e-05,
+ "loss": 2.1368,
+ "step": 99
+ },
+ {
+ "epoch": 0.12987012987012986,
+ "grad_norm": 19.333633422851562,
+ "learning_rate": 2.1428571428571428e-05,
+ "loss": 2.233,
+ "step": 100
+ },
+ {
+ "epoch": 0.13116883116883116,
+ "grad_norm": 20.839101791381836,
+ "learning_rate": 2.1645021645021645e-05,
+ "loss": 2.4691,
+ "step": 101
+ },
+ {
+ "epoch": 0.13246753246753246,
+ "grad_norm": 20.799968719482422,
+ "learning_rate": 2.1861471861471862e-05,
+ "loss": 2.3472,
+ "step": 102
+ },
+ {
+ "epoch": 0.13376623376623376,
+ "grad_norm": 18.40387725830078,
+ "learning_rate": 2.207792207792208e-05,
+ "loss": 2.0578,
+ "step": 103
+ },
+ {
+ "epoch": 0.13506493506493505,
+ "grad_norm": 19.460140228271484,
+ "learning_rate": 2.2294372294372296e-05,
+ "loss": 2.3437,
+ "step": 104
+ },
+ {
+ "epoch": 0.13636363636363635,
+ "grad_norm": 20.304363250732422,
+ "learning_rate": 2.2510822510822512e-05,
+ "loss": 2.4822,
+ "step": 105
+ },
+ {
+ "epoch": 0.13766233766233765,
+ "grad_norm": 19.452438354492188,
+ "learning_rate": 2.272727272727273e-05,
+ "loss": 2.2261,
+ "step": 106
+ },
+ {
+ "epoch": 0.13896103896103895,
+ "grad_norm": 18.387685775756836,
+ "learning_rate": 2.2943722943722946e-05,
+ "loss": 2.3113,
+ "step": 107
+ },
+ {
+ "epoch": 0.14025974025974025,
+ "grad_norm": 19.084980010986328,
+ "learning_rate": 2.3160173160173163e-05,
+ "loss": 2.2162,
+ "step": 108
+ },
+ {
+ "epoch": 0.14155844155844155,
+ "grad_norm": 18.559663772583008,
+ "learning_rate": 2.3376623376623376e-05,
+ "loss": 2.0638,
+ "step": 109
+ },
+ {
+ "epoch": 0.14285714285714285,
+ "grad_norm": 17.65582847595215,
+ "learning_rate": 2.3593073593073593e-05,
+ "loss": 1.9822,
+ "step": 110
+ },
+ {
+ "epoch": 0.14415584415584415,
+ "grad_norm": 17.26728057861328,
+ "learning_rate": 2.380952380952381e-05,
+ "loss": 1.9165,
+ "step": 111
+ },
+ {
+ "epoch": 0.14545454545454545,
+ "grad_norm": 17.52690887451172,
+ "learning_rate": 2.4025974025974027e-05,
+ "loss": 2.0633,
+ "step": 112
+ },
+ {
+ "epoch": 0.14675324675324675,
+ "grad_norm": 17.66120147705078,
+ "learning_rate": 2.4242424242424244e-05,
+ "loss": 1.9012,
+ "step": 113
+ },
+ {
+ "epoch": 0.14805194805194805,
+ "grad_norm": 17.794565200805664,
+ "learning_rate": 2.4458874458874457e-05,
+ "loss": 1.8559,
+ "step": 114
+ },
+ {
+ "epoch": 0.14935064935064934,
+ "grad_norm": 18.78278923034668,
+ "learning_rate": 2.4675324675324678e-05,
+ "loss": 2.1804,
+ "step": 115
+ },
+ {
+ "epoch": 0.15064935064935064,
+ "grad_norm": 17.93096351623535,
+ "learning_rate": 2.4891774891774894e-05,
+ "loss": 1.9728,
+ "step": 116
+ },
+ {
+ "epoch": 0.15194805194805194,
+ "grad_norm": 16.865169525146484,
+ "learning_rate": 2.5108225108225108e-05,
+ "loss": 1.795,
+ "step": 117
+ },
+ {
+ "epoch": 0.15324675324675324,
+ "grad_norm": 16.265134811401367,
+ "learning_rate": 2.5324675324675325e-05,
+ "loss": 1.5135,
+ "step": 118
+ },
+ {
+ "epoch": 0.15454545454545454,
+ "grad_norm": 18.573184967041016,
+ "learning_rate": 2.5541125541125545e-05,
+ "loss": 1.8776,
+ "step": 119
+ },
+ {
+ "epoch": 0.15584415584415584,
+ "grad_norm": 18.620136260986328,
+ "learning_rate": 2.575757575757576e-05,
+ "loss": 1.9858,
+ "step": 120
+ },
+ {
+ "epoch": 0.15714285714285714,
+ "grad_norm": 17.044523239135742,
+ "learning_rate": 2.5974025974025972e-05,
+ "loss": 1.6143,
+ "step": 121
+ },
+ {
+ "epoch": 0.15844155844155844,
+ "grad_norm": 17.63654136657715,
+ "learning_rate": 2.6190476190476192e-05,
+ "loss": 1.7321,
+ "step": 122
+ },
+ {
+ "epoch": 0.15974025974025974,
+ "grad_norm": 17.7145938873291,
+ "learning_rate": 2.640692640692641e-05,
+ "loss": 1.7272,
+ "step": 123
+ },
+ {
+ "epoch": 0.16103896103896104,
+ "grad_norm": 16.036033630371094,
+ "learning_rate": 2.6623376623376623e-05,
+ "loss": 1.5508,
+ "step": 124
+ },
+ {
+ "epoch": 0.16233766233766234,
+ "grad_norm": 16.252811431884766,
+ "learning_rate": 2.6839826839826843e-05,
+ "loss": 1.2961,
+ "step": 125
+ },
+ {
+ "epoch": 0.16363636363636364,
+ "grad_norm": 17.2477970123291,
+ "learning_rate": 2.7056277056277056e-05,
+ "loss": 1.6425,
+ "step": 126
+ },
+ {
+ "epoch": 0.16493506493506493,
+ "grad_norm": 16.966596603393555,
+ "learning_rate": 2.7272727272727273e-05,
+ "loss": 1.5193,
+ "step": 127
+ },
+ {
+ "epoch": 0.16623376623376623,
+ "grad_norm": 16.886764526367188,
+ "learning_rate": 2.7489177489177493e-05,
+ "loss": 1.6626,
+ "step": 128
+ },
+ {
+ "epoch": 0.16753246753246753,
+ "grad_norm": 18.556442260742188,
+ "learning_rate": 2.7705627705627707e-05,
+ "loss": 2.0871,
+ "step": 129
+ },
+ {
+ "epoch": 0.16883116883116883,
+ "grad_norm": 15.574275970458984,
+ "learning_rate": 2.792207792207792e-05,
+ "loss": 1.5114,
+ "step": 130
+ },
+ {
+ "epoch": 0.17012987012987013,
+ "grad_norm": 16.09569549560547,
+ "learning_rate": 2.813852813852814e-05,
+ "loss": 1.4414,
+ "step": 131
+ },
+ {
+ "epoch": 0.17142857142857143,
+ "grad_norm": 16.894790649414062,
+ "learning_rate": 2.8354978354978357e-05,
+ "loss": 1.5323,
+ "step": 132
+ },
+ {
+ "epoch": 0.17272727272727273,
+ "grad_norm": 14.77379035949707,
+ "learning_rate": 2.857142857142857e-05,
+ "loss": 1.2476,
+ "step": 133
+ },
+ {
+ "epoch": 0.17402597402597403,
+ "grad_norm": 16.156719207763672,
+ "learning_rate": 2.878787878787879e-05,
+ "loss": 1.4671,
+ "step": 134
+ },
+ {
+ "epoch": 0.17532467532467533,
+ "grad_norm": 16.27682113647461,
+ "learning_rate": 2.9004329004329005e-05,
+ "loss": 1.5581,
+ "step": 135
+ },
+ {
+ "epoch": 0.17662337662337663,
+ "grad_norm": 16.400676727294922,
+ "learning_rate": 2.922077922077922e-05,
+ "loss": 1.5237,
+ "step": 136
+ },
+ {
+ "epoch": 0.17792207792207793,
+ "grad_norm": 15.038251876831055,
+ "learning_rate": 2.943722943722944e-05,
+ "loss": 1.2613,
+ "step": 137
+ },
+ {
+ "epoch": 0.17922077922077922,
+ "grad_norm": 15.061074256896973,
+ "learning_rate": 2.9653679653679655e-05,
+ "loss": 1.2949,
+ "step": 138
+ },
+ {
+ "epoch": 0.18051948051948052,
+ "grad_norm": 15.902937889099121,
+ "learning_rate": 2.9870129870129872e-05,
+ "loss": 1.3592,
+ "step": 139
+ },
+ {
+ "epoch": 0.18181818181818182,
+ "grad_norm": 15.730782508850098,
+ "learning_rate": 3.0086580086580092e-05,
+ "loss": 1.3918,
+ "step": 140
+ },
+ {
+ "epoch": 0.18311688311688312,
+ "grad_norm": 13.916067123413086,
+ "learning_rate": 3.0303030303030306e-05,
+ "loss": 1.0371,
+ "step": 141
+ },
+ {
+ "epoch": 0.18441558441558442,
+ "grad_norm": 16.503026962280273,
+ "learning_rate": 3.051948051948052e-05,
+ "loss": 1.5028,
+ "step": 142
+ },
+ {
+ "epoch": 0.18571428571428572,
+ "grad_norm": 14.891773223876953,
+ "learning_rate": 3.073593073593073e-05,
+ "loss": 1.2347,
+ "step": 143
+ },
+ {
+ "epoch": 0.18701298701298702,
+ "grad_norm": 14.444226264953613,
+ "learning_rate": 3.095238095238095e-05,
+ "loss": 1.2731,
+ "step": 144
+ },
+ {
+ "epoch": 0.18831168831168832,
+ "grad_norm": 14.655235290527344,
+ "learning_rate": 3.1168831168831166e-05,
+ "loss": 1.2688,
+ "step": 145
+ },
+ {
+ "epoch": 0.18961038961038962,
+ "grad_norm": 15.224197387695312,
+ "learning_rate": 3.1385281385281387e-05,
+ "loss": 1.3688,
+ "step": 146
+ },
+ {
+ "epoch": 0.19090909090909092,
+ "grad_norm": 17.209598541259766,
+ "learning_rate": 3.160173160173161e-05,
+ "loss": 1.8782,
+ "step": 147
+ },
+ {
+ "epoch": 0.19220779220779222,
+ "grad_norm": 12.981212615966797,
+ "learning_rate": 3.181818181818182e-05,
+ "loss": 1.0597,
+ "step": 148
+ },
+ {
+ "epoch": 0.19350649350649352,
+ "grad_norm": 14.368124961853027,
+ "learning_rate": 3.2034632034632034e-05,
+ "loss": 1.229,
+ "step": 149
+ },
+ {
+ "epoch": 0.19480519480519481,
+ "grad_norm": 13.885086059570312,
+ "learning_rate": 3.2251082251082254e-05,
+ "loss": 1.1233,
+ "step": 150
+ },
+ {
+ "epoch": 0.1961038961038961,
+ "grad_norm": 14.75069522857666,
+ "learning_rate": 3.246753246753247e-05,
+ "loss": 1.2579,
+ "step": 151
+ },
+ {
+ "epoch": 0.1974025974025974,
+ "grad_norm": 14.663084030151367,
+ "learning_rate": 3.268398268398268e-05,
+ "loss": 1.1547,
+ "step": 152
+ },
+ {
+ "epoch": 0.1987012987012987,
+ "grad_norm": 16.447593688964844,
+ "learning_rate": 3.29004329004329e-05,
+ "loss": 1.3986,
+ "step": 153
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 16.009765625,
+ "learning_rate": 3.311688311688312e-05,
+ "loss": 1.3071,
+ "step": 154
+ },
+ {
+ "epoch": 0.2,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9375,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.921875,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.8410314917564392,
+ "eval_global_dataset_runtime": 113.466,
+ "eval_global_dataset_samples_per_second": 9.439,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.9140625,
+ "eval_sts-test-1024_pearson_cosine": 0.8637686212043777,
+ "eval_sts-test-1024_spearman_cosine": 0.9158389959804479,
+ "eval_sts-test-128_pearson_cosine": 0.8587735042324499,
+ "eval_sts-test-128_spearman_cosine": 0.9079734327816954,
+ "eval_sts-test-256_pearson_cosine": 0.8614854438059253,
+ "eval_sts-test-256_spearman_cosine": 0.9113397849846575,
+ "eval_sts-test-32_pearson_cosine": 0.8204607604412608,
+ "eval_sts-test-32_spearman_cosine": 0.8866968838230891,
+ "eval_sts-test-512_pearson_cosine": 0.8636960327159487,
+ "eval_sts-test-512_spearman_cosine": 0.9151565595521514,
+ "eval_sts-test-64_pearson_cosine": 0.8415761749347184,
+ "eval_sts-test-64_spearman_cosine": 0.9004612613745113,
+ "eval_sts-test_pearson_cosine": 0.8701225080029533,
+ "eval_sts-test_spearman_cosine": 0.9182271654330917,
+ "step": 154
+ },
+ {
+ "epoch": 0.2012987012987013,
+ "grad_norm": 16.777559280395508,
+ "learning_rate": 3.3333333333333335e-05,
+ "loss": 1.5306,
+ "step": 155
+ },
+ {
+ "epoch": 0.2025974025974026,
+ "grad_norm": 14.096006393432617,
+ "learning_rate": 3.3549783549783555e-05,
+ "loss": 1.0775,
+ "step": 156
+ },
+ {
+ "epoch": 0.2038961038961039,
+ "grad_norm": 15.965473175048828,
+ "learning_rate": 3.376623376623377e-05,
+ "loss": 1.2954,
+ "step": 157
+ },
+ {
+ "epoch": 0.2051948051948052,
+ "grad_norm": 13.835360527038574,
+ "learning_rate": 3.398268398268398e-05,
+ "loss": 1.1233,
+ "step": 158
+ },
+ {
+ "epoch": 0.2064935064935065,
+ "grad_norm": 14.972121238708496,
+ "learning_rate": 3.41991341991342e-05,
+ "loss": 1.2492,
+ "step": 159
+ },
+ {
+ "epoch": 0.2077922077922078,
+ "grad_norm": 14.487752914428711,
+ "learning_rate": 3.4415584415584416e-05,
+ "loss": 1.2897,
+ "step": 160
+ },
+ {
+ "epoch": 0.20909090909090908,
+ "grad_norm": 13.836915016174316,
+ "learning_rate": 3.463203463203463e-05,
+ "loss": 1.0752,
+ "step": 161
+ },
+ {
+ "epoch": 0.21038961038961038,
+ "grad_norm": 15.030878067016602,
+ "learning_rate": 3.484848484848485e-05,
+ "loss": 1.269,
+ "step": 162
+ },
+ {
+ "epoch": 0.21168831168831168,
+ "grad_norm": 14.309725761413574,
+ "learning_rate": 3.506493506493507e-05,
+ "loss": 1.1273,
+ "step": 163
+ },
+ {
+ "epoch": 0.21298701298701297,
+ "grad_norm": 14.790210723876953,
+ "learning_rate": 3.528138528138528e-05,
+ "loss": 1.2208,
+ "step": 164
+ },
+ {
+ "epoch": 0.21428571428571427,
+ "grad_norm": 14.586909294128418,
+ "learning_rate": 3.5497835497835503e-05,
+ "loss": 1.2356,
+ "step": 165
+ },
+ {
+ "epoch": 0.21558441558441557,
+ "grad_norm": 13.816767692565918,
+ "learning_rate": 3.571428571428572e-05,
+ "loss": 1.1039,
+ "step": 166
+ },
+ {
+ "epoch": 0.21688311688311687,
+ "grad_norm": 13.635117530822754,
+ "learning_rate": 3.593073593073593e-05,
+ "loss": 1.0146,
+ "step": 167
+ },
+ {
+ "epoch": 0.21818181818181817,
+ "grad_norm": 14.119977951049805,
+ "learning_rate": 3.6147186147186144e-05,
+ "loss": 1.3097,
+ "step": 168
+ },
+ {
+ "epoch": 0.21948051948051947,
+ "grad_norm": 14.68720531463623,
+ "learning_rate": 3.6363636363636364e-05,
+ "loss": 1.2947,
+ "step": 169
+ },
+ {
+ "epoch": 0.22077922077922077,
+ "grad_norm": 14.55096435546875,
+ "learning_rate": 3.6580086580086584e-05,
+ "loss": 1.291,
+ "step": 170
+ },
+ {
+ "epoch": 0.22207792207792207,
+ "grad_norm": 13.37848949432373,
+ "learning_rate": 3.67965367965368e-05,
+ "loss": 1.0295,
+ "step": 171
+ },
+ {
+ "epoch": 0.22337662337662337,
+ "grad_norm": 12.699470520019531,
+ "learning_rate": 3.701298701298702e-05,
+ "loss": 1.0948,
+ "step": 172
+ },
+ {
+ "epoch": 0.22467532467532467,
+ "grad_norm": 13.583250999450684,
+ "learning_rate": 3.722943722943723e-05,
+ "loss": 1.1393,
+ "step": 173
+ },
+ {
+ "epoch": 0.22597402597402597,
+ "grad_norm": 12.980260848999023,
+ "learning_rate": 3.7445887445887445e-05,
+ "loss": 1.0063,
+ "step": 174
+ },
+ {
+ "epoch": 0.22727272727272727,
+ "grad_norm": 14.338798522949219,
+ "learning_rate": 3.7662337662337665e-05,
+ "loss": 1.1769,
+ "step": 175
+ },
+ {
+ "epoch": 0.22857142857142856,
+ "grad_norm": 14.311129570007324,
+ "learning_rate": 3.787878787878788e-05,
+ "loss": 1.1731,
+ "step": 176
+ },
+ {
+ "epoch": 0.22987012987012986,
+ "grad_norm": 12.44087028503418,
+ "learning_rate": 3.809523809523809e-05,
+ "loss": 0.7476,
+ "step": 177
+ },
+ {
+ "epoch": 0.23116883116883116,
+ "grad_norm": 14.917692184448242,
+ "learning_rate": 3.831168831168831e-05,
+ "loss": 1.022,
+ "step": 178
+ },
+ {
+ "epoch": 0.23246753246753246,
+ "grad_norm": 13.485183715820312,
+ "learning_rate": 3.852813852813853e-05,
+ "loss": 0.9579,
+ "step": 179
+ },
+ {
+ "epoch": 0.23376623376623376,
+ "grad_norm": 13.994976997375488,
+ "learning_rate": 3.8744588744588746e-05,
+ "loss": 1.0753,
+ "step": 180
+ },
+ {
+ "epoch": 0.23506493506493506,
+ "grad_norm": 15.207565307617188,
+ "learning_rate": 3.8961038961038966e-05,
+ "loss": 1.2243,
+ "step": 181
+ },
+ {
+ "epoch": 0.23636363636363636,
+ "grad_norm": 15.602945327758789,
+ "learning_rate": 3.917748917748918e-05,
+ "loss": 1.2154,
+ "step": 182
+ },
+ {
+ "epoch": 0.23766233766233766,
+ "grad_norm": 12.87066650390625,
+ "learning_rate": 3.939393939393939e-05,
+ "loss": 0.8147,
+ "step": 183
+ },
+ {
+ "epoch": 0.23896103896103896,
+ "grad_norm": 14.42218017578125,
+ "learning_rate": 3.9610389610389614e-05,
+ "loss": 1.1086,
+ "step": 184
+ },
+ {
+ "epoch": 0.24025974025974026,
+ "grad_norm": 14.212447166442871,
+ "learning_rate": 3.982683982683983e-05,
+ "loss": 1.0155,
+ "step": 185
+ },
+ {
+ "epoch": 0.24155844155844156,
+ "grad_norm": 15.133934020996094,
+ "learning_rate": 4.004329004329004e-05,
+ "loss": 1.1898,
+ "step": 186
+ },
+ {
+ "epoch": 0.24285714285714285,
+ "grad_norm": 13.727578163146973,
+ "learning_rate": 4.025974025974026e-05,
+ "loss": 1.11,
+ "step": 187
+ },
+ {
+ "epoch": 0.24415584415584415,
+ "grad_norm": 15.756597518920898,
+ "learning_rate": 4.047619047619048e-05,
+ "loss": 1.3128,
+ "step": 188
+ },
+ {
+ "epoch": 0.24545454545454545,
+ "grad_norm": 13.591508865356445,
+ "learning_rate": 4.0692640692640695e-05,
+ "loss": 1.0642,
+ "step": 189
+ },
+ {
+ "epoch": 0.24675324675324675,
+ "grad_norm": 13.608804702758789,
+ "learning_rate": 4.0909090909090915e-05,
+ "loss": 0.8932,
+ "step": 190
+ },
+ {
+ "epoch": 0.24805194805194805,
+ "grad_norm": 13.768484115600586,
+ "learning_rate": 4.112554112554113e-05,
+ "loss": 1.1683,
+ "step": 191
+ },
+ {
+ "epoch": 0.24935064935064935,
+ "grad_norm": 13.267477989196777,
+ "learning_rate": 4.134199134199134e-05,
+ "loss": 1.0554,
+ "step": 192
+ },
+ {
+ "epoch": 0.2506493506493506,
+ "grad_norm": 14.142014503479004,
+ "learning_rate": 4.155844155844156e-05,
+ "loss": 1.2186,
+ "step": 193
+ },
+ {
+ "epoch": 0.2519480519480519,
+ "grad_norm": 13.442983627319336,
+ "learning_rate": 4.1774891774891775e-05,
+ "loss": 1.027,
+ "step": 194
+ },
+ {
+ "epoch": 0.2532467532467532,
+ "grad_norm": 12.284928321838379,
+ "learning_rate": 4.1991341991341996e-05,
+ "loss": 0.799,
+ "step": 195
+ },
+ {
+ "epoch": 0.2545454545454545,
+ "grad_norm": 14.540982246398926,
+ "learning_rate": 4.220779220779221e-05,
+ "loss": 1.099,
+ "step": 196
+ },
+ {
+ "epoch": 0.2558441558441558,
+ "grad_norm": 12.08164119720459,
+ "learning_rate": 4.242424242424243e-05,
+ "loss": 0.7717,
+ "step": 197
+ },
+ {
+ "epoch": 0.2571428571428571,
+ "grad_norm": 14.687990188598633,
+ "learning_rate": 4.264069264069264e-05,
+ "loss": 1.1011,
+ "step": 198
+ },
+ {
+ "epoch": 0.2584415584415584,
+ "grad_norm": 13.88438606262207,
+ "learning_rate": 4.2857142857142856e-05,
+ "loss": 1.0083,
+ "step": 199
+ },
+ {
+ "epoch": 0.2597402597402597,
+ "grad_norm": 13.972654342651367,
+ "learning_rate": 4.3073593073593077e-05,
+ "loss": 1.1488,
+ "step": 200
+ },
+ {
+ "epoch": 0.261038961038961,
+ "grad_norm": 14.187093734741211,
+ "learning_rate": 4.329004329004329e-05,
+ "loss": 1.0453,
+ "step": 201
+ },
+ {
+ "epoch": 0.2623376623376623,
+ "grad_norm": 14.916047096252441,
+ "learning_rate": 4.3506493506493503e-05,
+ "loss": 1.2942,
+ "step": 202
+ },
+ {
+ "epoch": 0.2636363636363636,
+ "grad_norm": 13.239340782165527,
+ "learning_rate": 4.3722943722943724e-05,
+ "loss": 1.0279,
+ "step": 203
+ },
+ {
+ "epoch": 0.2649350649350649,
+ "grad_norm": 12.49507999420166,
+ "learning_rate": 4.3939393939393944e-05,
+ "loss": 0.9493,
+ "step": 204
+ },
+ {
+ "epoch": 0.2662337662337662,
+ "grad_norm": 14.923321723937988,
+ "learning_rate": 4.415584415584416e-05,
+ "loss": 1.2698,
+ "step": 205
+ },
+ {
+ "epoch": 0.2675324675324675,
+ "grad_norm": 13.260164260864258,
+ "learning_rate": 4.437229437229438e-05,
+ "loss": 1.066,
+ "step": 206
+ },
+ {
+ "epoch": 0.2688311688311688,
+ "grad_norm": 13.298815727233887,
+ "learning_rate": 4.458874458874459e-05,
+ "loss": 1.0352,
+ "step": 207
+ },
+ {
+ "epoch": 0.2701298701298701,
+ "grad_norm": 12.438820838928223,
+ "learning_rate": 4.4805194805194805e-05,
+ "loss": 0.8514,
+ "step": 208
+ },
+ {
+ "epoch": 0.2714285714285714,
+ "grad_norm": 13.921923637390137,
+ "learning_rate": 4.5021645021645025e-05,
+ "loss": 1.094,
+ "step": 209
+ },
+ {
+ "epoch": 0.2727272727272727,
+ "grad_norm": 13.15014934539795,
+ "learning_rate": 4.523809523809524e-05,
+ "loss": 1.0087,
+ "step": 210
+ },
+ {
+ "epoch": 0.274025974025974,
+ "grad_norm": 12.404814720153809,
+ "learning_rate": 4.545454545454546e-05,
+ "loss": 0.9035,
+ "step": 211
+ },
+ {
+ "epoch": 0.2753246753246753,
+ "grad_norm": 13.696551322937012,
+ "learning_rate": 4.567099567099568e-05,
+ "loss": 0.9475,
+ "step": 212
+ },
+ {
+ "epoch": 0.2766233766233766,
+ "grad_norm": 13.52684211730957,
+ "learning_rate": 4.588744588744589e-05,
+ "loss": 0.9838,
+ "step": 213
+ },
+ {
+ "epoch": 0.2779220779220779,
+ "grad_norm": 14.523106575012207,
+ "learning_rate": 4.6103896103896106e-05,
+ "loss": 1.1285,
+ "step": 214
+ },
+ {
+ "epoch": 0.2792207792207792,
+ "grad_norm": 12.710862159729004,
+ "learning_rate": 4.6320346320346326e-05,
+ "loss": 0.7406,
+ "step": 215
+ },
+ {
+ "epoch": 0.2805194805194805,
+ "grad_norm": 13.21323299407959,
+ "learning_rate": 4.653679653679654e-05,
+ "loss": 0.9309,
+ "step": 216
+ },
+ {
+ "epoch": 0.2818181818181818,
+ "grad_norm": 13.333338737487793,
+ "learning_rate": 4.675324675324675e-05,
+ "loss": 0.8669,
+ "step": 217
+ },
+ {
+ "epoch": 0.2831168831168831,
+ "grad_norm": 14.696738243103027,
+ "learning_rate": 4.696969696969697e-05,
+ "loss": 1.0496,
+ "step": 218
+ },
+ {
+ "epoch": 0.2844155844155844,
+ "grad_norm": 14.372692108154297,
+ "learning_rate": 4.718614718614719e-05,
+ "loss": 0.9547,
+ "step": 219
+ },
+ {
+ "epoch": 0.2857142857142857,
+ "grad_norm": 11.897599220275879,
+ "learning_rate": 4.740259740259741e-05,
+ "loss": 0.7442,
+ "step": 220
+ },
+ {
+ "epoch": 0.287012987012987,
+ "grad_norm": 12.831521034240723,
+ "learning_rate": 4.761904761904762e-05,
+ "loss": 0.6949,
+ "step": 221
+ },
+ {
+ "epoch": 0.2883116883116883,
+ "grad_norm": 12.80624008178711,
+ "learning_rate": 4.783549783549784e-05,
+ "loss": 0.958,
+ "step": 222
+ },
+ {
+ "epoch": 0.2896103896103896,
+ "grad_norm": 11.822639465332031,
+ "learning_rate": 4.8051948051948054e-05,
+ "loss": 0.7528,
+ "step": 223
+ },
+ {
+ "epoch": 0.2909090909090909,
+ "grad_norm": 12.583551406860352,
+ "learning_rate": 4.826839826839827e-05,
+ "loss": 0.8338,
+ "step": 224
+ },
+ {
+ "epoch": 0.2922077922077922,
+ "grad_norm": 14.761630058288574,
+ "learning_rate": 4.848484848484849e-05,
+ "loss": 1.108,
+ "step": 225
+ },
+ {
+ "epoch": 0.2935064935064935,
+ "grad_norm": 14.278302192687988,
+ "learning_rate": 4.87012987012987e-05,
+ "loss": 1.1113,
+ "step": 226
+ },
+ {
+ "epoch": 0.2948051948051948,
+ "grad_norm": 12.455058097839355,
+ "learning_rate": 4.8917748917748915e-05,
+ "loss": 0.8191,
+ "step": 227
+ },
+ {
+ "epoch": 0.2961038961038961,
+ "grad_norm": 12.987092018127441,
+ "learning_rate": 4.9134199134199135e-05,
+ "loss": 0.8167,
+ "step": 228
+ },
+ {
+ "epoch": 0.2974025974025974,
+ "grad_norm": 11.661968231201172,
+ "learning_rate": 4.9350649350649355e-05,
+ "loss": 0.8165,
+ "step": 229
+ },
+ {
+ "epoch": 0.2987012987012987,
+ "grad_norm": 12.673922538757324,
+ "learning_rate": 4.956709956709957e-05,
+ "loss": 0.8569,
+ "step": 230
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 13.494362831115723,
+ "learning_rate": 4.978354978354979e-05,
+ "loss": 0.943,
+ "step": 231
+ },
+ {
+ "epoch": 0.3,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.953125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.8984375,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9375,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.677043080329895,
+ "eval_global_dataset_runtime": 113.6829,
+ "eval_global_dataset_samples_per_second": 9.421,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.9140625,
+ "eval_sts-test-1024_pearson_cosine": 0.8787763074934598,
+ "eval_sts-test-1024_spearman_cosine": 0.9210331750283494,
+ "eval_sts-test-128_pearson_cosine": 0.8722131705651872,
+ "eval_sts-test-128_spearman_cosine": 0.9158898385684954,
+ "eval_sts-test-256_pearson_cosine": 0.8732650189534636,
+ "eval_sts-test-256_spearman_cosine": 0.9174469823390422,
+ "eval_sts-test-32_pearson_cosine": 0.8348374873613509,
+ "eval_sts-test-32_spearman_cosine": 0.8928345151210231,
+ "eval_sts-test-512_pearson_cosine": 0.8784119512407846,
+ "eval_sts-test-512_spearman_cosine": 0.9211549108025473,
+ "eval_sts-test-64_pearson_cosine": 0.8606305478646585,
+ "eval_sts-test-64_spearman_cosine": 0.9075380483657413,
+ "eval_sts-test_pearson_cosine": 0.8837500780432396,
+ "eval_sts-test_spearman_cosine": 0.9228735334971034,
+ "step": 231
+ },
+ {
+ "epoch": 0.3012987012987013,
+ "grad_norm": 12.865336418151855,
+ "learning_rate": 5e-05,
+ "loss": 0.8316,
+ "step": 232
+ },
+ {
+ "epoch": 0.3025974025974026,
+ "grad_norm": 11.271172523498535,
+ "learning_rate": 5.0216450216450216e-05,
+ "loss": 0.6811,
+ "step": 233
+ },
+ {
+ "epoch": 0.3038961038961039,
+ "grad_norm": 11.870718955993652,
+ "learning_rate": 5.043290043290043e-05,
+ "loss": 0.7579,
+ "step": 234
+ },
+ {
+ "epoch": 0.3051948051948052,
+ "grad_norm": 12.892427444458008,
+ "learning_rate": 5.064935064935065e-05,
+ "loss": 0.8088,
+ "step": 235
+ },
+ {
+ "epoch": 0.3064935064935065,
+ "grad_norm": 13.148665428161621,
+ "learning_rate": 5.086580086580087e-05,
+ "loss": 0.8242,
+ "step": 236
+ },
+ {
+ "epoch": 0.3077922077922078,
+ "grad_norm": 13.16065788269043,
+ "learning_rate": 5.108225108225109e-05,
+ "loss": 0.9401,
+ "step": 237
+ },
+ {
+ "epoch": 0.3090909090909091,
+ "grad_norm": 13.374222755432129,
+ "learning_rate": 5.1298701298701304e-05,
+ "loss": 0.9815,
+ "step": 238
+ },
+ {
+ "epoch": 0.3103896103896104,
+ "grad_norm": 11.324379920959473,
+ "learning_rate": 5.151515151515152e-05,
+ "loss": 0.6338,
+ "step": 239
+ },
+ {
+ "epoch": 0.3116883116883117,
+ "grad_norm": 14.477818489074707,
+ "learning_rate": 5.173160173160173e-05,
+ "loss": 1.1544,
+ "step": 240
+ },
+ {
+ "epoch": 0.312987012987013,
+ "grad_norm": 13.306774139404297,
+ "learning_rate": 5.1948051948051944e-05,
+ "loss": 0.7693,
+ "step": 241
+ },
+ {
+ "epoch": 0.3142857142857143,
+ "grad_norm": 12.13784408569336,
+ "learning_rate": 5.216450216450217e-05,
+ "loss": 0.745,
+ "step": 242
+ },
+ {
+ "epoch": 0.3155844155844156,
+ "grad_norm": 12.215293884277344,
+ "learning_rate": 5.2380952380952384e-05,
+ "loss": 0.722,
+ "step": 243
+ },
+ {
+ "epoch": 0.3168831168831169,
+ "grad_norm": 13.042692184448242,
+ "learning_rate": 5.25974025974026e-05,
+ "loss": 0.7834,
+ "step": 244
+ },
+ {
+ "epoch": 0.3181818181818182,
+ "grad_norm": 12.925629615783691,
+ "learning_rate": 5.281385281385282e-05,
+ "loss": 0.8727,
+ "step": 245
+ },
+ {
+ "epoch": 0.3194805194805195,
+ "grad_norm": 11.345806121826172,
+ "learning_rate": 5.303030303030303e-05,
+ "loss": 0.657,
+ "step": 246
+ },
+ {
+ "epoch": 0.3207792207792208,
+ "grad_norm": 14.048829078674316,
+ "learning_rate": 5.3246753246753245e-05,
+ "loss": 0.9334,
+ "step": 247
+ },
+ {
+ "epoch": 0.3220779220779221,
+ "grad_norm": 14.103421211242676,
+ "learning_rate": 5.346320346320347e-05,
+ "loss": 0.9641,
+ "step": 248
+ },
+ {
+ "epoch": 0.3233766233766234,
+ "grad_norm": 13.120491027832031,
+ "learning_rate": 5.3679653679653686e-05,
+ "loss": 0.9539,
+ "step": 249
+ },
+ {
+ "epoch": 0.3246753246753247,
+ "grad_norm": 12.34601879119873,
+ "learning_rate": 5.38961038961039e-05,
+ "loss": 0.916,
+ "step": 250
+ },
+ {
+ "epoch": 0.32597402597402597,
+ "grad_norm": 11.876169204711914,
+ "learning_rate": 5.411255411255411e-05,
+ "loss": 0.7535,
+ "step": 251
+ },
+ {
+ "epoch": 0.32727272727272727,
+ "grad_norm": 13.08942985534668,
+ "learning_rate": 5.4329004329004326e-05,
+ "loss": 1.0744,
+ "step": 252
+ },
+ {
+ "epoch": 0.32857142857142857,
+ "grad_norm": 11.773645401000977,
+ "learning_rate": 5.4545454545454546e-05,
+ "loss": 0.7207,
+ "step": 253
+ },
+ {
+ "epoch": 0.32987012987012987,
+ "grad_norm": 13.143170356750488,
+ "learning_rate": 5.4761904761904766e-05,
+ "loss": 0.9337,
+ "step": 254
+ },
+ {
+ "epoch": 0.33116883116883117,
+ "grad_norm": 11.403676986694336,
+ "learning_rate": 5.497835497835499e-05,
+ "loss": 0.7114,
+ "step": 255
+ },
+ {
+ "epoch": 0.33246753246753247,
+ "grad_norm": 12.294326782226562,
+ "learning_rate": 5.51948051948052e-05,
+ "loss": 0.6995,
+ "step": 256
+ },
+ {
+ "epoch": 0.33376623376623377,
+ "grad_norm": 13.27484130859375,
+ "learning_rate": 5.5411255411255414e-05,
+ "loss": 0.8138,
+ "step": 257
+ },
+ {
+ "epoch": 0.33506493506493507,
+ "grad_norm": 13.444418907165527,
+ "learning_rate": 5.562770562770563e-05,
+ "loss": 1.0225,
+ "step": 258
+ },
+ {
+ "epoch": 0.33636363636363636,
+ "grad_norm": 13.552763938903809,
+ "learning_rate": 5.584415584415584e-05,
+ "loss": 0.9528,
+ "step": 259
+ },
+ {
+ "epoch": 0.33766233766233766,
+ "grad_norm": 13.628582954406738,
+ "learning_rate": 5.606060606060606e-05,
+ "loss": 0.9607,
+ "step": 260
+ },
+ {
+ "epoch": 0.33896103896103896,
+ "grad_norm": 13.577703475952148,
+ "learning_rate": 5.627705627705628e-05,
+ "loss": 0.971,
+ "step": 261
+ },
+ {
+ "epoch": 0.34025974025974026,
+ "grad_norm": 11.462188720703125,
+ "learning_rate": 5.64935064935065e-05,
+ "loss": 0.7819,
+ "step": 262
+ },
+ {
+ "epoch": 0.34155844155844156,
+ "grad_norm": 10.696789741516113,
+ "learning_rate": 5.6709956709956715e-05,
+ "loss": 0.6537,
+ "step": 263
+ },
+ {
+ "epoch": 0.34285714285714286,
+ "grad_norm": 12.647172927856445,
+ "learning_rate": 5.692640692640693e-05,
+ "loss": 1.0185,
+ "step": 264
+ },
+ {
+ "epoch": 0.34415584415584416,
+ "grad_norm": 13.042900085449219,
+ "learning_rate": 5.714285714285714e-05,
+ "loss": 1.0832,
+ "step": 265
+ },
+ {
+ "epoch": 0.34545454545454546,
+ "grad_norm": 9.650392532348633,
+ "learning_rate": 5.7359307359307355e-05,
+ "loss": 0.533,
+ "step": 266
+ },
+ {
+ "epoch": 0.34675324675324676,
+ "grad_norm": 12.420098304748535,
+ "learning_rate": 5.757575757575758e-05,
+ "loss": 0.9556,
+ "step": 267
+ },
+ {
+ "epoch": 0.34805194805194806,
+ "grad_norm": 11.905637741088867,
+ "learning_rate": 5.7792207792207796e-05,
+ "loss": 0.8863,
+ "step": 268
+ },
+ {
+ "epoch": 0.34935064935064936,
+ "grad_norm": 13.118106842041016,
+ "learning_rate": 5.800865800865801e-05,
+ "loss": 1.0875,
+ "step": 269
+ },
+ {
+ "epoch": 0.35064935064935066,
+ "grad_norm": 12.499323844909668,
+ "learning_rate": 5.822510822510823e-05,
+ "loss": 0.9424,
+ "step": 270
+ },
+ {
+ "epoch": 0.35194805194805195,
+ "grad_norm": 11.671981811523438,
+ "learning_rate": 5.844155844155844e-05,
+ "loss": 0.7181,
+ "step": 271
+ },
+ {
+ "epoch": 0.35324675324675325,
+ "grad_norm": 12.202942848205566,
+ "learning_rate": 5.8658008658008656e-05,
+ "loss": 0.9829,
+ "step": 272
+ },
+ {
+ "epoch": 0.35454545454545455,
+ "grad_norm": 12.920580863952637,
+ "learning_rate": 5.887445887445888e-05,
+ "loss": 0.8581,
+ "step": 273
+ },
+ {
+ "epoch": 0.35584415584415585,
+ "grad_norm": 10.84870433807373,
+ "learning_rate": 5.90909090909091e-05,
+ "loss": 0.669,
+ "step": 274
+ },
+ {
+ "epoch": 0.35714285714285715,
+ "grad_norm": 13.038583755493164,
+ "learning_rate": 5.930735930735931e-05,
+ "loss": 1.0623,
+ "step": 275
+ },
+ {
+ "epoch": 0.35844155844155845,
+ "grad_norm": 11.414346694946289,
+ "learning_rate": 5.9523809523809524e-05,
+ "loss": 0.8738,
+ "step": 276
+ },
+ {
+ "epoch": 0.35974025974025975,
+ "grad_norm": 12.988877296447754,
+ "learning_rate": 5.9740259740259744e-05,
+ "loss": 1.0384,
+ "step": 277
+ },
+ {
+ "epoch": 0.36103896103896105,
+ "grad_norm": 12.147465705871582,
+ "learning_rate": 5.995670995670996e-05,
+ "loss": 0.9253,
+ "step": 278
+ },
+ {
+ "epoch": 0.36233766233766235,
+ "grad_norm": 9.99681568145752,
+ "learning_rate": 6.0173160173160184e-05,
+ "loss": 0.6211,
+ "step": 279
+ },
+ {
+ "epoch": 0.36363636363636365,
+ "grad_norm": 10.887341499328613,
+ "learning_rate": 6.03896103896104e-05,
+ "loss": 0.7466,
+ "step": 280
+ },
+ {
+ "epoch": 0.36493506493506495,
+ "grad_norm": 11.995635032653809,
+ "learning_rate": 6.060606060606061e-05,
+ "loss": 0.8584,
+ "step": 281
+ },
+ {
+ "epoch": 0.36623376623376624,
+ "grad_norm": 11.818434715270996,
+ "learning_rate": 6.0822510822510825e-05,
+ "loss": 0.8048,
+ "step": 282
+ },
+ {
+ "epoch": 0.36753246753246754,
+ "grad_norm": 11.93957805633545,
+ "learning_rate": 6.103896103896104e-05,
+ "loss": 0.7548,
+ "step": 283
+ },
+ {
+ "epoch": 0.36883116883116884,
+ "grad_norm": 12.328822135925293,
+ "learning_rate": 6.125541125541126e-05,
+ "loss": 0.7822,
+ "step": 284
+ },
+ {
+ "epoch": 0.37012987012987014,
+ "grad_norm": 13.202193260192871,
+ "learning_rate": 6.147186147186147e-05,
+ "loss": 1.0389,
+ "step": 285
+ },
+ {
+ "epoch": 0.37142857142857144,
+ "grad_norm": 10.988061904907227,
+ "learning_rate": 6.16883116883117e-05,
+ "loss": 0.7156,
+ "step": 286
+ },
+ {
+ "epoch": 0.37272727272727274,
+ "grad_norm": 11.593925476074219,
+ "learning_rate": 6.19047619047619e-05,
+ "loss": 0.7989,
+ "step": 287
+ },
+ {
+ "epoch": 0.37402597402597404,
+ "grad_norm": 14.6128568649292,
+ "learning_rate": 6.212121212121213e-05,
+ "loss": 1.0917,
+ "step": 288
+ },
+ {
+ "epoch": 0.37532467532467534,
+ "grad_norm": 12.564693450927734,
+ "learning_rate": 6.233766233766233e-05,
+ "loss": 0.9575,
+ "step": 289
+ },
+ {
+ "epoch": 0.37662337662337664,
+ "grad_norm": 12.380928039550781,
+ "learning_rate": 6.255411255411255e-05,
+ "loss": 0.9086,
+ "step": 290
+ },
+ {
+ "epoch": 0.37792207792207794,
+ "grad_norm": 13.53039836883545,
+ "learning_rate": 6.277056277056277e-05,
+ "loss": 1.0582,
+ "step": 291
+ },
+ {
+ "epoch": 0.37922077922077924,
+ "grad_norm": 12.092082023620605,
+ "learning_rate": 6.2987012987013e-05,
+ "loss": 0.879,
+ "step": 292
+ },
+ {
+ "epoch": 0.38051948051948054,
+ "grad_norm": 10.496182441711426,
+ "learning_rate": 6.320346320346321e-05,
+ "loss": 0.6524,
+ "step": 293
+ },
+ {
+ "epoch": 0.38181818181818183,
+ "grad_norm": 11.645306587219238,
+ "learning_rate": 6.341991341991342e-05,
+ "loss": 0.767,
+ "step": 294
+ },
+ {
+ "epoch": 0.38311688311688313,
+ "grad_norm": 12.280860900878906,
+ "learning_rate": 6.363636363636364e-05,
+ "loss": 0.8842,
+ "step": 295
+ },
+ {
+ "epoch": 0.38441558441558443,
+ "grad_norm": 12.687444686889648,
+ "learning_rate": 6.385281385281385e-05,
+ "loss": 0.9167,
+ "step": 296
+ },
+ {
+ "epoch": 0.38571428571428573,
+ "grad_norm": 12.968870162963867,
+ "learning_rate": 6.406926406926407e-05,
+ "loss": 1.0106,
+ "step": 297
+ },
+ {
+ "epoch": 0.38701298701298703,
+ "grad_norm": 10.828211784362793,
+ "learning_rate": 6.428571428571429e-05,
+ "loss": 0.8014,
+ "step": 298
+ },
+ {
+ "epoch": 0.38831168831168833,
+ "grad_norm": 10.606407165527344,
+ "learning_rate": 6.450216450216451e-05,
+ "loss": 0.7783,
+ "step": 299
+ },
+ {
+ "epoch": 0.38961038961038963,
+ "grad_norm": 11.974769592285156,
+ "learning_rate": 6.471861471861473e-05,
+ "loss": 0.8608,
+ "step": 300
+ },
+ {
+ "epoch": 0.39090909090909093,
+ "grad_norm": 10.047250747680664,
+ "learning_rate": 6.493506493506494e-05,
+ "loss": 0.6383,
+ "step": 301
+ },
+ {
+ "epoch": 0.3922077922077922,
+ "grad_norm": 12.191349983215332,
+ "learning_rate": 6.515151515151516e-05,
+ "loss": 0.9668,
+ "step": 302
+ },
+ {
+ "epoch": 0.3935064935064935,
+ "grad_norm": 13.38426685333252,
+ "learning_rate": 6.536796536796536e-05,
+ "loss": 0.9447,
+ "step": 303
+ },
+ {
+ "epoch": 0.3948051948051948,
+ "grad_norm": 10.779813766479492,
+ "learning_rate": 6.55844155844156e-05,
+ "loss": 0.6609,
+ "step": 304
+ },
+ {
+ "epoch": 0.3961038961038961,
+ "grad_norm": 11.49394702911377,
+ "learning_rate": 6.58008658008658e-05,
+ "loss": 0.8104,
+ "step": 305
+ },
+ {
+ "epoch": 0.3974025974025974,
+ "grad_norm": 10.99763298034668,
+ "learning_rate": 6.601731601731602e-05,
+ "loss": 0.7231,
+ "step": 306
+ },
+ {
+ "epoch": 0.3987012987012987,
+ "grad_norm": 12.296140670776367,
+ "learning_rate": 6.623376623376624e-05,
+ "loss": 0.853,
+ "step": 307
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 12.42627239227295,
+ "learning_rate": 6.645021645021645e-05,
+ "loss": 0.8893,
+ "step": 308
+ },
+ {
+ "epoch": 0.4,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9375,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.6458740234375,
+ "eval_global_dataset_runtime": 113.11,
+ "eval_global_dataset_samples_per_second": 9.469,
+ "eval_global_dataset_steps_per_second": 0.08,
+ "eval_sequential_score": 0.9140625,
+ "eval_sts-test-1024_pearson_cosine": 0.8917026424281715,
+ "eval_sts-test-1024_spearman_cosine": 0.9280852568183454,
+ "eval_sts-test-128_pearson_cosine": 0.8799111388722691,
+ "eval_sts-test-128_spearman_cosine": 0.9174530691277519,
+ "eval_sts-test-256_pearson_cosine": 0.8869619950989827,
+ "eval_sts-test-256_spearman_cosine": 0.924964524442147,
+ "eval_sts-test-32_pearson_cosine": 0.8506411556887592,
+ "eval_sts-test-32_spearman_cosine": 0.9038834688150462,
+ "eval_sts-test-512_pearson_cosine": 0.8886416592593962,
+ "eval_sts-test-512_spearman_cosine": 0.9265875487493185,
+ "eval_sts-test-64_pearson_cosine": 0.8675142483501502,
+ "eval_sts-test-64_spearman_cosine": 0.9149750300300387,
+ "eval_sts-test_pearson_cosine": 0.8953751075507514,
+ "eval_sts-test_spearman_cosine": 0.9303824824867356,
+ "step": 308
+ },
+ {
+ "epoch": 0.4012987012987013,
+ "grad_norm": 10.81874942779541,
+ "learning_rate": 6.666666666666667e-05,
+ "loss": 0.6674,
+ "step": 309
+ },
+ {
+ "epoch": 0.4025974025974026,
+ "grad_norm": 10.319266319274902,
+ "learning_rate": 6.688311688311688e-05,
+ "loss": 0.7047,
+ "step": 310
+ },
+ {
+ "epoch": 0.4038961038961039,
+ "grad_norm": 11.703706741333008,
+ "learning_rate": 6.709956709956711e-05,
+ "loss": 0.778,
+ "step": 311
+ },
+ {
+ "epoch": 0.4051948051948052,
+ "grad_norm": 9.888168334960938,
+ "learning_rate": 6.731601731601732e-05,
+ "loss": 0.6232,
+ "step": 312
+ },
+ {
+ "epoch": 0.4064935064935065,
+ "grad_norm": 11.571825981140137,
+ "learning_rate": 6.753246753246754e-05,
+ "loss": 0.8989,
+ "step": 313
+ },
+ {
+ "epoch": 0.4077922077922078,
+ "grad_norm": 12.945980072021484,
+ "learning_rate": 6.774891774891774e-05,
+ "loss": 0.9614,
+ "step": 314
+ },
+ {
+ "epoch": 0.4090909090909091,
+ "grad_norm": 12.342436790466309,
+ "learning_rate": 6.796536796536796e-05,
+ "loss": 0.9365,
+ "step": 315
+ },
+ {
+ "epoch": 0.4103896103896104,
+ "grad_norm": 11.575722694396973,
+ "learning_rate": 6.818181818181818e-05,
+ "loss": 0.8101,
+ "step": 316
+ },
+ {
+ "epoch": 0.4116883116883117,
+ "grad_norm": 11.095999717712402,
+ "learning_rate": 6.83982683982684e-05,
+ "loss": 0.8199,
+ "step": 317
+ },
+ {
+ "epoch": 0.412987012987013,
+ "grad_norm": 11.149064064025879,
+ "learning_rate": 6.861471861471862e-05,
+ "loss": 0.8435,
+ "step": 318
+ },
+ {
+ "epoch": 0.4142857142857143,
+ "grad_norm": 11.510180473327637,
+ "learning_rate": 6.883116883116883e-05,
+ "loss": 0.9404,
+ "step": 319
+ },
+ {
+ "epoch": 0.4155844155844156,
+ "grad_norm": 10.658203125,
+ "learning_rate": 6.904761904761905e-05,
+ "loss": 0.6967,
+ "step": 320
+ },
+ {
+ "epoch": 0.41688311688311686,
+ "grad_norm": 11.118837356567383,
+ "learning_rate": 6.926406926406926e-05,
+ "loss": 0.8238,
+ "step": 321
+ },
+ {
+ "epoch": 0.41818181818181815,
+ "grad_norm": 11.655184745788574,
+ "learning_rate": 6.948051948051948e-05,
+ "loss": 0.9732,
+ "step": 322
+ },
+ {
+ "epoch": 0.41948051948051945,
+ "grad_norm": 10.72670841217041,
+ "learning_rate": 6.96969696969697e-05,
+ "loss": 0.6953,
+ "step": 323
+ },
+ {
+ "epoch": 0.42077922077922075,
+ "grad_norm": 10.018539428710938,
+ "learning_rate": 6.991341991341992e-05,
+ "loss": 0.6633,
+ "step": 324
+ },
+ {
+ "epoch": 0.42207792207792205,
+ "grad_norm": 11.938111305236816,
+ "learning_rate": 7.012987012987014e-05,
+ "loss": 0.9486,
+ "step": 325
+ },
+ {
+ "epoch": 0.42337662337662335,
+ "grad_norm": 12.473401069641113,
+ "learning_rate": 7.034632034632035e-05,
+ "loss": 0.9642,
+ "step": 326
+ },
+ {
+ "epoch": 0.42467532467532465,
+ "grad_norm": 11.210603713989258,
+ "learning_rate": 7.056277056277057e-05,
+ "loss": 0.7443,
+ "step": 327
+ },
+ {
+ "epoch": 0.42597402597402595,
+ "grad_norm": 12.498502731323242,
+ "learning_rate": 7.077922077922077e-05,
+ "loss": 1.0133,
+ "step": 328
+ },
+ {
+ "epoch": 0.42727272727272725,
+ "grad_norm": 13.254681587219238,
+ "learning_rate": 7.099567099567101e-05,
+ "loss": 1.0489,
+ "step": 329
+ },
+ {
+ "epoch": 0.42857142857142855,
+ "grad_norm": 9.72973918914795,
+ "learning_rate": 7.121212121212121e-05,
+ "loss": 0.5865,
+ "step": 330
+ },
+ {
+ "epoch": 0.42987012987012985,
+ "grad_norm": 10.99699592590332,
+ "learning_rate": 7.142857142857143e-05,
+ "loss": 0.7757,
+ "step": 331
+ },
+ {
+ "epoch": 0.43116883116883115,
+ "grad_norm": 12.112956047058105,
+ "learning_rate": 7.164502164502165e-05,
+ "loss": 0.9716,
+ "step": 332
+ },
+ {
+ "epoch": 0.43246753246753245,
+ "grad_norm": 10.795978546142578,
+ "learning_rate": 7.186147186147186e-05,
+ "loss": 0.7289,
+ "step": 333
+ },
+ {
+ "epoch": 0.43376623376623374,
+ "grad_norm": 11.796692848205566,
+ "learning_rate": 7.207792207792208e-05,
+ "loss": 0.864,
+ "step": 334
+ },
+ {
+ "epoch": 0.43506493506493504,
+ "grad_norm": 10.812421798706055,
+ "learning_rate": 7.229437229437229e-05,
+ "loss": 0.706,
+ "step": 335
+ },
+ {
+ "epoch": 0.43636363636363634,
+ "grad_norm": 10.89258098602295,
+ "learning_rate": 7.251082251082252e-05,
+ "loss": 0.7336,
+ "step": 336
+ },
+ {
+ "epoch": 0.43766233766233764,
+ "grad_norm": 10.730430603027344,
+ "learning_rate": 7.272727272727273e-05,
+ "loss": 0.6631,
+ "step": 337
+ },
+ {
+ "epoch": 0.43896103896103894,
+ "grad_norm": 11.678521156311035,
+ "learning_rate": 7.294372294372295e-05,
+ "loss": 0.8824,
+ "step": 338
+ },
+ {
+ "epoch": 0.44025974025974024,
+ "grad_norm": 10.951140403747559,
+ "learning_rate": 7.316017316017317e-05,
+ "loss": 0.7209,
+ "step": 339
+ },
+ {
+ "epoch": 0.44155844155844154,
+ "grad_norm": 10.380850791931152,
+ "learning_rate": 7.337662337662338e-05,
+ "loss": 0.6866,
+ "step": 340
+ },
+ {
+ "epoch": 0.44285714285714284,
+ "grad_norm": 10.9320707321167,
+ "learning_rate": 7.35930735930736e-05,
+ "loss": 0.8544,
+ "step": 341
+ },
+ {
+ "epoch": 0.44415584415584414,
+ "grad_norm": 11.206174850463867,
+ "learning_rate": 7.380952380952382e-05,
+ "loss": 0.7208,
+ "step": 342
+ },
+ {
+ "epoch": 0.44545454545454544,
+ "grad_norm": 12.502874374389648,
+ "learning_rate": 7.402597402597404e-05,
+ "loss": 0.9237,
+ "step": 343
+ },
+ {
+ "epoch": 0.44675324675324674,
+ "grad_norm": 10.985047340393066,
+ "learning_rate": 7.424242424242424e-05,
+ "loss": 0.8569,
+ "step": 344
+ },
+ {
+ "epoch": 0.44805194805194803,
+ "grad_norm": 11.576885223388672,
+ "learning_rate": 7.445887445887446e-05,
+ "loss": 0.9145,
+ "step": 345
+ },
+ {
+ "epoch": 0.44935064935064933,
+ "grad_norm": 10.66639518737793,
+ "learning_rate": 7.467532467532467e-05,
+ "loss": 0.8172,
+ "step": 346
+ },
+ {
+ "epoch": 0.45064935064935063,
+ "grad_norm": 10.709420204162598,
+ "learning_rate": 7.489177489177489e-05,
+ "loss": 0.8755,
+ "step": 347
+ },
+ {
+ "epoch": 0.45194805194805193,
+ "grad_norm": 9.81844425201416,
+ "learning_rate": 7.510822510822511e-05,
+ "loss": 0.7049,
+ "step": 348
+ },
+ {
+ "epoch": 0.45324675324675323,
+ "grad_norm": 10.78894329071045,
+ "learning_rate": 7.532467532467533e-05,
+ "loss": 0.8571,
+ "step": 349
+ },
+ {
+ "epoch": 0.45454545454545453,
+ "grad_norm": 10.1858549118042,
+ "learning_rate": 7.554112554112555e-05,
+ "loss": 0.71,
+ "step": 350
+ },
+ {
+ "epoch": 0.45584415584415583,
+ "grad_norm": 10.635689735412598,
+ "learning_rate": 7.575757575757576e-05,
+ "loss": 0.7568,
+ "step": 351
+ },
+ {
+ "epoch": 0.45714285714285713,
+ "grad_norm": 9.103358268737793,
+ "learning_rate": 7.597402597402598e-05,
+ "loss": 0.5732,
+ "step": 352
+ },
+ {
+ "epoch": 0.4584415584415584,
+ "grad_norm": 11.04738712310791,
+ "learning_rate": 7.619047619047618e-05,
+ "loss": 0.7801,
+ "step": 353
+ },
+ {
+ "epoch": 0.4597402597402597,
+ "grad_norm": 10.552362442016602,
+ "learning_rate": 7.640692640692642e-05,
+ "loss": 0.6895,
+ "step": 354
+ },
+ {
+ "epoch": 0.461038961038961,
+ "grad_norm": 11.45176887512207,
+ "learning_rate": 7.662337662337662e-05,
+ "loss": 0.8078,
+ "step": 355
+ },
+ {
+ "epoch": 0.4623376623376623,
+ "grad_norm": 12.02926254272461,
+ "learning_rate": 7.683982683982685e-05,
+ "loss": 0.9609,
+ "step": 356
+ },
+ {
+ "epoch": 0.4636363636363636,
+ "grad_norm": 12.51374626159668,
+ "learning_rate": 7.705627705627707e-05,
+ "loss": 0.9131,
+ "step": 357
+ },
+ {
+ "epoch": 0.4649350649350649,
+ "grad_norm": 11.659627914428711,
+ "learning_rate": 7.727272727272727e-05,
+ "loss": 0.8842,
+ "step": 358
+ },
+ {
+ "epoch": 0.4662337662337662,
+ "grad_norm": 10.37750244140625,
+ "learning_rate": 7.748917748917749e-05,
+ "loss": 0.7314,
+ "step": 359
+ },
+ {
+ "epoch": 0.4675324675324675,
+ "grad_norm": 10.31059455871582,
+ "learning_rate": 7.770562770562771e-05,
+ "loss": 0.7894,
+ "step": 360
+ },
+ {
+ "epoch": 0.4688311688311688,
+ "grad_norm": 10.418132781982422,
+ "learning_rate": 7.792207792207793e-05,
+ "loss": 0.7982,
+ "step": 361
+ },
+ {
+ "epoch": 0.4701298701298701,
+ "grad_norm": 10.784585952758789,
+ "learning_rate": 7.813852813852814e-05,
+ "loss": 0.8729,
+ "step": 362
+ },
+ {
+ "epoch": 0.4714285714285714,
+ "grad_norm": 11.513368606567383,
+ "learning_rate": 7.835497835497836e-05,
+ "loss": 0.9753,
+ "step": 363
+ },
+ {
+ "epoch": 0.4727272727272727,
+ "grad_norm": 9.88028335571289,
+ "learning_rate": 7.857142857142858e-05,
+ "loss": 0.6694,
+ "step": 364
+ },
+ {
+ "epoch": 0.474025974025974,
+ "grad_norm": 10.181724548339844,
+ "learning_rate": 7.878787878787879e-05,
+ "loss": 0.7563,
+ "step": 365
+ },
+ {
+ "epoch": 0.4753246753246753,
+ "grad_norm": 10.725600242614746,
+ "learning_rate": 7.900432900432901e-05,
+ "loss": 0.7814,
+ "step": 366
+ },
+ {
+ "epoch": 0.4766233766233766,
+ "grad_norm": 9.441679000854492,
+ "learning_rate": 7.922077922077923e-05,
+ "loss": 0.5552,
+ "step": 367
+ },
+ {
+ "epoch": 0.4779220779220779,
+ "grad_norm": 12.737554550170898,
+ "learning_rate": 7.943722943722945e-05,
+ "loss": 1.0459,
+ "step": 368
+ },
+ {
+ "epoch": 0.4792207792207792,
+ "grad_norm": 10.601099014282227,
+ "learning_rate": 7.965367965367965e-05,
+ "loss": 0.9098,
+ "step": 369
+ },
+ {
+ "epoch": 0.4805194805194805,
+ "grad_norm": 10.562568664550781,
+ "learning_rate": 7.987012987012987e-05,
+ "loss": 0.7817,
+ "step": 370
+ },
+ {
+ "epoch": 0.4818181818181818,
+ "grad_norm": 9.046710968017578,
+ "learning_rate": 8.008658008658008e-05,
+ "loss": 0.6203,
+ "step": 371
+ },
+ {
+ "epoch": 0.4831168831168831,
+ "grad_norm": 10.283613204956055,
+ "learning_rate": 8.03030303030303e-05,
+ "loss": 0.7885,
+ "step": 372
+ },
+ {
+ "epoch": 0.4844155844155844,
+ "grad_norm": 10.934900283813477,
+ "learning_rate": 8.051948051948052e-05,
+ "loss": 0.7767,
+ "step": 373
+ },
+ {
+ "epoch": 0.4857142857142857,
+ "grad_norm": 10.616456985473633,
+ "learning_rate": 8.073593073593074e-05,
+ "loss": 0.8126,
+ "step": 374
+ },
+ {
+ "epoch": 0.487012987012987,
+ "grad_norm": 10.01803970336914,
+ "learning_rate": 8.095238095238096e-05,
+ "loss": 0.7252,
+ "step": 375
+ },
+ {
+ "epoch": 0.4883116883116883,
+ "grad_norm": 10.306028366088867,
+ "learning_rate": 8.116883116883117e-05,
+ "loss": 0.8657,
+ "step": 376
+ },
+ {
+ "epoch": 0.4896103896103896,
+ "grad_norm": 10.669329643249512,
+ "learning_rate": 8.138528138528139e-05,
+ "loss": 0.8491,
+ "step": 377
+ },
+ {
+ "epoch": 0.4909090909090909,
+ "grad_norm": 10.165072441101074,
+ "learning_rate": 8.16017316017316e-05,
+ "loss": 0.784,
+ "step": 378
+ },
+ {
+ "epoch": 0.4922077922077922,
+ "grad_norm": 8.929415702819824,
+ "learning_rate": 8.181818181818183e-05,
+ "loss": 0.6108,
+ "step": 379
+ },
+ {
+ "epoch": 0.4935064935064935,
+ "grad_norm": 11.677674293518066,
+ "learning_rate": 8.203463203463204e-05,
+ "loss": 1.0215,
+ "step": 380
+ },
+ {
+ "epoch": 0.4948051948051948,
+ "grad_norm": 10.382437705993652,
+ "learning_rate": 8.225108225108226e-05,
+ "loss": 0.8208,
+ "step": 381
+ },
+ {
+ "epoch": 0.4961038961038961,
+ "grad_norm": 10.082671165466309,
+ "learning_rate": 8.246753246753248e-05,
+ "loss": 0.7456,
+ "step": 382
+ },
+ {
+ "epoch": 0.4974025974025974,
+ "grad_norm": 10.928060531616211,
+ "learning_rate": 8.268398268398268e-05,
+ "loss": 1.0181,
+ "step": 383
+ },
+ {
+ "epoch": 0.4987012987012987,
+ "grad_norm": 10.922670364379883,
+ "learning_rate": 8.29004329004329e-05,
+ "loss": 0.8263,
+ "step": 384
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 10.033293724060059,
+ "learning_rate": 8.311688311688312e-05,
+ "loss": 0.7218,
+ "step": 385
+ },
+ {
+ "epoch": 0.5,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.953125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9375,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9375,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9609375,
+ "eval_global_dataset_loss": 0.6583967208862305,
+ "eval_global_dataset_runtime": 112.7848,
+ "eval_global_dataset_samples_per_second": 9.496,
+ "eval_global_dataset_steps_per_second": 0.08,
+ "eval_sequential_score": 0.90625,
+ "eval_sts-test-1024_pearson_cosine": 0.8804763387747968,
+ "eval_sts-test-1024_spearman_cosine": 0.9192536844466944,
+ "eval_sts-test-128_pearson_cosine": 0.867682985932934,
+ "eval_sts-test-128_spearman_cosine": 0.9096662761358334,
+ "eval_sts-test-256_pearson_cosine": 0.8723111264791312,
+ "eval_sts-test-256_spearman_cosine": 0.9130394312202947,
+ "eval_sts-test-32_pearson_cosine": 0.8338645489107269,
+ "eval_sts-test-32_spearman_cosine": 0.8955438521897122,
+ "eval_sts-test-512_pearson_cosine": 0.8794209596972049,
+ "eval_sts-test-512_spearman_cosine": 0.9186070526578676,
+ "eval_sts-test-64_pearson_cosine": 0.8606438099441043,
+ "eval_sts-test-64_spearman_cosine": 0.9104955115859569,
+ "eval_sts-test_pearson_cosine": 0.8855632236901727,
+ "eval_sts-test_spearman_cosine": 0.9217009315544635,
+ "step": 385
+ },
+ {
+ "epoch": 0.5012987012987012,
+ "grad_norm": 9.382086753845215,
+ "learning_rate": 8.333333333333334e-05,
+ "loss": 0.6829,
+ "step": 386
+ },
+ {
+ "epoch": 0.5025974025974026,
+ "grad_norm": 10.253658294677734,
+ "learning_rate": 8.354978354978355e-05,
+ "loss": 0.8304,
+ "step": 387
+ },
+ {
+ "epoch": 0.5038961038961038,
+ "grad_norm": 9.05147647857666,
+ "learning_rate": 8.376623376623377e-05,
+ "loss": 0.6935,
+ "step": 388
+ },
+ {
+ "epoch": 0.5051948051948052,
+ "grad_norm": 10.728251457214355,
+ "learning_rate": 8.398268398268399e-05,
+ "loss": 0.9569,
+ "step": 389
+ },
+ {
+ "epoch": 0.5064935064935064,
+ "grad_norm": 9.828920364379883,
+ "learning_rate": 8.41991341991342e-05,
+ "loss": 0.7174,
+ "step": 390
+ },
+ {
+ "epoch": 0.5077922077922078,
+ "grad_norm": 10.326533317565918,
+ "learning_rate": 8.441558441558442e-05,
+ "loss": 0.8543,
+ "step": 391
+ },
+ {
+ "epoch": 0.509090909090909,
+ "grad_norm": 10.311280250549316,
+ "learning_rate": 8.463203463203464e-05,
+ "loss": 0.8769,
+ "step": 392
+ },
+ {
+ "epoch": 0.5103896103896104,
+ "grad_norm": 8.989762306213379,
+ "learning_rate": 8.484848484848486e-05,
+ "loss": 0.5945,
+ "step": 393
+ },
+ {
+ "epoch": 0.5116883116883116,
+ "grad_norm": 9.76150894165039,
+ "learning_rate": 8.506493506493507e-05,
+ "loss": 0.7787,
+ "step": 394
+ },
+ {
+ "epoch": 0.512987012987013,
+ "grad_norm": 11.173674583435059,
+ "learning_rate": 8.528138528138529e-05,
+ "loss": 0.9674,
+ "step": 395
+ },
+ {
+ "epoch": 0.5142857142857142,
+ "grad_norm": 8.645306587219238,
+ "learning_rate": 8.549783549783549e-05,
+ "loss": 0.7011,
+ "step": 396
+ },
+ {
+ "epoch": 0.5155844155844156,
+ "grad_norm": 11.850281715393066,
+ "learning_rate": 8.571428571428571e-05,
+ "loss": 0.9993,
+ "step": 397
+ },
+ {
+ "epoch": 0.5168831168831168,
+ "grad_norm": 8.96986198425293,
+ "learning_rate": 8.593073593073593e-05,
+ "loss": 0.6368,
+ "step": 398
+ },
+ {
+ "epoch": 0.5181818181818182,
+ "grad_norm": 9.281974792480469,
+ "learning_rate": 8.614718614718615e-05,
+ "loss": 0.6236,
+ "step": 399
+ },
+ {
+ "epoch": 0.5194805194805194,
+ "grad_norm": 9.10240650177002,
+ "learning_rate": 8.636363636363637e-05,
+ "loss": 0.6543,
+ "step": 400
+ },
+ {
+ "epoch": 0.5207792207792208,
+ "grad_norm": 9.484097480773926,
+ "learning_rate": 8.658008658008658e-05,
+ "loss": 0.6927,
+ "step": 401
+ },
+ {
+ "epoch": 0.522077922077922,
+ "grad_norm": 12.794879913330078,
+ "learning_rate": 8.67965367965368e-05,
+ "loss": 1.1117,
+ "step": 402
+ },
+ {
+ "epoch": 0.5233766233766234,
+ "grad_norm": 10.31647777557373,
+ "learning_rate": 8.701298701298701e-05,
+ "loss": 0.8847,
+ "step": 403
+ },
+ {
+ "epoch": 0.5246753246753246,
+ "grad_norm": 11.432804107666016,
+ "learning_rate": 8.722943722943724e-05,
+ "loss": 0.9657,
+ "step": 404
+ },
+ {
+ "epoch": 0.525974025974026,
+ "grad_norm": 11.18320369720459,
+ "learning_rate": 8.744588744588745e-05,
+ "loss": 0.8722,
+ "step": 405
+ },
+ {
+ "epoch": 0.5272727272727272,
+ "grad_norm": 9.664923667907715,
+ "learning_rate": 8.766233766233767e-05,
+ "loss": 0.7465,
+ "step": 406
+ },
+ {
+ "epoch": 0.5285714285714286,
+ "grad_norm": 8.084552764892578,
+ "learning_rate": 8.787878787878789e-05,
+ "loss": 0.5488,
+ "step": 407
+ },
+ {
+ "epoch": 0.5298701298701298,
+ "grad_norm": 9.583297729492188,
+ "learning_rate": 8.80952380952381e-05,
+ "loss": 0.6888,
+ "step": 408
+ },
+ {
+ "epoch": 0.5311688311688312,
+ "grad_norm": 10.47243595123291,
+ "learning_rate": 8.831168831168831e-05,
+ "loss": 0.7561,
+ "step": 409
+ },
+ {
+ "epoch": 0.5324675324675324,
+ "grad_norm": 9.173786163330078,
+ "learning_rate": 8.852813852813854e-05,
+ "loss": 0.5411,
+ "step": 410
+ },
+ {
+ "epoch": 0.5337662337662338,
+ "grad_norm": 10.696986198425293,
+ "learning_rate": 8.874458874458876e-05,
+ "loss": 0.7663,
+ "step": 411
+ },
+ {
+ "epoch": 0.535064935064935,
+ "grad_norm": 12.49531078338623,
+ "learning_rate": 8.896103896103896e-05,
+ "loss": 1.0137,
+ "step": 412
+ },
+ {
+ "epoch": 0.5363636363636364,
+ "grad_norm": 9.877222061157227,
+ "learning_rate": 8.917748917748918e-05,
+ "loss": 0.667,
+ "step": 413
+ },
+ {
+ "epoch": 0.5376623376623376,
+ "grad_norm": 9.855676651000977,
+ "learning_rate": 8.93939393939394e-05,
+ "loss": 0.7111,
+ "step": 414
+ },
+ {
+ "epoch": 0.538961038961039,
+ "grad_norm": 11.593993186950684,
+ "learning_rate": 8.961038961038961e-05,
+ "loss": 0.8531,
+ "step": 415
+ },
+ {
+ "epoch": 0.5402597402597402,
+ "grad_norm": 10.321015357971191,
+ "learning_rate": 8.982683982683983e-05,
+ "loss": 0.8561,
+ "step": 416
+ },
+ {
+ "epoch": 0.5415584415584416,
+ "grad_norm": 8.318321228027344,
+ "learning_rate": 9.004329004329005e-05,
+ "loss": 0.612,
+ "step": 417
+ },
+ {
+ "epoch": 0.5428571428571428,
+ "grad_norm": 10.25160026550293,
+ "learning_rate": 9.025974025974027e-05,
+ "loss": 0.7229,
+ "step": 418
+ },
+ {
+ "epoch": 0.5441558441558442,
+ "grad_norm": 9.849309921264648,
+ "learning_rate": 9.047619047619048e-05,
+ "loss": 0.8387,
+ "step": 419
+ },
+ {
+ "epoch": 0.5454545454545454,
+ "grad_norm": 9.49033260345459,
+ "learning_rate": 9.06926406926407e-05,
+ "loss": 0.7439,
+ "step": 420
+ },
+ {
+ "epoch": 0.5467532467532468,
+ "grad_norm": 8.027220726013184,
+ "learning_rate": 9.090909090909092e-05,
+ "loss": 0.5846,
+ "step": 421
+ },
+ {
+ "epoch": 0.548051948051948,
+ "grad_norm": 8.108675003051758,
+ "learning_rate": 9.112554112554112e-05,
+ "loss": 0.5976,
+ "step": 422
+ },
+ {
+ "epoch": 0.5493506493506494,
+ "grad_norm": 9.802972793579102,
+ "learning_rate": 9.134199134199136e-05,
+ "loss": 0.7378,
+ "step": 423
+ },
+ {
+ "epoch": 0.5506493506493506,
+ "grad_norm": 10.613837242126465,
+ "learning_rate": 9.155844155844156e-05,
+ "loss": 0.8756,
+ "step": 424
+ },
+ {
+ "epoch": 0.551948051948052,
+ "grad_norm": 10.099786758422852,
+ "learning_rate": 9.177489177489178e-05,
+ "loss": 0.6755,
+ "step": 425
+ },
+ {
+ "epoch": 0.5532467532467532,
+ "grad_norm": 9.524248123168945,
+ "learning_rate": 9.199134199134199e-05,
+ "loss": 0.6566,
+ "step": 426
+ },
+ {
+ "epoch": 0.5545454545454546,
+ "grad_norm": 9.682258605957031,
+ "learning_rate": 9.220779220779221e-05,
+ "loss": 0.5624,
+ "step": 427
+ },
+ {
+ "epoch": 0.5558441558441558,
+ "grad_norm": 9.256147384643555,
+ "learning_rate": 9.242424242424242e-05,
+ "loss": 0.5887,
+ "step": 428
+ },
+ {
+ "epoch": 0.5571428571428572,
+ "grad_norm": 11.301840782165527,
+ "learning_rate": 9.264069264069265e-05,
+ "loss": 0.9906,
+ "step": 429
+ },
+ {
+ "epoch": 0.5584415584415584,
+ "grad_norm": 10.433177947998047,
+ "learning_rate": 9.285714285714286e-05,
+ "loss": 0.735,
+ "step": 430
+ },
+ {
+ "epoch": 0.5597402597402598,
+ "grad_norm": 11.203728675842285,
+ "learning_rate": 9.307359307359308e-05,
+ "loss": 0.8759,
+ "step": 431
+ },
+ {
+ "epoch": 0.561038961038961,
+ "grad_norm": 10.540921211242676,
+ "learning_rate": 9.32900432900433e-05,
+ "loss": 0.8514,
+ "step": 432
+ },
+ {
+ "epoch": 0.5623376623376624,
+ "grad_norm": 9.670626640319824,
+ "learning_rate": 9.35064935064935e-05,
+ "loss": 0.7531,
+ "step": 433
+ },
+ {
+ "epoch": 0.5636363636363636,
+ "grad_norm": 9.004586219787598,
+ "learning_rate": 9.372294372294373e-05,
+ "loss": 0.6816,
+ "step": 434
+ },
+ {
+ "epoch": 0.564935064935065,
+ "grad_norm": 9.889915466308594,
+ "learning_rate": 9.393939393939395e-05,
+ "loss": 0.8911,
+ "step": 435
+ },
+ {
+ "epoch": 0.5662337662337662,
+ "grad_norm": 9.216524124145508,
+ "learning_rate": 9.415584415584417e-05,
+ "loss": 0.7555,
+ "step": 436
+ },
+ {
+ "epoch": 0.5675324675324676,
+ "grad_norm": 9.710390090942383,
+ "learning_rate": 9.437229437229437e-05,
+ "loss": 0.8079,
+ "step": 437
+ },
+ {
+ "epoch": 0.5688311688311688,
+ "grad_norm": 8.755694389343262,
+ "learning_rate": 9.45887445887446e-05,
+ "loss": 0.6203,
+ "step": 438
+ },
+ {
+ "epoch": 0.5701298701298702,
+ "grad_norm": 9.18278694152832,
+ "learning_rate": 9.480519480519481e-05,
+ "loss": 0.6348,
+ "step": 439
+ },
+ {
+ "epoch": 0.5714285714285714,
+ "grad_norm": 8.409049987792969,
+ "learning_rate": 9.502164502164502e-05,
+ "loss": 0.5986,
+ "step": 440
+ },
+ {
+ "epoch": 0.5727272727272728,
+ "grad_norm": 10.397635459899902,
+ "learning_rate": 9.523809523809524e-05,
+ "loss": 0.8259,
+ "step": 441
+ },
+ {
+ "epoch": 0.574025974025974,
+ "grad_norm": 9.205062866210938,
+ "learning_rate": 9.545454545454546e-05,
+ "loss": 0.7051,
+ "step": 442
+ },
+ {
+ "epoch": 0.5753246753246753,
+ "grad_norm": 9.241551399230957,
+ "learning_rate": 9.567099567099568e-05,
+ "loss": 0.591,
+ "step": 443
+ },
+ {
+ "epoch": 0.5766233766233766,
+ "grad_norm": 9.01000690460205,
+ "learning_rate": 9.588744588744589e-05,
+ "loss": 0.7197,
+ "step": 444
+ },
+ {
+ "epoch": 0.577922077922078,
+ "grad_norm": 8.933067321777344,
+ "learning_rate": 9.610389610389611e-05,
+ "loss": 0.6624,
+ "step": 445
+ },
+ {
+ "epoch": 0.5792207792207792,
+ "grad_norm": 10.92744255065918,
+ "learning_rate": 9.632034632034633e-05,
+ "loss": 0.9108,
+ "step": 446
+ },
+ {
+ "epoch": 0.5805194805194805,
+ "grad_norm": 9.574989318847656,
+ "learning_rate": 9.653679653679654e-05,
+ "loss": 0.7401,
+ "step": 447
+ },
+ {
+ "epoch": 0.5818181818181818,
+ "grad_norm": 9.929916381835938,
+ "learning_rate": 9.675324675324677e-05,
+ "loss": 0.7475,
+ "step": 448
+ },
+ {
+ "epoch": 0.5831168831168831,
+ "grad_norm": 10.541657447814941,
+ "learning_rate": 9.696969696969698e-05,
+ "loss": 1.0367,
+ "step": 449
+ },
+ {
+ "epoch": 0.5844155844155844,
+ "grad_norm": 9.241097450256348,
+ "learning_rate": 9.71861471861472e-05,
+ "loss": 0.819,
+ "step": 450
+ },
+ {
+ "epoch": 0.5857142857142857,
+ "grad_norm": 10.230196952819824,
+ "learning_rate": 9.74025974025974e-05,
+ "loss": 0.9914,
+ "step": 451
+ },
+ {
+ "epoch": 0.587012987012987,
+ "grad_norm": 7.502562999725342,
+ "learning_rate": 9.761904761904762e-05,
+ "loss": 0.5852,
+ "step": 452
+ },
+ {
+ "epoch": 0.5883116883116883,
+ "grad_norm": 9.432659149169922,
+ "learning_rate": 9.783549783549783e-05,
+ "loss": 0.8283,
+ "step": 453
+ },
+ {
+ "epoch": 0.5896103896103896,
+ "grad_norm": 8.886880874633789,
+ "learning_rate": 9.805194805194806e-05,
+ "loss": 0.9477,
+ "step": 454
+ },
+ {
+ "epoch": 0.5909090909090909,
+ "grad_norm": 8.474105834960938,
+ "learning_rate": 9.826839826839827e-05,
+ "loss": 0.7091,
+ "step": 455
+ },
+ {
+ "epoch": 0.5922077922077922,
+ "grad_norm": 9.925548553466797,
+ "learning_rate": 9.848484848484849e-05,
+ "loss": 0.958,
+ "step": 456
+ },
+ {
+ "epoch": 0.5935064935064935,
+ "grad_norm": 7.961300849914551,
+ "learning_rate": 9.870129870129871e-05,
+ "loss": 0.6203,
+ "step": 457
+ },
+ {
+ "epoch": 0.5948051948051948,
+ "grad_norm": 8.644482612609863,
+ "learning_rate": 9.891774891774892e-05,
+ "loss": 0.6183,
+ "step": 458
+ },
+ {
+ "epoch": 0.5961038961038961,
+ "grad_norm": 8.991077423095703,
+ "learning_rate": 9.913419913419914e-05,
+ "loss": 0.7303,
+ "step": 459
+ },
+ {
+ "epoch": 0.5974025974025974,
+ "grad_norm": 10.966276168823242,
+ "learning_rate": 9.935064935064936e-05,
+ "loss": 0.9273,
+ "step": 460
+ },
+ {
+ "epoch": 0.5987012987012987,
+ "grad_norm": 10.023669242858887,
+ "learning_rate": 9.956709956709958e-05,
+ "loss": 0.7091,
+ "step": 461
+ },
+ {
+ "epoch": 0.6,
+ "grad_norm": 11.322511672973633,
+ "learning_rate": 9.978354978354978e-05,
+ "loss": 0.9906,
+ "step": 462
+ },
+ {
+ "epoch": 0.6,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9375,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9453125,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.6539728045463562,
+ "eval_global_dataset_runtime": 113.7266,
+ "eval_global_dataset_samples_per_second": 9.417,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.921875,
+ "eval_sts-test-1024_pearson_cosine": 0.8797687244140173,
+ "eval_sts-test-1024_spearman_cosine": 0.9263218783244517,
+ "eval_sts-test-128_pearson_cosine": 0.865709098903634,
+ "eval_sts-test-128_spearman_cosine": 0.9134647903371974,
+ "eval_sts-test-256_pearson_cosine": 0.873391317761029,
+ "eval_sts-test-256_spearman_cosine": 0.9216776585388081,
+ "eval_sts-test-32_pearson_cosine": 0.835229116152323,
+ "eval_sts-test-32_spearman_cosine": 0.8990595097392636,
+ "eval_sts-test-512_pearson_cosine": 0.8784096835085939,
+ "eval_sts-test-512_spearman_cosine": 0.9279259261727042,
+ "eval_sts-test-64_pearson_cosine": 0.8518886914166497,
+ "eval_sts-test-64_spearman_cosine": 0.9082587957582707,
+ "eval_sts-test_pearson_cosine": 0.8813813147500178,
+ "eval_sts-test_spearman_cosine": 0.9273104444202163,
+ "step": 462
+ },
+ {
+ "epoch": 0.6012987012987013,
+ "grad_norm": 10.7688627243042,
+ "learning_rate": 0.0001,
+ "loss": 0.9442,
+ "step": 463
+ },
+ {
+ "epoch": 0.6025974025974026,
+ "grad_norm": 9.374990463256836,
+ "learning_rate": 9.999993497531978e-05,
+ "loss": 0.7803,
+ "step": 464
+ },
+ {
+ "epoch": 0.6038961038961039,
+ "grad_norm": 7.978273868560791,
+ "learning_rate": 9.999973990146702e-05,
+ "loss": 0.6228,
+ "step": 465
+ },
+ {
+ "epoch": 0.6051948051948052,
+ "grad_norm": 8.63952350616455,
+ "learning_rate": 9.999941477900548e-05,
+ "loss": 0.7387,
+ "step": 466
+ },
+ {
+ "epoch": 0.6064935064935065,
+ "grad_norm": 9.687905311584473,
+ "learning_rate": 9.999895960887477e-05,
+ "loss": 0.9448,
+ "step": 467
+ },
+ {
+ "epoch": 0.6077922077922078,
+ "grad_norm": 8.742199897766113,
+ "learning_rate": 9.999837439239033e-05,
+ "loss": 0.7838,
+ "step": 468
+ },
+ {
+ "epoch": 0.6090909090909091,
+ "grad_norm": 9.454602241516113,
+ "learning_rate": 9.99976591312434e-05,
+ "loss": 0.9093,
+ "step": 469
+ },
+ {
+ "epoch": 0.6103896103896104,
+ "grad_norm": 7.550654411315918,
+ "learning_rate": 9.999681382750109e-05,
+ "loss": 0.6647,
+ "step": 470
+ },
+ {
+ "epoch": 0.6116883116883117,
+ "grad_norm": 7.778524398803711,
+ "learning_rate": 9.999583848360633e-05,
+ "loss": 0.5482,
+ "step": 471
+ },
+ {
+ "epoch": 0.612987012987013,
+ "grad_norm": 8.33474063873291,
+ "learning_rate": 9.999473310237784e-05,
+ "loss": 0.6356,
+ "step": 472
+ },
+ {
+ "epoch": 0.6142857142857143,
+ "grad_norm": 10.372974395751953,
+ "learning_rate": 9.999349768701014e-05,
+ "loss": 0.8282,
+ "step": 473
+ },
+ {
+ "epoch": 0.6155844155844156,
+ "grad_norm": 9.575736045837402,
+ "learning_rate": 9.999213224107359e-05,
+ "loss": 0.8235,
+ "step": 474
+ },
+ {
+ "epoch": 0.6168831168831169,
+ "grad_norm": 8.910548210144043,
+ "learning_rate": 9.99906367685143e-05,
+ "loss": 0.6899,
+ "step": 475
+ },
+ {
+ "epoch": 0.6181818181818182,
+ "grad_norm": 8.8406400680542,
+ "learning_rate": 9.998901127365415e-05,
+ "loss": 0.6827,
+ "step": 476
+ },
+ {
+ "epoch": 0.6194805194805195,
+ "grad_norm": 9.354793548583984,
+ "learning_rate": 9.998725576119081e-05,
+ "loss": 0.7347,
+ "step": 477
+ },
+ {
+ "epoch": 0.6207792207792208,
+ "grad_norm": 9.497842788696289,
+ "learning_rate": 9.998537023619768e-05,
+ "loss": 0.8705,
+ "step": 478
+ },
+ {
+ "epoch": 0.6220779220779221,
+ "grad_norm": 9.095974922180176,
+ "learning_rate": 9.998335470412392e-05,
+ "loss": 0.8298,
+ "step": 479
+ },
+ {
+ "epoch": 0.6233766233766234,
+ "grad_norm": 8.34008502960205,
+ "learning_rate": 9.998120917079436e-05,
+ "loss": 0.7419,
+ "step": 480
+ },
+ {
+ "epoch": 0.6246753246753247,
+ "grad_norm": 9.542933464050293,
+ "learning_rate": 9.997893364240958e-05,
+ "loss": 1.1283,
+ "step": 481
+ },
+ {
+ "epoch": 0.625974025974026,
+ "grad_norm": 7.765565395355225,
+ "learning_rate": 9.997652812554582e-05,
+ "loss": 0.7298,
+ "step": 482
+ },
+ {
+ "epoch": 0.6272727272727273,
+ "grad_norm": 8.254328727722168,
+ "learning_rate": 9.997399262715498e-05,
+ "loss": 0.8972,
+ "step": 483
+ },
+ {
+ "epoch": 0.6285714285714286,
+ "grad_norm": 7.9485883712768555,
+ "learning_rate": 9.997132715456464e-05,
+ "loss": 0.7502,
+ "step": 484
+ },
+ {
+ "epoch": 0.6298701298701299,
+ "grad_norm": 7.377613544464111,
+ "learning_rate": 9.996853171547794e-05,
+ "loss": 0.6169,
+ "step": 485
+ },
+ {
+ "epoch": 0.6311688311688312,
+ "grad_norm": 8.520637512207031,
+ "learning_rate": 9.996560631797367e-05,
+ "loss": 0.7648,
+ "step": 486
+ },
+ {
+ "epoch": 0.6324675324675325,
+ "grad_norm": 8.45702075958252,
+ "learning_rate": 9.996255097050623e-05,
+ "loss": 0.8191,
+ "step": 487
+ },
+ {
+ "epoch": 0.6337662337662338,
+ "grad_norm": 9.343992233276367,
+ "learning_rate": 9.995936568190549e-05,
+ "loss": 0.8094,
+ "step": 488
+ },
+ {
+ "epoch": 0.6350649350649351,
+ "grad_norm": 9.630705833435059,
+ "learning_rate": 9.995605046137689e-05,
+ "loss": 0.9006,
+ "step": 489
+ },
+ {
+ "epoch": 0.6363636363636364,
+ "grad_norm": 9.293671607971191,
+ "learning_rate": 9.995260531850137e-05,
+ "loss": 0.8384,
+ "step": 490
+ },
+ {
+ "epoch": 0.6376623376623377,
+ "grad_norm": 8.072113990783691,
+ "learning_rate": 9.994903026323536e-05,
+ "loss": 0.6431,
+ "step": 491
+ },
+ {
+ "epoch": 0.638961038961039,
+ "grad_norm": 7.910200595855713,
+ "learning_rate": 9.994532530591071e-05,
+ "loss": 0.7507,
+ "step": 492
+ },
+ {
+ "epoch": 0.6402597402597403,
+ "grad_norm": 9.722458839416504,
+ "learning_rate": 9.994149045723469e-05,
+ "loss": 1.0524,
+ "step": 493
+ },
+ {
+ "epoch": 0.6415584415584416,
+ "grad_norm": 9.438432693481445,
+ "learning_rate": 9.993752572828996e-05,
+ "loss": 0.9618,
+ "step": 494
+ },
+ {
+ "epoch": 0.6428571428571429,
+ "grad_norm": 8.785840034484863,
+ "learning_rate": 9.993343113053453e-05,
+ "loss": 0.8119,
+ "step": 495
+ },
+ {
+ "epoch": 0.6441558441558441,
+ "grad_norm": 8.101314544677734,
+ "learning_rate": 9.992920667580176e-05,
+ "loss": 0.6538,
+ "step": 496
+ },
+ {
+ "epoch": 0.6454545454545455,
+ "grad_norm": 8.958012580871582,
+ "learning_rate": 9.99248523763002e-05,
+ "loss": 0.8877,
+ "step": 497
+ },
+ {
+ "epoch": 0.6467532467532467,
+ "grad_norm": 11.046570777893066,
+ "learning_rate": 9.992036824461376e-05,
+ "loss": 1.0556,
+ "step": 498
+ },
+ {
+ "epoch": 0.6480519480519481,
+ "grad_norm": 8.324684143066406,
+ "learning_rate": 9.991575429370151e-05,
+ "loss": 0.6788,
+ "step": 499
+ },
+ {
+ "epoch": 0.6493506493506493,
+ "grad_norm": 9.95218276977539,
+ "learning_rate": 9.99110105368977e-05,
+ "loss": 0.9637,
+ "step": 500
+ },
+ {
+ "epoch": 0.6506493506493507,
+ "grad_norm": 9.085275650024414,
+ "learning_rate": 9.990613698791173e-05,
+ "loss": 0.8143,
+ "step": 501
+ },
+ {
+ "epoch": 0.6519480519480519,
+ "grad_norm": 8.417174339294434,
+ "learning_rate": 9.990113366082806e-05,
+ "loss": 0.6673,
+ "step": 502
+ },
+ {
+ "epoch": 0.6532467532467533,
+ "grad_norm": 8.649909019470215,
+ "learning_rate": 9.989600057010624e-05,
+ "loss": 0.8358,
+ "step": 503
+ },
+ {
+ "epoch": 0.6545454545454545,
+ "grad_norm": 8.382777214050293,
+ "learning_rate": 9.989073773058086e-05,
+ "loss": 0.7588,
+ "step": 504
+ },
+ {
+ "epoch": 0.6558441558441559,
+ "grad_norm": 8.671955108642578,
+ "learning_rate": 9.98853451574614e-05,
+ "loss": 0.8241,
+ "step": 505
+ },
+ {
+ "epoch": 0.6571428571428571,
+ "grad_norm": 8.806478500366211,
+ "learning_rate": 9.987982286633239e-05,
+ "loss": 0.925,
+ "step": 506
+ },
+ {
+ "epoch": 0.6584415584415585,
+ "grad_norm": 7.872350215911865,
+ "learning_rate": 9.987417087315311e-05,
+ "loss": 0.6336,
+ "step": 507
+ },
+ {
+ "epoch": 0.6597402597402597,
+ "grad_norm": 8.123810768127441,
+ "learning_rate": 9.986838919425777e-05,
+ "loss": 0.6907,
+ "step": 508
+ },
+ {
+ "epoch": 0.6610389610389611,
+ "grad_norm": 9.4236421585083,
+ "learning_rate": 9.986247784635533e-05,
+ "loss": 0.8237,
+ "step": 509
+ },
+ {
+ "epoch": 0.6623376623376623,
+ "grad_norm": 9.10471248626709,
+ "learning_rate": 9.98564368465295e-05,
+ "loss": 0.828,
+ "step": 510
+ },
+ {
+ "epoch": 0.6636363636363637,
+ "grad_norm": 8.953938484191895,
+ "learning_rate": 9.98502662122387e-05,
+ "loss": 0.6624,
+ "step": 511
+ },
+ {
+ "epoch": 0.6649350649350649,
+ "grad_norm": 9.512434959411621,
+ "learning_rate": 9.984396596131595e-05,
+ "loss": 0.8069,
+ "step": 512
+ },
+ {
+ "epoch": 0.6662337662337663,
+ "grad_norm": 7.959551811218262,
+ "learning_rate": 9.98375361119689e-05,
+ "loss": 0.566,
+ "step": 513
+ },
+ {
+ "epoch": 0.6675324675324675,
+ "grad_norm": 10.5604887008667,
+ "learning_rate": 9.983097668277975e-05,
+ "loss": 1.0294,
+ "step": 514
+ },
+ {
+ "epoch": 0.6688311688311688,
+ "grad_norm": 9.543386459350586,
+ "learning_rate": 9.98242876927051e-05,
+ "loss": 0.8889,
+ "step": 515
+ },
+ {
+ "epoch": 0.6701298701298701,
+ "grad_norm": 7.930560111999512,
+ "learning_rate": 9.981746916107607e-05,
+ "loss": 0.6539,
+ "step": 516
+ },
+ {
+ "epoch": 0.6714285714285714,
+ "grad_norm": 8.005714416503906,
+ "learning_rate": 9.981052110759813e-05,
+ "loss": 0.7279,
+ "step": 517
+ },
+ {
+ "epoch": 0.6727272727272727,
+ "grad_norm": 8.585083961486816,
+ "learning_rate": 9.980344355235102e-05,
+ "loss": 0.7805,
+ "step": 518
+ },
+ {
+ "epoch": 0.674025974025974,
+ "grad_norm": 7.590928077697754,
+ "learning_rate": 9.979623651578881e-05,
+ "loss": 0.6657,
+ "step": 519
+ },
+ {
+ "epoch": 0.6753246753246753,
+ "grad_norm": 6.786378860473633,
+ "learning_rate": 9.978890001873971e-05,
+ "loss": 0.5069,
+ "step": 520
+ },
+ {
+ "epoch": 0.6766233766233766,
+ "grad_norm": 9.419742584228516,
+ "learning_rate": 9.978143408240611e-05,
+ "loss": 0.9967,
+ "step": 521
+ },
+ {
+ "epoch": 0.6779220779220779,
+ "grad_norm": 8.207934379577637,
+ "learning_rate": 9.977383872836444e-05,
+ "loss": 0.719,
+ "step": 522
+ },
+ {
+ "epoch": 0.6792207792207792,
+ "grad_norm": 7.318924427032471,
+ "learning_rate": 9.976611397856519e-05,
+ "loss": 0.4709,
+ "step": 523
+ },
+ {
+ "epoch": 0.6805194805194805,
+ "grad_norm": 8.9629545211792,
+ "learning_rate": 9.975825985533274e-05,
+ "loss": 0.7421,
+ "step": 524
+ },
+ {
+ "epoch": 0.6818181818181818,
+ "grad_norm": 8.531639099121094,
+ "learning_rate": 9.975027638136544e-05,
+ "loss": 0.7005,
+ "step": 525
+ },
+ {
+ "epoch": 0.6831168831168831,
+ "grad_norm": 10.551216125488281,
+ "learning_rate": 9.974216357973539e-05,
+ "loss": 0.9615,
+ "step": 526
+ },
+ {
+ "epoch": 0.6844155844155844,
+ "grad_norm": 9.172273635864258,
+ "learning_rate": 9.973392147388847e-05,
+ "loss": 0.7011,
+ "step": 527
+ },
+ {
+ "epoch": 0.6857142857142857,
+ "grad_norm": 8.73135757446289,
+ "learning_rate": 9.972555008764424e-05,
+ "loss": 0.7021,
+ "step": 528
+ },
+ {
+ "epoch": 0.687012987012987,
+ "grad_norm": 9.322381019592285,
+ "learning_rate": 9.971704944519594e-05,
+ "loss": 0.7954,
+ "step": 529
+ },
+ {
+ "epoch": 0.6883116883116883,
+ "grad_norm": 10.328460693359375,
+ "learning_rate": 9.970841957111022e-05,
+ "loss": 1.0179,
+ "step": 530
+ },
+ {
+ "epoch": 0.6896103896103896,
+ "grad_norm": 7.732416152954102,
+ "learning_rate": 9.969966049032736e-05,
+ "loss": 0.6674,
+ "step": 531
+ },
+ {
+ "epoch": 0.6909090909090909,
+ "grad_norm": 8.953222274780273,
+ "learning_rate": 9.969077222816096e-05,
+ "loss": 0.7634,
+ "step": 532
+ },
+ {
+ "epoch": 0.6922077922077922,
+ "grad_norm": 6.733580589294434,
+ "learning_rate": 9.968175481029797e-05,
+ "loss": 0.5306,
+ "step": 533
+ },
+ {
+ "epoch": 0.6935064935064935,
+ "grad_norm": 7.927433013916016,
+ "learning_rate": 9.96726082627986e-05,
+ "loss": 0.6792,
+ "step": 534
+ },
+ {
+ "epoch": 0.6948051948051948,
+ "grad_norm": 8.450961112976074,
+ "learning_rate": 9.966333261209626e-05,
+ "loss": 0.8223,
+ "step": 535
+ },
+ {
+ "epoch": 0.6961038961038961,
+ "grad_norm": 8.986429214477539,
+ "learning_rate": 9.965392788499741e-05,
+ "loss": 0.9346,
+ "step": 536
+ },
+ {
+ "epoch": 0.6974025974025974,
+ "grad_norm": 7.740746021270752,
+ "learning_rate": 9.96443941086816e-05,
+ "loss": 0.7369,
+ "step": 537
+ },
+ {
+ "epoch": 0.6987012987012987,
+ "grad_norm": 8.5305757522583,
+ "learning_rate": 9.963473131070133e-05,
+ "loss": 0.7636,
+ "step": 538
+ },
+ {
+ "epoch": 0.7,
+ "grad_norm": 7.068668842315674,
+ "learning_rate": 9.96249395189819e-05,
+ "loss": 0.632,
+ "step": 539
+ },
+ {
+ "epoch": 0.7,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.8984375,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.8984375,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.921875,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9296875,
+ "eval_global_dataset_loss": 0.6427525877952576,
+ "eval_global_dataset_runtime": 113.506,
+ "eval_global_dataset_samples_per_second": 9.436,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.8984375,
+ "eval_sts-test-1024_pearson_cosine": 0.8873943324412861,
+ "eval_sts-test-1024_spearman_cosine": 0.9253282995791614,
+ "eval_sts-test-128_pearson_cosine": 0.8722931553862875,
+ "eval_sts-test-128_spearman_cosine": 0.9168565638341831,
+ "eval_sts-test-256_pearson_cosine": 0.8785673618962662,
+ "eval_sts-test-256_spearman_cosine": 0.9222670029044829,
+ "eval_sts-test-32_pearson_cosine": 0.8547551484487256,
+ "eval_sts-test-32_spearman_cosine": 0.9122828791882953,
+ "eval_sts-test-512_pearson_cosine": 0.8852620294819122,
+ "eval_sts-test-512_spearman_cosine": 0.9242760012251404,
+ "eval_sts-test-64_pearson_cosine": 0.8669129026803459,
+ "eval_sts-test-64_spearman_cosine": 0.9143251758236597,
+ "eval_sts-test_pearson_cosine": 0.8900120725090723,
+ "eval_sts-test_spearman_cosine": 0.9241635746572048,
+ "step": 539
+ },
+ {
+ "epoch": 0.7012987012987013,
+ "grad_norm": 8.080013275146484,
+ "learning_rate": 9.961501876182148e-05,
+ "loss": 0.672,
+ "step": 540
+ },
+ {
+ "epoch": 0.7025974025974026,
+ "grad_norm": 8.661029815673828,
+ "learning_rate": 9.960496906789089e-05,
+ "loss": 0.8924,
+ "step": 541
+ },
+ {
+ "epoch": 0.7038961038961039,
+ "grad_norm": 8.918992042541504,
+ "learning_rate": 9.959479046623367e-05,
+ "loss": 0.822,
+ "step": 542
+ },
+ {
+ "epoch": 0.7051948051948052,
+ "grad_norm": 7.410845756530762,
+ "learning_rate": 9.958448298626576e-05,
+ "loss": 0.4934,
+ "step": 543
+ },
+ {
+ "epoch": 0.7064935064935065,
+ "grad_norm": 7.634463310241699,
+ "learning_rate": 9.957404665777567e-05,
+ "loss": 0.5939,
+ "step": 544
+ },
+ {
+ "epoch": 0.7077922077922078,
+ "grad_norm": 7.014540195465088,
+ "learning_rate": 9.956348151092421e-05,
+ "loss": 0.5593,
+ "step": 545
+ },
+ {
+ "epoch": 0.7090909090909091,
+ "grad_norm": 8.57436466217041,
+ "learning_rate": 9.955278757624455e-05,
+ "loss": 0.737,
+ "step": 546
+ },
+ {
+ "epoch": 0.7103896103896103,
+ "grad_norm": 8.323346138000488,
+ "learning_rate": 9.954196488464198e-05,
+ "loss": 0.6734,
+ "step": 547
+ },
+ {
+ "epoch": 0.7116883116883117,
+ "grad_norm": 9.893049240112305,
+ "learning_rate": 9.953101346739391e-05,
+ "loss": 0.949,
+ "step": 548
+ },
+ {
+ "epoch": 0.712987012987013,
+ "grad_norm": 9.634085655212402,
+ "learning_rate": 9.951993335614982e-05,
+ "loss": 0.9343,
+ "step": 549
+ },
+ {
+ "epoch": 0.7142857142857143,
+ "grad_norm": 6.592012882232666,
+ "learning_rate": 9.950872458293105e-05,
+ "loss": 0.4662,
+ "step": 550
+ },
+ {
+ "epoch": 0.7155844155844155,
+ "grad_norm": 7.7272186279296875,
+ "learning_rate": 9.949738718013078e-05,
+ "loss": 0.6602,
+ "step": 551
+ },
+ {
+ "epoch": 0.7168831168831169,
+ "grad_norm": 7.066654682159424,
+ "learning_rate": 9.948592118051398e-05,
+ "loss": 0.6273,
+ "step": 552
+ },
+ {
+ "epoch": 0.7181818181818181,
+ "grad_norm": 7.870489120483398,
+ "learning_rate": 9.94743266172172e-05,
+ "loss": 0.665,
+ "step": 553
+ },
+ {
+ "epoch": 0.7194805194805195,
+ "grad_norm": 8.590832710266113,
+ "learning_rate": 9.946260352374857e-05,
+ "loss": 0.7826,
+ "step": 554
+ },
+ {
+ "epoch": 0.7207792207792207,
+ "grad_norm": 9.697837829589844,
+ "learning_rate": 9.945075193398767e-05,
+ "loss": 1.0379,
+ "step": 555
+ },
+ {
+ "epoch": 0.7220779220779221,
+ "grad_norm": 7.371159076690674,
+ "learning_rate": 9.943877188218541e-05,
+ "loss": 0.5787,
+ "step": 556
+ },
+ {
+ "epoch": 0.7233766233766233,
+ "grad_norm": 7.7747907638549805,
+ "learning_rate": 9.942666340296398e-05,
+ "loss": 0.6639,
+ "step": 557
+ },
+ {
+ "epoch": 0.7246753246753247,
+ "grad_norm": 6.700680732727051,
+ "learning_rate": 9.941442653131673e-05,
+ "loss": 0.5511,
+ "step": 558
+ },
+ {
+ "epoch": 0.7259740259740259,
+ "grad_norm": 7.617847442626953,
+ "learning_rate": 9.9402061302608e-05,
+ "loss": 0.6313,
+ "step": 559
+ },
+ {
+ "epoch": 0.7272727272727273,
+ "grad_norm": 8.378815650939941,
+ "learning_rate": 9.938956775257318e-05,
+ "loss": 0.7771,
+ "step": 560
+ },
+ {
+ "epoch": 0.7285714285714285,
+ "grad_norm": 5.534984111785889,
+ "learning_rate": 9.937694591731841e-05,
+ "loss": 0.4077,
+ "step": 561
+ },
+ {
+ "epoch": 0.7298701298701299,
+ "grad_norm": 8.180028915405273,
+ "learning_rate": 9.936419583332062e-05,
+ "loss": 0.6628,
+ "step": 562
+ },
+ {
+ "epoch": 0.7311688311688311,
+ "grad_norm": 7.878411769866943,
+ "learning_rate": 9.935131753742737e-05,
+ "loss": 0.5646,
+ "step": 563
+ },
+ {
+ "epoch": 0.7324675324675325,
+ "grad_norm": 7.941530704498291,
+ "learning_rate": 9.933831106685679e-05,
+ "loss": 0.5972,
+ "step": 564
+ },
+ {
+ "epoch": 0.7337662337662337,
+ "grad_norm": 10.171013832092285,
+ "learning_rate": 9.932517645919738e-05,
+ "loss": 1.0101,
+ "step": 565
+ },
+ {
+ "epoch": 0.7350649350649351,
+ "grad_norm": 9.271905899047852,
+ "learning_rate": 9.931191375240792e-05,
+ "loss": 0.9496,
+ "step": 566
+ },
+ {
+ "epoch": 0.7363636363636363,
+ "grad_norm": 7.886529445648193,
+ "learning_rate": 9.92985229848175e-05,
+ "loss": 0.6219,
+ "step": 567
+ },
+ {
+ "epoch": 0.7376623376623377,
+ "grad_norm": 7.918776035308838,
+ "learning_rate": 9.928500419512521e-05,
+ "loss": 0.7275,
+ "step": 568
+ },
+ {
+ "epoch": 0.7389610389610389,
+ "grad_norm": 8.45152759552002,
+ "learning_rate": 9.927135742240022e-05,
+ "loss": 0.905,
+ "step": 569
+ },
+ {
+ "epoch": 0.7402597402597403,
+ "grad_norm": 7.562143325805664,
+ "learning_rate": 9.925758270608144e-05,
+ "loss": 0.7303,
+ "step": 570
+ },
+ {
+ "epoch": 0.7415584415584415,
+ "grad_norm": 7.238564968109131,
+ "learning_rate": 9.924368008597763e-05,
+ "loss": 0.6518,
+ "step": 571
+ },
+ {
+ "epoch": 0.7428571428571429,
+ "grad_norm": 6.896720886230469,
+ "learning_rate": 9.922964960226718e-05,
+ "loss": 0.6725,
+ "step": 572
+ },
+ {
+ "epoch": 0.7441558441558441,
+ "grad_norm": 7.941315650939941,
+ "learning_rate": 9.921549129549798e-05,
+ "loss": 0.8026,
+ "step": 573
+ },
+ {
+ "epoch": 0.7454545454545455,
+ "grad_norm": 6.776406288146973,
+ "learning_rate": 9.920120520658732e-05,
+ "loss": 0.6042,
+ "step": 574
+ },
+ {
+ "epoch": 0.7467532467532467,
+ "grad_norm": 7.763830661773682,
+ "learning_rate": 9.91867913768218e-05,
+ "loss": 0.7192,
+ "step": 575
+ },
+ {
+ "epoch": 0.7480519480519481,
+ "grad_norm": 7.134865760803223,
+ "learning_rate": 9.91722498478572e-05,
+ "loss": 0.6089,
+ "step": 576
+ },
+ {
+ "epoch": 0.7493506493506493,
+ "grad_norm": 8.958261489868164,
+ "learning_rate": 9.915758066171833e-05,
+ "loss": 0.92,
+ "step": 577
+ },
+ {
+ "epoch": 0.7506493506493507,
+ "grad_norm": 8.438769340515137,
+ "learning_rate": 9.91427838607989e-05,
+ "loss": 0.81,
+ "step": 578
+ },
+ {
+ "epoch": 0.7519480519480519,
+ "grad_norm": 8.106558799743652,
+ "learning_rate": 9.912785948786149e-05,
+ "loss": 0.7004,
+ "step": 579
+ },
+ {
+ "epoch": 0.7532467532467533,
+ "grad_norm": 7.914773464202881,
+ "learning_rate": 9.911280758603728e-05,
+ "loss": 0.636,
+ "step": 580
+ },
+ {
+ "epoch": 0.7545454545454545,
+ "grad_norm": 7.522397994995117,
+ "learning_rate": 9.90976281988261e-05,
+ "loss": 0.6059,
+ "step": 581
+ },
+ {
+ "epoch": 0.7558441558441559,
+ "grad_norm": 6.786513328552246,
+ "learning_rate": 9.908232137009617e-05,
+ "loss": 0.5995,
+ "step": 582
+ },
+ {
+ "epoch": 0.7571428571428571,
+ "grad_norm": 8.496878623962402,
+ "learning_rate": 9.906688714408396e-05,
+ "loss": 0.8563,
+ "step": 583
+ },
+ {
+ "epoch": 0.7584415584415585,
+ "grad_norm": 7.118294715881348,
+ "learning_rate": 9.905132556539418e-05,
+ "loss": 0.6008,
+ "step": 584
+ },
+ {
+ "epoch": 0.7597402597402597,
+ "grad_norm": 7.351891994476318,
+ "learning_rate": 9.90356366789996e-05,
+ "loss": 0.6576,
+ "step": 585
+ },
+ {
+ "epoch": 0.7610389610389611,
+ "grad_norm": 6.669191360473633,
+ "learning_rate": 9.901982053024083e-05,
+ "loss": 0.5438,
+ "step": 586
+ },
+ {
+ "epoch": 0.7623376623376623,
+ "grad_norm": 7.092779636383057,
+ "learning_rate": 9.900387716482637e-05,
+ "loss": 0.6347,
+ "step": 587
+ },
+ {
+ "epoch": 0.7636363636363637,
+ "grad_norm": 6.913083553314209,
+ "learning_rate": 9.898780662883227e-05,
+ "loss": 0.6002,
+ "step": 588
+ },
+ {
+ "epoch": 0.7649350649350649,
+ "grad_norm": 8.093656539916992,
+ "learning_rate": 9.897160896870218e-05,
+ "loss": 0.726,
+ "step": 589
+ },
+ {
+ "epoch": 0.7662337662337663,
+ "grad_norm": 8.512179374694824,
+ "learning_rate": 9.89552842312471e-05,
+ "loss": 0.8955,
+ "step": 590
+ },
+ {
+ "epoch": 0.7675324675324675,
+ "grad_norm": 7.220069408416748,
+ "learning_rate": 9.89388324636453e-05,
+ "loss": 0.5638,
+ "step": 591
+ },
+ {
+ "epoch": 0.7688311688311689,
+ "grad_norm": 7.283459186553955,
+ "learning_rate": 9.892225371344214e-05,
+ "loss": 0.6315,
+ "step": 592
+ },
+ {
+ "epoch": 0.7701298701298701,
+ "grad_norm": 7.870151042938232,
+ "learning_rate": 9.890554802854995e-05,
+ "loss": 0.7253,
+ "step": 593
+ },
+ {
+ "epoch": 0.7714285714285715,
+ "grad_norm": 7.145277500152588,
+ "learning_rate": 9.888871545724798e-05,
+ "loss": 0.6148,
+ "step": 594
+ },
+ {
+ "epoch": 0.7727272727272727,
+ "grad_norm": 6.762304782867432,
+ "learning_rate": 9.887175604818206e-05,
+ "loss": 0.4651,
+ "step": 595
+ },
+ {
+ "epoch": 0.7740259740259741,
+ "grad_norm": 6.68051290512085,
+ "learning_rate": 9.885466985036468e-05,
+ "loss": 0.5097,
+ "step": 596
+ },
+ {
+ "epoch": 0.7753246753246753,
+ "grad_norm": 8.16653060913086,
+ "learning_rate": 9.883745691317472e-05,
+ "loss": 0.7487,
+ "step": 597
+ },
+ {
+ "epoch": 0.7766233766233767,
+ "grad_norm": 7.994803428649902,
+ "learning_rate": 9.88201172863573e-05,
+ "loss": 0.7269,
+ "step": 598
+ },
+ {
+ "epoch": 0.7779220779220779,
+ "grad_norm": 8.129690170288086,
+ "learning_rate": 9.880265102002369e-05,
+ "loss": 0.8212,
+ "step": 599
+ },
+ {
+ "epoch": 0.7792207792207793,
+ "grad_norm": 8.369062423706055,
+ "learning_rate": 9.878505816465115e-05,
+ "loss": 0.822,
+ "step": 600
+ },
+ {
+ "epoch": 0.7805194805194805,
+ "grad_norm": 7.65264892578125,
+ "learning_rate": 9.876733877108281e-05,
+ "loss": 0.6172,
+ "step": 601
+ },
+ {
+ "epoch": 0.7818181818181819,
+ "grad_norm": 7.348762035369873,
+ "learning_rate": 9.874949289052744e-05,
+ "loss": 0.6488,
+ "step": 602
+ },
+ {
+ "epoch": 0.7831168831168831,
+ "grad_norm": 8.416467666625977,
+ "learning_rate": 9.873152057455939e-05,
+ "loss": 0.8928,
+ "step": 603
+ },
+ {
+ "epoch": 0.7844155844155845,
+ "grad_norm": 7.411664962768555,
+ "learning_rate": 9.871342187511838e-05,
+ "loss": 0.7068,
+ "step": 604
+ },
+ {
+ "epoch": 0.7857142857142857,
+ "grad_norm": 7.342609882354736,
+ "learning_rate": 9.869519684450943e-05,
+ "loss": 0.6558,
+ "step": 605
+ },
+ {
+ "epoch": 0.787012987012987,
+ "grad_norm": 6.897357940673828,
+ "learning_rate": 9.86768455354026e-05,
+ "loss": 0.6012,
+ "step": 606
+ },
+ {
+ "epoch": 0.7883116883116883,
+ "grad_norm": 7.639064788818359,
+ "learning_rate": 9.865836800083291e-05,
+ "loss": 0.7806,
+ "step": 607
+ },
+ {
+ "epoch": 0.7896103896103897,
+ "grad_norm": 6.563958168029785,
+ "learning_rate": 9.863976429420017e-05,
+ "loss": 0.5802,
+ "step": 608
+ },
+ {
+ "epoch": 0.7909090909090909,
+ "grad_norm": 7.272040367126465,
+ "learning_rate": 9.862103446926885e-05,
+ "loss": 0.7414,
+ "step": 609
+ },
+ {
+ "epoch": 0.7922077922077922,
+ "grad_norm": 7.528412818908691,
+ "learning_rate": 9.860217858016784e-05,
+ "loss": 0.6715,
+ "step": 610
+ },
+ {
+ "epoch": 0.7935064935064935,
+ "grad_norm": 6.993316173553467,
+ "learning_rate": 9.858319668139041e-05,
+ "loss": 0.6947,
+ "step": 611
+ },
+ {
+ "epoch": 0.7948051948051948,
+ "grad_norm": 6.731951713562012,
+ "learning_rate": 9.856408882779398e-05,
+ "loss": 0.6908,
+ "step": 612
+ },
+ {
+ "epoch": 0.7961038961038961,
+ "grad_norm": 6.824806213378906,
+ "learning_rate": 9.85448550746e-05,
+ "loss": 0.587,
+ "step": 613
+ },
+ {
+ "epoch": 0.7974025974025974,
+ "grad_norm": 7.066167831420898,
+ "learning_rate": 9.85254954773937e-05,
+ "loss": 0.642,
+ "step": 614
+ },
+ {
+ "epoch": 0.7987012987012987,
+ "grad_norm": 6.381121635437012,
+ "learning_rate": 9.850601009212407e-05,
+ "loss": 0.5082,
+ "step": 615
+ },
+ {
+ "epoch": 0.8,
+ "grad_norm": 7.728446960449219,
+ "learning_rate": 9.848639897510358e-05,
+ "loss": 0.6574,
+ "step": 616
+ },
+ {
+ "epoch": 0.8,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9453125,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.6188308000564575,
+ "eval_global_dataset_runtime": 113.7163,
+ "eval_global_dataset_samples_per_second": 9.418,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.9296875,
+ "eval_sts-test-1024_pearson_cosine": 0.8912631824974158,
+ "eval_sts-test-1024_spearman_cosine": 0.9274389830759132,
+ "eval_sts-test-128_pearson_cosine": 0.8876748515587677,
+ "eval_sts-test-128_spearman_cosine": 0.9237690075302462,
+ "eval_sts-test-256_pearson_cosine": 0.8892868596959533,
+ "eval_sts-test-256_spearman_cosine": 0.9269749549483829,
+ "eval_sts-test-32_pearson_cosine": 0.8512951958187567,
+ "eval_sts-test-32_spearman_cosine": 0.9137630429839818,
+ "eval_sts-test-512_pearson_cosine": 0.8923752440183839,
+ "eval_sts-test-512_spearman_cosine": 0.9280265372096147,
+ "eval_sts-test-64_pearson_cosine": 0.8733490483495416,
+ "eval_sts-test-64_spearman_cosine": 0.9214732140474348,
+ "eval_sts-test_pearson_cosine": 0.8935100137551328,
+ "eval_sts-test_spearman_cosine": 0.9288851324641034,
+ "step": 616
+ },
+ {
+ "epoch": 0.8012987012987013,
+ "grad_norm": 6.461012840270996,
+ "learning_rate": 9.846666218300807e-05,
+ "loss": 0.5828,
+ "step": 617
+ },
+ {
+ "epoch": 0.8025974025974026,
+ "grad_norm": 8.968891143798828,
+ "learning_rate": 9.844679977287661e-05,
+ "loss": 0.8306,
+ "step": 618
+ },
+ {
+ "epoch": 0.8038961038961039,
+ "grad_norm": 7.8341546058654785,
+ "learning_rate": 9.84268118021113e-05,
+ "loss": 0.6332,
+ "step": 619
+ },
+ {
+ "epoch": 0.8051948051948052,
+ "grad_norm": 6.980321884155273,
+ "learning_rate": 9.840669832847705e-05,
+ "loss": 0.6006,
+ "step": 620
+ },
+ {
+ "epoch": 0.8064935064935065,
+ "grad_norm": 6.770340442657471,
+ "learning_rate": 9.838645941010154e-05,
+ "loss": 0.5365,
+ "step": 621
+ },
+ {
+ "epoch": 0.8077922077922078,
+ "grad_norm": 7.772369384765625,
+ "learning_rate": 9.836609510547494e-05,
+ "loss": 0.6386,
+ "step": 622
+ },
+ {
+ "epoch": 0.8090909090909091,
+ "grad_norm": 7.803502082824707,
+ "learning_rate": 9.834560547344981e-05,
+ "loss": 0.684,
+ "step": 623
+ },
+ {
+ "epoch": 0.8103896103896104,
+ "grad_norm": 8.83973503112793,
+ "learning_rate": 9.83249905732409e-05,
+ "loss": 0.9585,
+ "step": 624
+ },
+ {
+ "epoch": 0.8116883116883117,
+ "grad_norm": 6.967188835144043,
+ "learning_rate": 9.830425046442496e-05,
+ "loss": 0.6687,
+ "step": 625
+ },
+ {
+ "epoch": 0.812987012987013,
+ "grad_norm": 7.9906487464904785,
+ "learning_rate": 9.828338520694064e-05,
+ "loss": 0.7117,
+ "step": 626
+ },
+ {
+ "epoch": 0.8142857142857143,
+ "grad_norm": 7.777108192443848,
+ "learning_rate": 9.826239486108822e-05,
+ "loss": 0.7052,
+ "step": 627
+ },
+ {
+ "epoch": 0.8155844155844156,
+ "grad_norm": 7.411710262298584,
+ "learning_rate": 9.82412794875295e-05,
+ "loss": 0.7474,
+ "step": 628
+ },
+ {
+ "epoch": 0.8168831168831169,
+ "grad_norm": 6.999947547912598,
+ "learning_rate": 9.822003914728759e-05,
+ "loss": 0.7309,
+ "step": 629
+ },
+ {
+ "epoch": 0.8181818181818182,
+ "grad_norm": 6.437720775604248,
+ "learning_rate": 9.81986739017468e-05,
+ "loss": 0.6185,
+ "step": 630
+ },
+ {
+ "epoch": 0.8194805194805195,
+ "grad_norm": 6.909036159515381,
+ "learning_rate": 9.81771838126524e-05,
+ "loss": 0.7055,
+ "step": 631
+ },
+ {
+ "epoch": 0.8207792207792208,
+ "grad_norm": 7.5900349617004395,
+ "learning_rate": 9.815556894211038e-05,
+ "loss": 0.8739,
+ "step": 632
+ },
+ {
+ "epoch": 0.8220779220779221,
+ "grad_norm": 6.764601230621338,
+ "learning_rate": 9.813382935258749e-05,
+ "loss": 0.7134,
+ "step": 633
+ },
+ {
+ "epoch": 0.8233766233766234,
+ "grad_norm": 6.6621012687683105,
+ "learning_rate": 9.811196510691077e-05,
+ "loss": 0.7014,
+ "step": 634
+ },
+ {
+ "epoch": 0.8246753246753247,
+ "grad_norm": 6.744335651397705,
+ "learning_rate": 9.808997626826762e-05,
+ "loss": 0.6907,
+ "step": 635
+ },
+ {
+ "epoch": 0.825974025974026,
+ "grad_norm": 6.413692474365234,
+ "learning_rate": 9.806786290020544e-05,
+ "loss": 0.6316,
+ "step": 636
+ },
+ {
+ "epoch": 0.8272727272727273,
+ "grad_norm": 8.33804988861084,
+ "learning_rate": 9.804562506663157e-05,
+ "loss": 1.013,
+ "step": 637
+ },
+ {
+ "epoch": 0.8285714285714286,
+ "grad_norm": 7.601145267486572,
+ "learning_rate": 9.802326283181302e-05,
+ "loss": 0.7022,
+ "step": 638
+ },
+ {
+ "epoch": 0.8298701298701299,
+ "grad_norm": 6.36918830871582,
+ "learning_rate": 9.800077626037634e-05,
+ "loss": 0.5908,
+ "step": 639
+ },
+ {
+ "epoch": 0.8311688311688312,
+ "grad_norm": 6.497655391693115,
+ "learning_rate": 9.79781654173074e-05,
+ "loss": 0.6008,
+ "step": 640
+ },
+ {
+ "epoch": 0.8324675324675325,
+ "grad_norm": 6.970991134643555,
+ "learning_rate": 9.795543036795122e-05,
+ "loss": 0.6932,
+ "step": 641
+ },
+ {
+ "epoch": 0.8337662337662337,
+ "grad_norm": 6.388566970825195,
+ "learning_rate": 9.793257117801176e-05,
+ "loss": 0.5935,
+ "step": 642
+ },
+ {
+ "epoch": 0.8350649350649351,
+ "grad_norm": 7.75444221496582,
+ "learning_rate": 9.790958791355175e-05,
+ "loss": 0.7817,
+ "step": 643
+ },
+ {
+ "epoch": 0.8363636363636363,
+ "grad_norm": 6.7867960929870605,
+ "learning_rate": 9.78864806409925e-05,
+ "loss": 0.5923,
+ "step": 644
+ },
+ {
+ "epoch": 0.8376623376623377,
+ "grad_norm": 6.315074920654297,
+ "learning_rate": 9.786324942711372e-05,
+ "loss": 0.5689,
+ "step": 645
+ },
+ {
+ "epoch": 0.8389610389610389,
+ "grad_norm": 6.039573669433594,
+ "learning_rate": 9.783989433905328e-05,
+ "loss": 0.5501,
+ "step": 646
+ },
+ {
+ "epoch": 0.8402597402597403,
+ "grad_norm": 7.127732753753662,
+ "learning_rate": 9.781641544430703e-05,
+ "loss": 0.7422,
+ "step": 647
+ },
+ {
+ "epoch": 0.8415584415584415,
+ "grad_norm": 8.375782012939453,
+ "learning_rate": 9.779281281072868e-05,
+ "loss": 0.9285,
+ "step": 648
+ },
+ {
+ "epoch": 0.8428571428571429,
+ "grad_norm": 6.235222816467285,
+ "learning_rate": 9.776908650652946e-05,
+ "loss": 0.6054,
+ "step": 649
+ },
+ {
+ "epoch": 0.8441558441558441,
+ "grad_norm": 6.5698771476745605,
+ "learning_rate": 9.774523660027807e-05,
+ "loss": 0.6312,
+ "step": 650
+ },
+ {
+ "epoch": 0.8454545454545455,
+ "grad_norm": 8.005461692810059,
+ "learning_rate": 9.772126316090041e-05,
+ "loss": 0.9075,
+ "step": 651
+ },
+ {
+ "epoch": 0.8467532467532467,
+ "grad_norm": 6.392460823059082,
+ "learning_rate": 9.769716625767939e-05,
+ "loss": 0.5705,
+ "step": 652
+ },
+ {
+ "epoch": 0.8480519480519481,
+ "grad_norm": 8.403264045715332,
+ "learning_rate": 9.767294596025469e-05,
+ "loss": 0.9817,
+ "step": 653
+ },
+ {
+ "epoch": 0.8493506493506493,
+ "grad_norm": 7.119625568389893,
+ "learning_rate": 9.764860233862263e-05,
+ "loss": 0.6819,
+ "step": 654
+ },
+ {
+ "epoch": 0.8506493506493507,
+ "grad_norm": 8.16954231262207,
+ "learning_rate": 9.762413546313596e-05,
+ "loss": 0.9261,
+ "step": 655
+ },
+ {
+ "epoch": 0.8519480519480519,
+ "grad_norm": 5.286291599273682,
+ "learning_rate": 9.759954540450357e-05,
+ "loss": 0.3939,
+ "step": 656
+ },
+ {
+ "epoch": 0.8532467532467533,
+ "grad_norm": 6.879117488861084,
+ "learning_rate": 9.757483223379038e-05,
+ "loss": 0.7817,
+ "step": 657
+ },
+ {
+ "epoch": 0.8545454545454545,
+ "grad_norm": 6.944469451904297,
+ "learning_rate": 9.754999602241713e-05,
+ "loss": 0.5585,
+ "step": 658
+ },
+ {
+ "epoch": 0.8558441558441559,
+ "grad_norm": 6.0197906494140625,
+ "learning_rate": 9.752503684216007e-05,
+ "loss": 0.4809,
+ "step": 659
+ },
+ {
+ "epoch": 0.8571428571428571,
+ "grad_norm": 9.095551490783691,
+ "learning_rate": 9.749995476515094e-05,
+ "loss": 0.9696,
+ "step": 660
+ },
+ {
+ "epoch": 0.8584415584415584,
+ "grad_norm": 6.5533928871154785,
+ "learning_rate": 9.747474986387654e-05,
+ "loss": 0.5146,
+ "step": 661
+ },
+ {
+ "epoch": 0.8597402597402597,
+ "grad_norm": 8.780946731567383,
+ "learning_rate": 9.744942221117869e-05,
+ "loss": 0.8808,
+ "step": 662
+ },
+ {
+ "epoch": 0.861038961038961,
+ "grad_norm": 6.859992504119873,
+ "learning_rate": 9.742397188025393e-05,
+ "loss": 0.5708,
+ "step": 663
+ },
+ {
+ "epoch": 0.8623376623376623,
+ "grad_norm": 6.31803560256958,
+ "learning_rate": 9.73983989446534e-05,
+ "loss": 0.4975,
+ "step": 664
+ },
+ {
+ "epoch": 0.8636363636363636,
+ "grad_norm": 6.68372106552124,
+ "learning_rate": 9.737270347828248e-05,
+ "loss": 0.5205,
+ "step": 665
+ },
+ {
+ "epoch": 0.8649350649350649,
+ "grad_norm": 8.428847312927246,
+ "learning_rate": 9.734688555540071e-05,
+ "loss": 0.9157,
+ "step": 666
+ },
+ {
+ "epoch": 0.8662337662337662,
+ "grad_norm": 7.586736679077148,
+ "learning_rate": 9.732094525062153e-05,
+ "loss": 0.7465,
+ "step": 667
+ },
+ {
+ "epoch": 0.8675324675324675,
+ "grad_norm": 8.633830070495605,
+ "learning_rate": 9.729488263891204e-05,
+ "loss": 0.8935,
+ "step": 668
+ },
+ {
+ "epoch": 0.8688311688311688,
+ "grad_norm": 6.911459922790527,
+ "learning_rate": 9.726869779559281e-05,
+ "loss": 0.7008,
+ "step": 669
+ },
+ {
+ "epoch": 0.8701298701298701,
+ "grad_norm": 5.816353797912598,
+ "learning_rate": 9.724239079633769e-05,
+ "loss": 0.4422,
+ "step": 670
+ },
+ {
+ "epoch": 0.8714285714285714,
+ "grad_norm": 5.5716423988342285,
+ "learning_rate": 9.721596171717352e-05,
+ "loss": 0.4216,
+ "step": 671
+ },
+ {
+ "epoch": 0.8727272727272727,
+ "grad_norm": 7.347598075866699,
+ "learning_rate": 9.718941063447997e-05,
+ "loss": 0.739,
+ "step": 672
+ },
+ {
+ "epoch": 0.874025974025974,
+ "grad_norm": 6.3041253089904785,
+ "learning_rate": 9.716273762498929e-05,
+ "loss": 0.5473,
+ "step": 673
+ },
+ {
+ "epoch": 0.8753246753246753,
+ "grad_norm": 6.651629447937012,
+ "learning_rate": 9.713594276578608e-05,
+ "loss": 0.6101,
+ "step": 674
+ },
+ {
+ "epoch": 0.8766233766233766,
+ "grad_norm": 6.883923530578613,
+ "learning_rate": 9.710902613430713e-05,
+ "loss": 0.6453,
+ "step": 675
+ },
+ {
+ "epoch": 0.8779220779220779,
+ "grad_norm": 6.905819892883301,
+ "learning_rate": 9.70819878083411e-05,
+ "loss": 0.6317,
+ "step": 676
+ },
+ {
+ "epoch": 0.8792207792207792,
+ "grad_norm": 6.912679672241211,
+ "learning_rate": 9.705482786602837e-05,
+ "loss": 0.6964,
+ "step": 677
+ },
+ {
+ "epoch": 0.8805194805194805,
+ "grad_norm": 6.441966533660889,
+ "learning_rate": 9.702754638586082e-05,
+ "loss": 0.617,
+ "step": 678
+ },
+ {
+ "epoch": 0.8818181818181818,
+ "grad_norm": 6.045241355895996,
+ "learning_rate": 9.700014344668151e-05,
+ "loss": 0.4839,
+ "step": 679
+ },
+ {
+ "epoch": 0.8831168831168831,
+ "grad_norm": 6.609492778778076,
+ "learning_rate": 9.697261912768454e-05,
+ "loss": 0.6383,
+ "step": 680
+ },
+ {
+ "epoch": 0.8844155844155844,
+ "grad_norm": 6.745547294616699,
+ "learning_rate": 9.69449735084148e-05,
+ "loss": 0.595,
+ "step": 681
+ },
+ {
+ "epoch": 0.8857142857142857,
+ "grad_norm": 6.410647869110107,
+ "learning_rate": 9.691720666876777e-05,
+ "loss": 0.5053,
+ "step": 682
+ },
+ {
+ "epoch": 0.887012987012987,
+ "grad_norm": 7.867794513702393,
+ "learning_rate": 9.688931868898919e-05,
+ "loss": 0.8086,
+ "step": 683
+ },
+ {
+ "epoch": 0.8883116883116883,
+ "grad_norm": 7.473386764526367,
+ "learning_rate": 9.686130964967495e-05,
+ "loss": 0.7465,
+ "step": 684
+ },
+ {
+ "epoch": 0.8896103896103896,
+ "grad_norm": 8.719533920288086,
+ "learning_rate": 9.683317963177075e-05,
+ "loss": 0.9109,
+ "step": 685
+ },
+ {
+ "epoch": 0.8909090909090909,
+ "grad_norm": 8.01264762878418,
+ "learning_rate": 9.680492871657194e-05,
+ "loss": 0.8457,
+ "step": 686
+ },
+ {
+ "epoch": 0.8922077922077922,
+ "grad_norm": 7.10344934463501,
+ "learning_rate": 9.677655698572326e-05,
+ "loss": 0.7412,
+ "step": 687
+ },
+ {
+ "epoch": 0.8935064935064935,
+ "grad_norm": 7.437696933746338,
+ "learning_rate": 9.674806452121865e-05,
+ "loss": 0.6259,
+ "step": 688
+ },
+ {
+ "epoch": 0.8948051948051948,
+ "grad_norm": 8.253927230834961,
+ "learning_rate": 9.671945140540091e-05,
+ "loss": 0.8406,
+ "step": 689
+ },
+ {
+ "epoch": 0.8961038961038961,
+ "grad_norm": 6.132811069488525,
+ "learning_rate": 9.66907177209615e-05,
+ "loss": 0.5437,
+ "step": 690
+ },
+ {
+ "epoch": 0.8974025974025974,
+ "grad_norm": 7.053062438964844,
+ "learning_rate": 9.66618635509404e-05,
+ "loss": 0.7564,
+ "step": 691
+ },
+ {
+ "epoch": 0.8987012987012987,
+ "grad_norm": 7.324159145355225,
+ "learning_rate": 9.663288897872576e-05,
+ "loss": 0.7379,
+ "step": 692
+ },
+ {
+ "epoch": 0.9,
+ "grad_norm": 6.331752300262451,
+ "learning_rate": 9.660379408805365e-05,
+ "loss": 0.6142,
+ "step": 693
+ },
+ {
+ "epoch": 0.9,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9375,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.90625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9375,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9375,
+ "eval_global_dataset_loss": 0.6449323296546936,
+ "eval_global_dataset_runtime": 113.6072,
+ "eval_global_dataset_samples_per_second": 9.427,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.90625,
+ "eval_sts-test-1024_pearson_cosine": 0.8851609078570057,
+ "eval_sts-test-1024_spearman_cosine": 0.9256469608704436,
+ "eval_sts-test-128_pearson_cosine": 0.8741462790740726,
+ "eval_sts-test-128_spearman_cosine": 0.9201083411908414,
+ "eval_sts-test-256_pearson_cosine": 0.876310078043887,
+ "eval_sts-test-256_spearman_cosine": 0.9244446410770436,
+ "eval_sts-test-32_pearson_cosine": 0.8577692639868538,
+ "eval_sts-test-32_spearman_cosine": 0.9079687781785645,
+ "eval_sts-test-512_pearson_cosine": 0.8840830849913406,
+ "eval_sts-test-512_spearman_cosine": 0.9270361808818767,
+ "eval_sts-test-64_pearson_cosine": 0.869592400395789,
+ "eval_sts-test-64_spearman_cosine": 0.9186414251117587,
+ "eval_sts-test_pearson_cosine": 0.8862542788008725,
+ "eval_sts-test_spearman_cosine": 0.9256408740817337,
+ "step": 693
+ },
+ {
+ "epoch": 0.9012987012987013,
+ "grad_norm": 5.754309177398682,
+ "learning_rate": 9.657457896300791e-05,
+ "loss": 0.5799,
+ "step": 694
+ },
+ {
+ "epoch": 0.9025974025974026,
+ "grad_norm": 6.375608444213867,
+ "learning_rate": 9.654524368801981e-05,
+ "loss": 0.5429,
+ "step": 695
+ },
+ {
+ "epoch": 0.9038961038961039,
+ "grad_norm": 7.073978900909424,
+ "learning_rate": 9.651578834786792e-05,
+ "loss": 0.6803,
+ "step": 696
+ },
+ {
+ "epoch": 0.9051948051948052,
+ "grad_norm": 6.796176910400391,
+ "learning_rate": 9.648621302767772e-05,
+ "loss": 0.5692,
+ "step": 697
+ },
+ {
+ "epoch": 0.9064935064935065,
+ "grad_norm": 7.01423978805542,
+ "learning_rate": 9.645651781292147e-05,
+ "loss": 0.665,
+ "step": 698
+ },
+ {
+ "epoch": 0.9077922077922078,
+ "grad_norm": 7.380173683166504,
+ "learning_rate": 9.642670278941794e-05,
+ "loss": 0.7442,
+ "step": 699
+ },
+ {
+ "epoch": 0.9090909090909091,
+ "grad_norm": 6.801253318786621,
+ "learning_rate": 9.63967680433321e-05,
+ "loss": 0.5755,
+ "step": 700
+ },
+ {
+ "epoch": 0.9103896103896104,
+ "grad_norm": 7.75513219833374,
+ "learning_rate": 9.636671366117496e-05,
+ "loss": 0.9233,
+ "step": 701
+ },
+ {
+ "epoch": 0.9116883116883117,
+ "grad_norm": 7.614108562469482,
+ "learning_rate": 9.633653972980323e-05,
+ "loss": 0.8297,
+ "step": 702
+ },
+ {
+ "epoch": 0.912987012987013,
+ "grad_norm": 6.449255466461182,
+ "learning_rate": 9.630624633641918e-05,
+ "loss": 0.7288,
+ "step": 703
+ },
+ {
+ "epoch": 0.9142857142857143,
+ "grad_norm": 7.443954944610596,
+ "learning_rate": 9.627583356857026e-05,
+ "loss": 0.8476,
+ "step": 704
+ },
+ {
+ "epoch": 0.9155844155844156,
+ "grad_norm": 6.5813679695129395,
+ "learning_rate": 9.624530151414893e-05,
+ "loss": 0.6464,
+ "step": 705
+ },
+ {
+ "epoch": 0.9168831168831169,
+ "grad_norm": 6.9515485763549805,
+ "learning_rate": 9.621465026139242e-05,
+ "loss": 0.8881,
+ "step": 706
+ },
+ {
+ "epoch": 0.9181818181818182,
+ "grad_norm": 6.7394022941589355,
+ "learning_rate": 9.618387989888239e-05,
+ "loss": 0.7609,
+ "step": 707
+ },
+ {
+ "epoch": 0.9194805194805195,
+ "grad_norm": 6.180837631225586,
+ "learning_rate": 9.615299051554479e-05,
+ "loss": 0.6834,
+ "step": 708
+ },
+ {
+ "epoch": 0.9207792207792208,
+ "grad_norm": 7.189302921295166,
+ "learning_rate": 9.612198220064944e-05,
+ "loss": 0.8218,
+ "step": 709
+ },
+ {
+ "epoch": 0.922077922077922,
+ "grad_norm": 7.317821979522705,
+ "learning_rate": 9.609085504380996e-05,
+ "loss": 0.7413,
+ "step": 710
+ },
+ {
+ "epoch": 0.9233766233766234,
+ "grad_norm": 6.8858442306518555,
+ "learning_rate": 9.605960913498342e-05,
+ "loss": 0.6379,
+ "step": 711
+ },
+ {
+ "epoch": 0.9246753246753247,
+ "grad_norm": 7.520847797393799,
+ "learning_rate": 9.602824456446999e-05,
+ "loss": 0.8378,
+ "step": 712
+ },
+ {
+ "epoch": 0.925974025974026,
+ "grad_norm": 6.346597194671631,
+ "learning_rate": 9.59967614229129e-05,
+ "loss": 0.5754,
+ "step": 713
+ },
+ {
+ "epoch": 0.9272727272727272,
+ "grad_norm": 7.224513053894043,
+ "learning_rate": 9.596515980129792e-05,
+ "loss": 0.7367,
+ "step": 714
+ },
+ {
+ "epoch": 0.9285714285714286,
+ "grad_norm": 6.537326812744141,
+ "learning_rate": 9.593343979095333e-05,
+ "loss": 0.6389,
+ "step": 715
+ },
+ {
+ "epoch": 0.9298701298701298,
+ "grad_norm": 5.621442794799805,
+ "learning_rate": 9.590160148354949e-05,
+ "loss": 0.4474,
+ "step": 716
+ },
+ {
+ "epoch": 0.9311688311688312,
+ "grad_norm": 6.461991786956787,
+ "learning_rate": 9.586964497109868e-05,
+ "loss": 0.6341,
+ "step": 717
+ },
+ {
+ "epoch": 0.9324675324675324,
+ "grad_norm": 5.925861358642578,
+ "learning_rate": 9.583757034595472e-05,
+ "loss": 0.4793,
+ "step": 718
+ },
+ {
+ "epoch": 0.9337662337662338,
+ "grad_norm": 6.9430646896362305,
+ "learning_rate": 9.580537770081285e-05,
+ "loss": 0.7057,
+ "step": 719
+ },
+ {
+ "epoch": 0.935064935064935,
+ "grad_norm": 7.913745403289795,
+ "learning_rate": 9.577306712870936e-05,
+ "loss": 0.9687,
+ "step": 720
+ },
+ {
+ "epoch": 0.9363636363636364,
+ "grad_norm": 6.996697425842285,
+ "learning_rate": 9.574063872302135e-05,
+ "loss": 0.7017,
+ "step": 721
+ },
+ {
+ "epoch": 0.9376623376623376,
+ "grad_norm": 7.043035984039307,
+ "learning_rate": 9.570809257746643e-05,
+ "loss": 0.7511,
+ "step": 722
+ },
+ {
+ "epoch": 0.938961038961039,
+ "grad_norm": 7.160142421722412,
+ "learning_rate": 9.567542878610251e-05,
+ "loss": 0.7509,
+ "step": 723
+ },
+ {
+ "epoch": 0.9402597402597402,
+ "grad_norm": 5.701127529144287,
+ "learning_rate": 9.564264744332748e-05,
+ "loss": 0.5742,
+ "step": 724
+ },
+ {
+ "epoch": 0.9415584415584416,
+ "grad_norm": 5.027484893798828,
+ "learning_rate": 9.560974864387896e-05,
+ "loss": 0.4425,
+ "step": 725
+ },
+ {
+ "epoch": 0.9428571428571428,
+ "grad_norm": 6.028113842010498,
+ "learning_rate": 9.557673248283401e-05,
+ "loss": 0.5795,
+ "step": 726
+ },
+ {
+ "epoch": 0.9441558441558442,
+ "grad_norm": 6.112645149230957,
+ "learning_rate": 9.554359905560886e-05,
+ "loss": 0.5996,
+ "step": 727
+ },
+ {
+ "epoch": 0.9454545454545454,
+ "grad_norm": 6.774581432342529,
+ "learning_rate": 9.551034845795865e-05,
+ "loss": 0.6483,
+ "step": 728
+ },
+ {
+ "epoch": 0.9467532467532468,
+ "grad_norm": 6.578814506530762,
+ "learning_rate": 9.547698078597714e-05,
+ "loss": 0.6893,
+ "step": 729
+ },
+ {
+ "epoch": 0.948051948051948,
+ "grad_norm": 6.666228294372559,
+ "learning_rate": 9.544349613609643e-05,
+ "loss": 0.5996,
+ "step": 730
+ },
+ {
+ "epoch": 0.9493506493506494,
+ "grad_norm": 7.729548931121826,
+ "learning_rate": 9.540989460508666e-05,
+ "loss": 0.7856,
+ "step": 731
+ },
+ {
+ "epoch": 0.9506493506493506,
+ "grad_norm": 7.698365211486816,
+ "learning_rate": 9.53761762900558e-05,
+ "loss": 0.7376,
+ "step": 732
+ },
+ {
+ "epoch": 0.951948051948052,
+ "grad_norm": 5.8187255859375,
+ "learning_rate": 9.53423412884493e-05,
+ "loss": 0.4758,
+ "step": 733
+ },
+ {
+ "epoch": 0.9532467532467532,
+ "grad_norm": 6.393637657165527,
+ "learning_rate": 9.530838969804979e-05,
+ "loss": 0.52,
+ "step": 734
+ },
+ {
+ "epoch": 0.9545454545454546,
+ "grad_norm": 7.343430042266846,
+ "learning_rate": 9.527432161697694e-05,
+ "loss": 0.6945,
+ "step": 735
+ },
+ {
+ "epoch": 0.9558441558441558,
+ "grad_norm": 6.76441764831543,
+ "learning_rate": 9.524013714368701e-05,
+ "loss": 0.5963,
+ "step": 736
+ },
+ {
+ "epoch": 0.9571428571428572,
+ "grad_norm": 6.522793769836426,
+ "learning_rate": 9.520583637697262e-05,
+ "loss": 0.5071,
+ "step": 737
+ },
+ {
+ "epoch": 0.9584415584415584,
+ "grad_norm": 7.000353813171387,
+ "learning_rate": 9.517141941596253e-05,
+ "loss": 0.7511,
+ "step": 738
+ },
+ {
+ "epoch": 0.9597402597402598,
+ "grad_norm": 6.261977195739746,
+ "learning_rate": 9.513688636012124e-05,
+ "loss": 0.4206,
+ "step": 739
+ },
+ {
+ "epoch": 0.961038961038961,
+ "grad_norm": 6.369211673736572,
+ "learning_rate": 9.510223730924879e-05,
+ "loss": 0.4838,
+ "step": 740
+ },
+ {
+ "epoch": 0.9623376623376624,
+ "grad_norm": 7.286876678466797,
+ "learning_rate": 9.506747236348045e-05,
+ "loss": 0.6777,
+ "step": 741
+ },
+ {
+ "epoch": 0.9636363636363636,
+ "grad_norm": 7.349566459655762,
+ "learning_rate": 9.503259162328642e-05,
+ "loss": 0.7201,
+ "step": 742
+ },
+ {
+ "epoch": 0.964935064935065,
+ "grad_norm": 6.486627101898193,
+ "learning_rate": 9.499759518947156e-05,
+ "loss": 0.5443,
+ "step": 743
+ },
+ {
+ "epoch": 0.9662337662337662,
+ "grad_norm": 7.116005897521973,
+ "learning_rate": 9.496248316317504e-05,
+ "loss": 0.7611,
+ "step": 744
+ },
+ {
+ "epoch": 0.9675324675324676,
+ "grad_norm": 7.029176712036133,
+ "learning_rate": 9.492725564587015e-05,
+ "loss": 0.7433,
+ "step": 745
+ },
+ {
+ "epoch": 0.9688311688311688,
+ "grad_norm": 8.111961364746094,
+ "learning_rate": 9.489191273936388e-05,
+ "loss": 0.8613,
+ "step": 746
+ },
+ {
+ "epoch": 0.9701298701298702,
+ "grad_norm": 5.125340938568115,
+ "learning_rate": 9.485645454579674e-05,
+ "loss": 0.4005,
+ "step": 747
+ },
+ {
+ "epoch": 0.9714285714285714,
+ "grad_norm": 6.340237617492676,
+ "learning_rate": 9.482088116764242e-05,
+ "loss": 0.5841,
+ "step": 748
+ },
+ {
+ "epoch": 0.9727272727272728,
+ "grad_norm": 6.906500816345215,
+ "learning_rate": 9.478519270770745e-05,
+ "loss": 0.7196,
+ "step": 749
+ },
+ {
+ "epoch": 0.974025974025974,
+ "grad_norm": 6.116445541381836,
+ "learning_rate": 9.474938926913098e-05,
+ "loss": 0.5471,
+ "step": 750
+ },
+ {
+ "epoch": 0.9753246753246754,
+ "grad_norm": 6.587038993835449,
+ "learning_rate": 9.471347095538446e-05,
+ "loss": 0.7167,
+ "step": 751
+ },
+ {
+ "epoch": 0.9766233766233766,
+ "grad_norm": 7.6972222328186035,
+ "learning_rate": 9.467743787027129e-05,
+ "loss": 0.7802,
+ "step": 752
+ },
+ {
+ "epoch": 0.977922077922078,
+ "grad_norm": 5.8324360847473145,
+ "learning_rate": 9.464129011792657e-05,
+ "loss": 0.4997,
+ "step": 753
+ },
+ {
+ "epoch": 0.9792207792207792,
+ "grad_norm": 6.904268741607666,
+ "learning_rate": 9.46050278028168e-05,
+ "loss": 0.6485,
+ "step": 754
+ },
+ {
+ "epoch": 0.9805194805194806,
+ "grad_norm": 7.760578155517578,
+ "learning_rate": 9.456865102973955e-05,
+ "loss": 0.7916,
+ "step": 755
+ },
+ {
+ "epoch": 0.9818181818181818,
+ "grad_norm": 7.374014377593994,
+ "learning_rate": 9.453215990382317e-05,
+ "loss": 0.6993,
+ "step": 756
+ },
+ {
+ "epoch": 0.9831168831168832,
+ "grad_norm": 7.127100944519043,
+ "learning_rate": 9.449555453052651e-05,
+ "loss": 0.7207,
+ "step": 757
+ },
+ {
+ "epoch": 0.9844155844155844,
+ "grad_norm": 6.48353385925293,
+ "learning_rate": 9.445883501563855e-05,
+ "loss": 0.6119,
+ "step": 758
+ },
+ {
+ "epoch": 0.9857142857142858,
+ "grad_norm": 7.246218204498291,
+ "learning_rate": 9.442200146527823e-05,
+ "loss": 0.7745,
+ "step": 759
+ },
+ {
+ "epoch": 0.987012987012987,
+ "grad_norm": 6.130473613739014,
+ "learning_rate": 9.438505398589392e-05,
+ "loss": 0.5289,
+ "step": 760
+ },
+ {
+ "epoch": 0.9883116883116884,
+ "grad_norm": 6.276796340942383,
+ "learning_rate": 9.434799268426335e-05,
+ "loss": 0.5566,
+ "step": 761
+ },
+ {
+ "epoch": 0.9896103896103896,
+ "grad_norm": 7.320113658905029,
+ "learning_rate": 9.431081766749312e-05,
+ "loss": 0.8124,
+ "step": 762
+ },
+ {
+ "epoch": 0.990909090909091,
+ "grad_norm": 6.15302848815918,
+ "learning_rate": 9.427352904301852e-05,
+ "loss": 0.632,
+ "step": 763
+ },
+ {
+ "epoch": 0.9922077922077922,
+ "grad_norm": 5.9906439781188965,
+ "learning_rate": 9.423612691860316e-05,
+ "loss": 0.5499,
+ "step": 764
+ },
+ {
+ "epoch": 0.9935064935064936,
+ "grad_norm": 6.963850498199463,
+ "learning_rate": 9.419861140233864e-05,
+ "loss": 0.7703,
+ "step": 765
+ },
+ {
+ "epoch": 0.9948051948051948,
+ "grad_norm": 6.3005595207214355,
+ "learning_rate": 9.416098260264425e-05,
+ "loss": 0.5896,
+ "step": 766
+ },
+ {
+ "epoch": 0.9961038961038962,
+ "grad_norm": 7.499933242797852,
+ "learning_rate": 9.41232406282667e-05,
+ "loss": 0.8109,
+ "step": 767
+ },
+ {
+ "epoch": 0.9974025974025974,
+ "grad_norm": 6.345822334289551,
+ "learning_rate": 9.408538558827975e-05,
+ "loss": 0.6031,
+ "step": 768
+ },
+ {
+ "epoch": 0.9987012987012988,
+ "grad_norm": 6.637183666229248,
+ "learning_rate": 9.404741759208395e-05,
+ "loss": 0.5933,
+ "step": 769
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 6.882605075836182,
+ "learning_rate": 9.400933674940625e-05,
+ "loss": 0.5364,
+ "step": 770
+ },
+ {
+ "epoch": 1.0,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.9375,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9453125,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9453125,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9453125,
+ "eval_global_dataset_loss": 0.6763572096824646,
+ "eval_global_dataset_runtime": 113.6773,
+ "eval_global_dataset_samples_per_second": 9.421,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.921875,
+ "eval_sts-test-1024_pearson_cosine": 0.8891517259069235,
+ "eval_sts-test-1024_spearman_cosine": 0.9288554146133434,
+ "eval_sts-test-128_pearson_cosine": 0.8824459040276136,
+ "eval_sts-test-128_spearman_cosine": 0.9266928143893601,
+ "eval_sts-test-256_pearson_cosine": 0.8867567584518057,
+ "eval_sts-test-256_spearman_cosine": 0.9309693205276476,
+ "eval_sts-test-32_pearson_cosine": 0.8620118716806239,
+ "eval_sts-test-32_spearman_cosine": 0.9141934147504104,
+ "eval_sts-test-512_pearson_cosine": 0.8891832703981848,
+ "eval_sts-test-512_spearman_cosine": 0.9302840197281932,
+ "eval_sts-test-64_pearson_cosine": 0.8738410245116717,
+ "eval_sts-test-64_spearman_cosine": 0.9216490148272323,
+ "eval_sts-test_pearson_cosine": 0.8897698289886751,
+ "eval_sts-test_spearman_cosine": 0.9294207698705734,
+ "step": 770
+ },
+ {
+ "epoch": 1.0012987012987014,
+ "grad_norm": 6.3086981773376465,
+ "learning_rate": 9.397114317029975e-05,
+ "loss": 0.5231,
+ "step": 771
+ },
+ {
+ "epoch": 1.0025974025974025,
+ "grad_norm": 6.194589614868164,
+ "learning_rate": 9.393283696514334e-05,
+ "loss": 0.5183,
+ "step": 772
+ },
+ {
+ "epoch": 1.0038961038961038,
+ "grad_norm": 6.340659141540527,
+ "learning_rate": 9.389441824464144e-05,
+ "loss": 0.5389,
+ "step": 773
+ },
+ {
+ "epoch": 1.0051948051948052,
+ "grad_norm": 7.020819664001465,
+ "learning_rate": 9.385588711982357e-05,
+ "loss": 0.6834,
+ "step": 774
+ },
+ {
+ "epoch": 1.0064935064935066,
+ "grad_norm": 5.873781681060791,
+ "learning_rate": 9.381724370204413e-05,
+ "loss": 0.5017,
+ "step": 775
+ },
+ {
+ "epoch": 1.0077922077922077,
+ "grad_norm": 6.680530548095703,
+ "learning_rate": 9.377848810298209e-05,
+ "loss": 0.6083,
+ "step": 776
+ },
+ {
+ "epoch": 1.009090909090909,
+ "grad_norm": 6.47283935546875,
+ "learning_rate": 9.373962043464056e-05,
+ "loss": 0.5966,
+ "step": 777
+ },
+ {
+ "epoch": 1.0103896103896104,
+ "grad_norm": 6.902966499328613,
+ "learning_rate": 9.370064080934653e-05,
+ "loss": 0.6968,
+ "step": 778
+ },
+ {
+ "epoch": 1.0116883116883117,
+ "grad_norm": 6.695742130279541,
+ "learning_rate": 9.366154933975058e-05,
+ "loss": 0.6885,
+ "step": 779
+ },
+ {
+ "epoch": 1.0129870129870129,
+ "grad_norm": 5.6577653884887695,
+ "learning_rate": 9.362234613882651e-05,
+ "loss": 0.4656,
+ "step": 780
+ },
+ {
+ "epoch": 1.0142857142857142,
+ "grad_norm": 5.678228378295898,
+ "learning_rate": 9.3583031319871e-05,
+ "loss": 0.5064,
+ "step": 781
+ },
+ {
+ "epoch": 1.0155844155844156,
+ "grad_norm": 6.301676273345947,
+ "learning_rate": 9.354360499650332e-05,
+ "loss": 0.5485,
+ "step": 782
+ },
+ {
+ "epoch": 1.016883116883117,
+ "grad_norm": 6.376684665679932,
+ "learning_rate": 9.3504067282665e-05,
+ "loss": 0.6034,
+ "step": 783
+ },
+ {
+ "epoch": 1.018181818181818,
+ "grad_norm": 6.012096405029297,
+ "learning_rate": 9.346441829261944e-05,
+ "loss": 0.5317,
+ "step": 784
+ },
+ {
+ "epoch": 1.0194805194805194,
+ "grad_norm": 6.788561820983887,
+ "learning_rate": 9.342465814095167e-05,
+ "loss": 0.6418,
+ "step": 785
+ },
+ {
+ "epoch": 1.0207792207792208,
+ "grad_norm": 6.6580915451049805,
+ "learning_rate": 9.338478694256798e-05,
+ "loss": 0.5612,
+ "step": 786
+ },
+ {
+ "epoch": 1.0220779220779221,
+ "grad_norm": 6.925287246704102,
+ "learning_rate": 9.334480481269555e-05,
+ "loss": 0.6258,
+ "step": 787
+ },
+ {
+ "epoch": 1.0233766233766233,
+ "grad_norm": 5.296406269073486,
+ "learning_rate": 9.330471186688216e-05,
+ "loss": 0.44,
+ "step": 788
+ },
+ {
+ "epoch": 1.0246753246753246,
+ "grad_norm": 5.672299861907959,
+ "learning_rate": 9.326450822099588e-05,
+ "loss": 0.5192,
+ "step": 789
+ },
+ {
+ "epoch": 1.025974025974026,
+ "grad_norm": 7.410915374755859,
+ "learning_rate": 9.322419399122467e-05,
+ "loss": 0.7895,
+ "step": 790
+ },
+ {
+ "epoch": 1.0272727272727273,
+ "grad_norm": 6.18645715713501,
+ "learning_rate": 9.318376929407607e-05,
+ "loss": 0.5412,
+ "step": 791
+ },
+ {
+ "epoch": 1.0285714285714285,
+ "grad_norm": 6.232503414154053,
+ "learning_rate": 9.314323424637687e-05,
+ "loss": 0.5783,
+ "step": 792
+ },
+ {
+ "epoch": 1.0298701298701298,
+ "grad_norm": 7.006043434143066,
+ "learning_rate": 9.310258896527278e-05,
+ "loss": 0.7388,
+ "step": 793
+ },
+ {
+ "epoch": 1.0311688311688312,
+ "grad_norm": 5.843970775604248,
+ "learning_rate": 9.306183356822812e-05,
+ "loss": 0.5155,
+ "step": 794
+ },
+ {
+ "epoch": 1.0324675324675325,
+ "grad_norm": 5.6648783683776855,
+ "learning_rate": 9.30209681730254e-05,
+ "loss": 0.5151,
+ "step": 795
+ },
+ {
+ "epoch": 1.0337662337662337,
+ "grad_norm": 6.363714694976807,
+ "learning_rate": 9.2979992897765e-05,
+ "loss": 0.6496,
+ "step": 796
+ },
+ {
+ "epoch": 1.035064935064935,
+ "grad_norm": 5.969563007354736,
+ "learning_rate": 9.293890786086489e-05,
+ "loss": 0.5297,
+ "step": 797
+ },
+ {
+ "epoch": 1.0363636363636364,
+ "grad_norm": 6.924633026123047,
+ "learning_rate": 9.289771318106027e-05,
+ "loss": 0.7569,
+ "step": 798
+ },
+ {
+ "epoch": 1.0376623376623377,
+ "grad_norm": 6.2910895347595215,
+ "learning_rate": 9.285640897740315e-05,
+ "loss": 0.5531,
+ "step": 799
+ },
+ {
+ "epoch": 1.0389610389610389,
+ "grad_norm": 6.823774814605713,
+ "learning_rate": 9.281499536926212e-05,
+ "loss": 0.6787,
+ "step": 800
+ },
+ {
+ "epoch": 1.0402597402597402,
+ "grad_norm": 6.377769470214844,
+ "learning_rate": 9.27734724763219e-05,
+ "loss": 0.6457,
+ "step": 801
+ },
+ {
+ "epoch": 1.0415584415584416,
+ "grad_norm": 6.330393314361572,
+ "learning_rate": 9.273184041858306e-05,
+ "loss": 0.5036,
+ "step": 802
+ },
+ {
+ "epoch": 1.042857142857143,
+ "grad_norm": 6.70945930480957,
+ "learning_rate": 9.269009931636166e-05,
+ "loss": 0.6496,
+ "step": 803
+ },
+ {
+ "epoch": 1.044155844155844,
+ "grad_norm": 6.34543514251709,
+ "learning_rate": 9.264824929028889e-05,
+ "loss": 0.5846,
+ "step": 804
+ },
+ {
+ "epoch": 1.0454545454545454,
+ "grad_norm": 6.833179950714111,
+ "learning_rate": 9.260629046131073e-05,
+ "loss": 0.6225,
+ "step": 805
+ },
+ {
+ "epoch": 1.0467532467532468,
+ "grad_norm": 5.559639930725098,
+ "learning_rate": 9.25642229506876e-05,
+ "loss": 0.4491,
+ "step": 806
+ },
+ {
+ "epoch": 1.0480519480519481,
+ "grad_norm": 6.042374610900879,
+ "learning_rate": 9.252204687999402e-05,
+ "loss": 0.5019,
+ "step": 807
+ },
+ {
+ "epoch": 1.0493506493506493,
+ "grad_norm": 7.4053215980529785,
+ "learning_rate": 9.247976237111823e-05,
+ "loss": 0.6818,
+ "step": 808
+ },
+ {
+ "epoch": 1.0506493506493506,
+ "grad_norm": 6.882972717285156,
+ "learning_rate": 9.243736954626186e-05,
+ "loss": 0.6625,
+ "step": 809
+ },
+ {
+ "epoch": 1.051948051948052,
+ "grad_norm": 5.182650089263916,
+ "learning_rate": 9.239486852793952e-05,
+ "loss": 0.3794,
+ "step": 810
+ },
+ {
+ "epoch": 1.0532467532467533,
+ "grad_norm": 7.047555923461914,
+ "learning_rate": 9.235225943897863e-05,
+ "loss": 0.573,
+ "step": 811
+ },
+ {
+ "epoch": 1.0545454545454545,
+ "grad_norm": 5.860465049743652,
+ "learning_rate": 9.230954240251881e-05,
+ "loss": 0.4855,
+ "step": 812
+ },
+ {
+ "epoch": 1.0558441558441558,
+ "grad_norm": 6.186687469482422,
+ "learning_rate": 9.226671754201169e-05,
+ "loss": 0.5567,
+ "step": 813
+ },
+ {
+ "epoch": 1.0571428571428572,
+ "grad_norm": 6.923488616943359,
+ "learning_rate": 9.222378498122052e-05,
+ "loss": 0.5972,
+ "step": 814
+ },
+ {
+ "epoch": 1.0584415584415585,
+ "grad_norm": 5.582354545593262,
+ "learning_rate": 9.218074484421978e-05,
+ "loss": 0.493,
+ "step": 815
+ },
+ {
+ "epoch": 1.0597402597402596,
+ "grad_norm": 5.751039028167725,
+ "learning_rate": 9.213759725539484e-05,
+ "loss": 0.4468,
+ "step": 816
+ },
+ {
+ "epoch": 1.061038961038961,
+ "grad_norm": 6.912971496582031,
+ "learning_rate": 9.209434233944167e-05,
+ "loss": 0.6582,
+ "step": 817
+ },
+ {
+ "epoch": 1.0623376623376624,
+ "grad_norm": 6.241170406341553,
+ "learning_rate": 9.20509802213663e-05,
+ "loss": 0.6123,
+ "step": 818
+ },
+ {
+ "epoch": 1.0636363636363637,
+ "grad_norm": 6.001036167144775,
+ "learning_rate": 9.200751102648467e-05,
+ "loss": 0.49,
+ "step": 819
+ },
+ {
+ "epoch": 1.0649350649350648,
+ "grad_norm": 6.102304458618164,
+ "learning_rate": 9.196393488042213e-05,
+ "loss": 0.5702,
+ "step": 820
+ },
+ {
+ "epoch": 1.0662337662337662,
+ "grad_norm": 6.685356140136719,
+ "learning_rate": 9.192025190911312e-05,
+ "loss": 0.6142,
+ "step": 821
+ },
+ {
+ "epoch": 1.0675324675324676,
+ "grad_norm": 5.738699913024902,
+ "learning_rate": 9.18764622388008e-05,
+ "loss": 0.4589,
+ "step": 822
+ },
+ {
+ "epoch": 1.068831168831169,
+ "grad_norm": 6.65635347366333,
+ "learning_rate": 9.183256599603671e-05,
+ "loss": 0.6596,
+ "step": 823
+ },
+ {
+ "epoch": 1.07012987012987,
+ "grad_norm": 6.769857406616211,
+ "learning_rate": 9.178856330768036e-05,
+ "loss": 0.6645,
+ "step": 824
+ },
+ {
+ "epoch": 1.0714285714285714,
+ "grad_norm": 6.2122368812561035,
+ "learning_rate": 9.17444543008989e-05,
+ "loss": 0.5507,
+ "step": 825
+ },
+ {
+ "epoch": 1.0727272727272728,
+ "grad_norm": 6.053091049194336,
+ "learning_rate": 9.17002391031667e-05,
+ "loss": 0.5414,
+ "step": 826
+ },
+ {
+ "epoch": 1.074025974025974,
+ "grad_norm": 5.982730388641357,
+ "learning_rate": 9.165591784226511e-05,
+ "loss": 0.4573,
+ "step": 827
+ },
+ {
+ "epoch": 1.0753246753246752,
+ "grad_norm": 5.181983947753906,
+ "learning_rate": 9.16114906462819e-05,
+ "loss": 0.4044,
+ "step": 828
+ },
+ {
+ "epoch": 1.0766233766233766,
+ "grad_norm": 6.529760360717773,
+ "learning_rate": 9.156695764361107e-05,
+ "loss": 0.5288,
+ "step": 829
+ },
+ {
+ "epoch": 1.077922077922078,
+ "grad_norm": 5.713877201080322,
+ "learning_rate": 9.152231896295232e-05,
+ "loss": 0.473,
+ "step": 830
+ },
+ {
+ "epoch": 1.0792207792207793,
+ "grad_norm": 5.574084281921387,
+ "learning_rate": 9.147757473331082e-05,
+ "loss": 0.4046,
+ "step": 831
+ },
+ {
+ "epoch": 1.0805194805194804,
+ "grad_norm": 6.5338358879089355,
+ "learning_rate": 9.143272508399678e-05,
+ "loss": 0.5648,
+ "step": 832
+ },
+ {
+ "epoch": 1.0818181818181818,
+ "grad_norm": 7.523197174072266,
+ "learning_rate": 9.138777014462503e-05,
+ "loss": 0.7126,
+ "step": 833
+ },
+ {
+ "epoch": 1.0831168831168831,
+ "grad_norm": 7.040456295013428,
+ "learning_rate": 9.13427100451147e-05,
+ "loss": 0.5931,
+ "step": 834
+ },
+ {
+ "epoch": 1.0844155844155845,
+ "grad_norm": 5.61920166015625,
+ "learning_rate": 9.129754491568886e-05,
+ "loss": 0.5465,
+ "step": 835
+ },
+ {
+ "epoch": 1.0857142857142856,
+ "grad_norm": 5.607574462890625,
+ "learning_rate": 9.125227488687407e-05,
+ "loss": 0.4738,
+ "step": 836
+ },
+ {
+ "epoch": 1.087012987012987,
+ "grad_norm": 6.3649516105651855,
+ "learning_rate": 9.120690008950008e-05,
+ "loss": 0.5459,
+ "step": 837
+ },
+ {
+ "epoch": 1.0883116883116883,
+ "grad_norm": 5.816140174865723,
+ "learning_rate": 9.11614206546994e-05,
+ "loss": 0.4774,
+ "step": 838
+ },
+ {
+ "epoch": 1.0896103896103897,
+ "grad_norm": 5.179698467254639,
+ "learning_rate": 9.111583671390697e-05,
+ "loss": 0.3781,
+ "step": 839
+ },
+ {
+ "epoch": 1.0909090909090908,
+ "grad_norm": 5.196139812469482,
+ "learning_rate": 9.107014839885971e-05,
+ "loss": 0.4029,
+ "step": 840
+ },
+ {
+ "epoch": 1.0922077922077922,
+ "grad_norm": 5.626153945922852,
+ "learning_rate": 9.102435584159622e-05,
+ "loss": 0.516,
+ "step": 841
+ },
+ {
+ "epoch": 1.0935064935064935,
+ "grad_norm": 6.147073745727539,
+ "learning_rate": 9.097845917445633e-05,
+ "loss": 0.558,
+ "step": 842
+ },
+ {
+ "epoch": 1.094805194805195,
+ "grad_norm": 6.763444423675537,
+ "learning_rate": 9.093245853008076e-05,
+ "loss": 0.6146,
+ "step": 843
+ },
+ {
+ "epoch": 1.096103896103896,
+ "grad_norm": 7.35480260848999,
+ "learning_rate": 9.088635404141069e-05,
+ "loss": 0.6729,
+ "step": 844
+ },
+ {
+ "epoch": 1.0974025974025974,
+ "grad_norm": 5.741674900054932,
+ "learning_rate": 9.084014584168747e-05,
+ "loss": 0.4631,
+ "step": 845
+ },
+ {
+ "epoch": 1.0987012987012987,
+ "grad_norm": 6.2829484939575195,
+ "learning_rate": 9.079383406445214e-05,
+ "loss": 0.5166,
+ "step": 846
+ },
+ {
+ "epoch": 1.1,
+ "grad_norm": 7.058021545410156,
+ "learning_rate": 9.074741884354506e-05,
+ "loss": 0.6677,
+ "step": 847
+ },
+ {
+ "epoch": 1.1,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.9140625,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.921875,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9296875,
+ "eval_global_dataset_loss": 0.7927160263061523,
+ "eval_global_dataset_runtime": 113.5705,
+ "eval_global_dataset_samples_per_second": 9.43,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.9140625,
+ "eval_sts-test-1024_pearson_cosine": 0.8833424951570886,
+ "eval_sts-test-1024_spearman_cosine": 0.9211738872614664,
+ "eval_sts-test-128_pearson_cosine": 0.8763123791960121,
+ "eval_sts-test-128_spearman_cosine": 0.9178787862910495,
+ "eval_sts-test-256_pearson_cosine": 0.8816487151898631,
+ "eval_sts-test-256_spearman_cosine": 0.9213876409591016,
+ "eval_sts-test-32_pearson_cosine": 0.8488917028735847,
+ "eval_sts-test-32_spearman_cosine": 0.9061946582928297,
+ "eval_sts-test-512_pearson_cosine": 0.8854829980855135,
+ "eval_sts-test-512_spearman_cosine": 0.9220568296707946,
+ "eval_sts-test-64_pearson_cosine": 0.8676984844858082,
+ "eval_sts-test-64_spearman_cosine": 0.9131711922935439,
+ "eval_sts-test_pearson_cosine": 0.88398317384196,
+ "eval_sts-test_spearman_cosine": 0.9202243482227238,
+ "step": 847
+ },
+ {
+ "epoch": 1.1012987012987012,
+ "grad_norm": 5.547793865203857,
+ "learning_rate": 9.070090031310558e-05,
+ "loss": 0.4758,
+ "step": 848
+ },
+ {
+ "epoch": 1.1025974025974026,
+ "grad_norm": 7.1977105140686035,
+ "learning_rate": 9.065427860757159e-05,
+ "loss": 0.6897,
+ "step": 849
+ },
+ {
+ "epoch": 1.103896103896104,
+ "grad_norm": 6.090529441833496,
+ "learning_rate": 9.060755386167912e-05,
+ "loss": 0.5571,
+ "step": 850
+ },
+ {
+ "epoch": 1.1051948051948053,
+ "grad_norm": 7.271042823791504,
+ "learning_rate": 9.056072621046206e-05,
+ "loss": 0.7302,
+ "step": 851
+ },
+ {
+ "epoch": 1.1064935064935064,
+ "grad_norm": 6.639366149902344,
+ "learning_rate": 9.051379578925165e-05,
+ "loss": 0.5423,
+ "step": 852
+ },
+ {
+ "epoch": 1.1077922077922078,
+ "grad_norm": 6.408458709716797,
+ "learning_rate": 9.046676273367613e-05,
+ "loss": 0.6124,
+ "step": 853
+ },
+ {
+ "epoch": 1.1090909090909091,
+ "grad_norm": 6.25244665145874,
+ "learning_rate": 9.041962717966035e-05,
+ "loss": 0.5472,
+ "step": 854
+ },
+ {
+ "epoch": 1.1103896103896105,
+ "grad_norm": 6.482189178466797,
+ "learning_rate": 9.037238926342544e-05,
+ "loss": 0.6467,
+ "step": 855
+ },
+ {
+ "epoch": 1.1116883116883116,
+ "grad_norm": 5.9097490310668945,
+ "learning_rate": 9.032504912148826e-05,
+ "loss": 0.5758,
+ "step": 856
+ },
+ {
+ "epoch": 1.112987012987013,
+ "grad_norm": 6.042409896850586,
+ "learning_rate": 9.027760689066116e-05,
+ "loss": 0.5486,
+ "step": 857
+ },
+ {
+ "epoch": 1.1142857142857143,
+ "grad_norm": 6.9155964851379395,
+ "learning_rate": 9.023006270805151e-05,
+ "loss": 0.7601,
+ "step": 858
+ },
+ {
+ "epoch": 1.1155844155844157,
+ "grad_norm": 5.6852850914001465,
+ "learning_rate": 9.018241671106134e-05,
+ "loss": 0.48,
+ "step": 859
+ },
+ {
+ "epoch": 1.1168831168831168,
+ "grad_norm": 4.734926223754883,
+ "learning_rate": 9.01346690373869e-05,
+ "loss": 0.3987,
+ "step": 860
+ },
+ {
+ "epoch": 1.1181818181818182,
+ "grad_norm": 5.621240139007568,
+ "learning_rate": 9.008681982501825e-05,
+ "loss": 0.5175,
+ "step": 861
+ },
+ {
+ "epoch": 1.1194805194805195,
+ "grad_norm": 5.820254325866699,
+ "learning_rate": 9.003886921223899e-05,
+ "loss": 0.546,
+ "step": 862
+ },
+ {
+ "epoch": 1.1207792207792209,
+ "grad_norm": 5.572368621826172,
+ "learning_rate": 8.999081733762568e-05,
+ "loss": 0.4782,
+ "step": 863
+ },
+ {
+ "epoch": 1.122077922077922,
+ "grad_norm": 6.293596267700195,
+ "learning_rate": 8.994266434004755e-05,
+ "loss": 0.5394,
+ "step": 864
+ },
+ {
+ "epoch": 1.1233766233766234,
+ "grad_norm": 6.250242233276367,
+ "learning_rate": 8.989441035866606e-05,
+ "loss": 0.5649,
+ "step": 865
+ },
+ {
+ "epoch": 1.1246753246753247,
+ "grad_norm": 6.430608749389648,
+ "learning_rate": 8.984605553293461e-05,
+ "loss": 0.6247,
+ "step": 866
+ },
+ {
+ "epoch": 1.1259740259740258,
+ "grad_norm": 5.867380142211914,
+ "learning_rate": 8.979760000259787e-05,
+ "loss": 0.5004,
+ "step": 867
+ },
+ {
+ "epoch": 1.1272727272727272,
+ "grad_norm": 5.570036888122559,
+ "learning_rate": 8.974904390769168e-05,
+ "loss": 0.4551,
+ "step": 868
+ },
+ {
+ "epoch": 1.1285714285714286,
+ "grad_norm": 5.702778339385986,
+ "learning_rate": 8.970038738854245e-05,
+ "loss": 0.4896,
+ "step": 869
+ },
+ {
+ "epoch": 1.12987012987013,
+ "grad_norm": 6.742293357849121,
+ "learning_rate": 8.965163058576683e-05,
+ "loss": 0.5858,
+ "step": 870
+ },
+ {
+ "epoch": 1.1311688311688313,
+ "grad_norm": 7.0269036293029785,
+ "learning_rate": 8.96027736402713e-05,
+ "loss": 0.6222,
+ "step": 871
+ },
+ {
+ "epoch": 1.1324675324675324,
+ "grad_norm": 7.605057239532471,
+ "learning_rate": 8.955381669325171e-05,
+ "loss": 0.6812,
+ "step": 872
+ },
+ {
+ "epoch": 1.1337662337662338,
+ "grad_norm": 6.691396713256836,
+ "learning_rate": 8.950475988619298e-05,
+ "loss": 0.6444,
+ "step": 873
+ },
+ {
+ "epoch": 1.135064935064935,
+ "grad_norm": 5.929075241088867,
+ "learning_rate": 8.945560336086855e-05,
+ "loss": 0.4732,
+ "step": 874
+ },
+ {
+ "epoch": 1.1363636363636362,
+ "grad_norm": 6.509552001953125,
+ "learning_rate": 8.94063472593401e-05,
+ "loss": 0.4766,
+ "step": 875
+ },
+ {
+ "epoch": 1.1376623376623376,
+ "grad_norm": 6.235232353210449,
+ "learning_rate": 8.935699172395707e-05,
+ "loss": 0.5361,
+ "step": 876
+ },
+ {
+ "epoch": 1.138961038961039,
+ "grad_norm": 6.687936305999756,
+ "learning_rate": 8.930753689735622e-05,
+ "loss": 0.6461,
+ "step": 877
+ },
+ {
+ "epoch": 1.1402597402597403,
+ "grad_norm": 6.929695129394531,
+ "learning_rate": 8.925798292246134e-05,
+ "loss": 0.6055,
+ "step": 878
+ },
+ {
+ "epoch": 1.1415584415584417,
+ "grad_norm": 6.169885635375977,
+ "learning_rate": 8.920832994248267e-05,
+ "loss": 0.5212,
+ "step": 879
+ },
+ {
+ "epoch": 1.1428571428571428,
+ "grad_norm": 5.577329158782959,
+ "learning_rate": 8.915857810091665e-05,
+ "loss": 0.5049,
+ "step": 880
+ },
+ {
+ "epoch": 1.1441558441558441,
+ "grad_norm": 6.382843971252441,
+ "learning_rate": 8.910872754154539e-05,
+ "loss": 0.6102,
+ "step": 881
+ },
+ {
+ "epoch": 1.1454545454545455,
+ "grad_norm": 6.197054386138916,
+ "learning_rate": 8.90587784084363e-05,
+ "loss": 0.5279,
+ "step": 882
+ },
+ {
+ "epoch": 1.1467532467532466,
+ "grad_norm": 6.374361991882324,
+ "learning_rate": 8.900873084594164e-05,
+ "loss": 0.6135,
+ "step": 883
+ },
+ {
+ "epoch": 1.148051948051948,
+ "grad_norm": 5.588046073913574,
+ "learning_rate": 8.895858499869816e-05,
+ "loss": 0.4196,
+ "step": 884
+ },
+ {
+ "epoch": 1.1493506493506493,
+ "grad_norm": 6.311435699462891,
+ "learning_rate": 8.890834101162668e-05,
+ "loss": 0.634,
+ "step": 885
+ },
+ {
+ "epoch": 1.1506493506493507,
+ "grad_norm": 5.719446182250977,
+ "learning_rate": 8.885799902993157e-05,
+ "loss": 0.5867,
+ "step": 886
+ },
+ {
+ "epoch": 1.151948051948052,
+ "grad_norm": 6.009184837341309,
+ "learning_rate": 8.880755919910047e-05,
+ "loss": 0.5583,
+ "step": 887
+ },
+ {
+ "epoch": 1.1532467532467532,
+ "grad_norm": 5.596704959869385,
+ "learning_rate": 8.875702166490373e-05,
+ "loss": 0.4851,
+ "step": 888
+ },
+ {
+ "epoch": 1.1545454545454545,
+ "grad_norm": 5.781062602996826,
+ "learning_rate": 8.870638657339417e-05,
+ "loss": 0.5288,
+ "step": 889
+ },
+ {
+ "epoch": 1.155844155844156,
+ "grad_norm": 8.139880180358887,
+ "learning_rate": 8.865565407090644e-05,
+ "loss": 0.8795,
+ "step": 890
+ },
+ {
+ "epoch": 1.157142857142857,
+ "grad_norm": 5.296769618988037,
+ "learning_rate": 8.860482430405678e-05,
+ "loss": 0.4514,
+ "step": 891
+ },
+ {
+ "epoch": 1.1584415584415584,
+ "grad_norm": 6.376003265380859,
+ "learning_rate": 8.855389741974244e-05,
+ "loss": 0.5311,
+ "step": 892
+ },
+ {
+ "epoch": 1.1597402597402597,
+ "grad_norm": 3.899746894836426,
+ "learning_rate": 8.850287356514146e-05,
+ "loss": 0.2328,
+ "step": 893
+ },
+ {
+ "epoch": 1.161038961038961,
+ "grad_norm": 6.138718605041504,
+ "learning_rate": 8.845175288771201e-05,
+ "loss": 0.4935,
+ "step": 894
+ },
+ {
+ "epoch": 1.1623376623376624,
+ "grad_norm": 6.556604385375977,
+ "learning_rate": 8.840053553519215e-05,
+ "loss": 0.605,
+ "step": 895
+ },
+ {
+ "epoch": 1.1636363636363636,
+ "grad_norm": 6.4802374839782715,
+ "learning_rate": 8.834922165559927e-05,
+ "loss": 0.5583,
+ "step": 896
+ },
+ {
+ "epoch": 1.164935064935065,
+ "grad_norm": 7.814708232879639,
+ "learning_rate": 8.829781139722979e-05,
+ "loss": 0.7555,
+ "step": 897
+ },
+ {
+ "epoch": 1.1662337662337663,
+ "grad_norm": 4.639040470123291,
+ "learning_rate": 8.824630490865858e-05,
+ "loss": 0.3022,
+ "step": 898
+ },
+ {
+ "epoch": 1.1675324675324674,
+ "grad_norm": 5.5277533531188965,
+ "learning_rate": 8.819470233873868e-05,
+ "loss": 0.4693,
+ "step": 899
+ },
+ {
+ "epoch": 1.1688311688311688,
+ "grad_norm": 7.185070514678955,
+ "learning_rate": 8.81430038366008e-05,
+ "loss": 0.6197,
+ "step": 900
+ },
+ {
+ "epoch": 1.1701298701298701,
+ "grad_norm": 6.97157096862793,
+ "learning_rate": 8.809120955165288e-05,
+ "loss": 0.5576,
+ "step": 901
+ },
+ {
+ "epoch": 1.1714285714285715,
+ "grad_norm": 7.55079460144043,
+ "learning_rate": 8.803931963357962e-05,
+ "loss": 0.6272,
+ "step": 902
+ },
+ {
+ "epoch": 1.1727272727272728,
+ "grad_norm": 7.3252153396606445,
+ "learning_rate": 8.798733423234218e-05,
+ "loss": 0.6684,
+ "step": 903
+ },
+ {
+ "epoch": 1.174025974025974,
+ "grad_norm": 6.965551853179932,
+ "learning_rate": 8.793525349817765e-05,
+ "loss": 0.6356,
+ "step": 904
+ },
+ {
+ "epoch": 1.1753246753246753,
+ "grad_norm": 7.822068214416504,
+ "learning_rate": 8.788307758159859e-05,
+ "loss": 0.729,
+ "step": 905
+ },
+ {
+ "epoch": 1.1766233766233767,
+ "grad_norm": 6.392927646636963,
+ "learning_rate": 8.783080663339264e-05,
+ "loss": 0.5553,
+ "step": 906
+ },
+ {
+ "epoch": 1.1779220779220778,
+ "grad_norm": 5.718196868896484,
+ "learning_rate": 8.777844080462212e-05,
+ "loss": 0.466,
+ "step": 907
+ },
+ {
+ "epoch": 1.1792207792207792,
+ "grad_norm": 4.9061384201049805,
+ "learning_rate": 8.772598024662351e-05,
+ "loss": 0.4107,
+ "step": 908
+ },
+ {
+ "epoch": 1.1805194805194805,
+ "grad_norm": 7.346360683441162,
+ "learning_rate": 8.767342511100712e-05,
+ "loss": 0.8187,
+ "step": 909
+ },
+ {
+ "epoch": 1.1818181818181819,
+ "grad_norm": 6.1015801429748535,
+ "learning_rate": 8.762077554965651e-05,
+ "loss": 0.5597,
+ "step": 910
+ },
+ {
+ "epoch": 1.1831168831168832,
+ "grad_norm": 6.522135257720947,
+ "learning_rate": 8.756803171472816e-05,
+ "loss": 0.6618,
+ "step": 911
+ },
+ {
+ "epoch": 1.1844155844155844,
+ "grad_norm": 5.506206035614014,
+ "learning_rate": 8.751519375865103e-05,
+ "loss": 0.5037,
+ "step": 912
+ },
+ {
+ "epoch": 1.1857142857142857,
+ "grad_norm": 6.1652302742004395,
+ "learning_rate": 8.746226183412603e-05,
+ "loss": 0.6033,
+ "step": 913
+ },
+ {
+ "epoch": 1.187012987012987,
+ "grad_norm": 4.877734184265137,
+ "learning_rate": 8.74092360941257e-05,
+ "loss": 0.3695,
+ "step": 914
+ },
+ {
+ "epoch": 1.1883116883116882,
+ "grad_norm": 6.115694522857666,
+ "learning_rate": 8.735611669189365e-05,
+ "loss": 0.5632,
+ "step": 915
+ },
+ {
+ "epoch": 1.1896103896103896,
+ "grad_norm": 6.231703281402588,
+ "learning_rate": 8.730290378094422e-05,
+ "loss": 0.6387,
+ "step": 916
+ },
+ {
+ "epoch": 1.190909090909091,
+ "grad_norm": 5.317518711090088,
+ "learning_rate": 8.724959751506196e-05,
+ "loss": 0.4989,
+ "step": 917
+ },
+ {
+ "epoch": 1.1922077922077923,
+ "grad_norm": 6.745793342590332,
+ "learning_rate": 8.71961980483012e-05,
+ "loss": 0.7159,
+ "step": 918
+ },
+ {
+ "epoch": 1.1935064935064936,
+ "grad_norm": 5.351443767547607,
+ "learning_rate": 8.714270553498567e-05,
+ "loss": 0.4639,
+ "step": 919
+ },
+ {
+ "epoch": 1.1948051948051948,
+ "grad_norm": 5.908309459686279,
+ "learning_rate": 8.708912012970796e-05,
+ "loss": 0.52,
+ "step": 920
+ },
+ {
+ "epoch": 1.1961038961038961,
+ "grad_norm": 5.1110687255859375,
+ "learning_rate": 8.703544198732911e-05,
+ "loss": 0.431,
+ "step": 921
+ },
+ {
+ "epoch": 1.1974025974025975,
+ "grad_norm": 5.573983669281006,
+ "learning_rate": 8.698167126297823e-05,
+ "loss": 0.5008,
+ "step": 922
+ },
+ {
+ "epoch": 1.1987012987012986,
+ "grad_norm": 6.01815938949585,
+ "learning_rate": 8.692780811205192e-05,
+ "loss": 0.5209,
+ "step": 923
+ },
+ {
+ "epoch": 1.2,
+ "grad_norm": 5.274326801300049,
+ "learning_rate": 8.687385269021392e-05,
+ "loss": 0.4301,
+ "step": 924
+ },
+ {
+ "epoch": 1.2,
+ "eval_allNLI--triplets-1024_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-128_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-256_cosine_accuracy": 0.9296875,
+ "eval_allNLI--triplets-32_cosine_accuracy": 0.921875,
+ "eval_allNLI--triplets-512_cosine_accuracy": 0.9296875,
+ "eval_allNLI-triplets_cosine_accuracy": 0.9296875,
+ "eval_global_dataset_loss": 0.6650819778442383,
+ "eval_global_dataset_runtime": 113.6493,
+ "eval_global_dataset_samples_per_second": 9.424,
+ "eval_global_dataset_steps_per_second": 0.079,
+ "eval_sequential_score": 0.921875,
+ "eval_sts-test-1024_pearson_cosine": 0.885149365257635,
+ "eval_sts-test-1024_spearman_cosine": 0.9258567760577375,
+ "eval_sts-test-128_pearson_cosine": 0.880812025351224,
+ "eval_sts-test-128_spearman_cosine": 0.9247529190228797,
+ "eval_sts-test-256_pearson_cosine": 0.8861095686328364,
+ "eval_sts-test-256_spearman_cosine": 0.9268503548030278,
+ "eval_sts-test-32_pearson_cosine": 0.8553627166015102,
+ "eval_sts-test-32_spearman_cosine": 0.9153789063632594,
+ "eval_sts-test-512_pearson_cosine": 0.88725994150817,
+ "eval_sts-test-512_spearman_cosine": 0.9272581696465901,
+ "eval_sts-test-64_pearson_cosine": 0.8741180224496476,
+ "eval_sts-test-64_spearman_cosine": 0.922292066152112,
+ "eval_sts-test_pearson_cosine": 0.8878947010894737,
+ "eval_sts-test_spearman_cosine": 0.9258002047273748,
+ "step": 924
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2310,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 3,
+ "save_steps": 231,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 0.0,
+ "train_batch_size": 192,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-924/training_args.bin b/checkpoint-924/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..792c593f3faf30c2f0b16520bd3c97d97741b53a
--- /dev/null
+++ b/checkpoint-924/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9fadd592bad93c127ab2cc1e37bb17c0477dfea67bb61a8baf05d9892c2d39d8
+size 5880